def find_ROI(img, mask, window_ratio):
    """Extract square sub-regions of ``img`` centred on each connected
    component of ``mask``.

    Parameters
    ----------
    img : ndarray
        Source image.
    mask : ndarray
        BGR mask image; non-zero pixels mark the regions of interest.
    window_ratio : float
        Side of the extracted square window as a multiple of the longer
        side of each region's bounding box.

    Returns
    -------
    ROI : list of ndarray
        One image per region, same shape as ``img``, zero outside the
        square window.
    Coords : list of [row_min, row_max, col_min, col_max]
        The window coordinates for each region.
    """
    mask = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)
    # NOTE: the original code also computed image moments of the mask
    # (via a removed skimage API) but never used the results; that dead
    # code has been dropped.
    label_mask = label(mask)
    ROI = []
    Coords = []
    for region in regionprops(label_mask):
        minr, minc, maxr, maxc = region.bbox
        x_length = abs(maxr - minr)
        y_length = abs(maxc - minc)
        x_mid = minr + (x_length / 2)
        y_mid = minc + (y_length / 2)
        # Window side = longer bounding-box side scaled by window_ratio.
        square_length = max(x_length, y_length) * window_ratio
        # Clamp lower bounds to 0 so negative indices cannot silently
        # wrap around to the other side of the array.
        X_min = max(0, int(x_mid - (square_length / 2)))
        X_max = int(x_mid + (square_length / 2))
        Y_min = max(0, int(y_mid - (square_length / 2)))
        Y_max = int(y_mid + (square_length / 2))
        coord = [X_min, X_max, Y_min, Y_max]
        subRegion = np.zeros_like(img)
        subRegion[X_min:X_max, Y_min:Y_max] = img[X_min:X_max, Y_min:Y_max]
        ROI.append(subRegion)
        Coords.append(coord)
    return ROI, Coords
def test_moments_central_deprecated():
    """The legacy (cr, cc) call style matches the tuple-center API
    modulo a transpose of the moment matrix."""
    image = np.zeros((20, 20), dtype=np.double)
    image[5:-5, 5:-5] = np.random.random((10, 10))
    center = moments(image, 1)[[1, 0], [0, 1]]
    cr, cc = center
    with expected_warnings(['deprecated 2D-only']):
        mu_positional = moments_central(image, cr, cc)
        mu_keyword = moments_central(image, cr=cr, cc=cc)
        mu_reference = moments_central(image, center)
    # Deprecated calls return the transposed moment matrix.
    assert_almost_equal(mu_positional.T, mu_reference)
    assert_almost_equal(mu_keyword.T, mu_reference)
def m20(image: np.ndarray, mask: np.ndarray) -> float:
    r"""Compute the M20 morphology statistic.

    .. math:: M_{20} = log_{10} \left(\frac{\sum M_i} {M_{tot}}\right)

    .. math:: While \sum f_i < 0.2 f_{tot}

    .. math:: M_{tot} = \sum M_i = \sum f_i [(x - x_c)^2 + (y - y_c)^2]

    See Lotz et al. 2004, https://doi.org/10.1086/421849.
    Adapted from statmorph: https://github.com/vrodgom/statmorph

    Parameters
    ----------
    image : float, 2d np.ndarray
        Image of galaxy.
    mask : float [0. - 1.], 2d np.ndarray
        Mask which contains the pixels belonging to the galaxy of
        interest.

    Returns
    -------
    float
        The M20 statistic.
    """
    # Same masked image as used in the Gini calculation.
    masked = np.where(mask > 0, image, 0.)

    # Centroid from first-order raw moments (row, col).
    raw = moments(masked, order=1)
    centre = (raw[1, 0] / raw[0, 0], raw[0, 1] / raw[0, 0])

    # Total second-order central moment.
    mu_total = moments_central(masked, center=centre, order=2)
    second_moment_total = mu_total[2, 0] + mu_total[0, 2]

    # Flux threshold above which lie the brightest 20% of the flux.
    ordered = np.sort(masked.ravel())
    flux_fraction = np.cumsum(ordered) / np.sum(ordered)
    threshold = ordered[flux_fraction > 0.8][0]

    # Second-order moment of only the brightest pixels.
    brightest = np.where(masked >= threshold, masked, 0.0)
    mu_bright = moments_central(brightest, center=centre, order=2)
    second_moment_20 = mu_bright[0, 2] + mu_bright[2, 0]

    return np.log10(second_moment_20 / second_moment_total)
def test_moments_normalized():
    """Normalized central moments are translation- and scale-invariant."""
    image = np.zeros((20, 20), dtype=np.double)
    image[13:17, 13:17] = 1
    # Centre is a (row, col) tuple: the positional (cr, cc) form of
    # moments_central was deprecated and removed from scikit-image.
    mu = moments_central(image, (14.5, 14.5))
    nu = moments_normalized(mu)
    # shift image by dx=-3, dy=-3 and scale by 0.5
    image2 = np.zeros((20, 20), dtype=np.double)
    image2[11:13, 11:13] = 1
    mu2 = moments_central(image2, (11.5, 11.5))
    nu2 = moments_normalized(mu2)
    # central moments must be translation and scale invariant
    assert_almost_equal(nu, nu2, decimal=1)
def test_moments_normalized():
    """Normalized central moments are translation- and scale-invariant."""
    square = np.zeros((20, 20), dtype=np.double)
    square[13:17, 13:17] = 1
    nu_big = moments_normalized(moments_central(square, (14.5, 14.5)))
    # The same shape shifted by (-3, -3) and scaled by 0.5.
    small = np.zeros((20, 20), dtype=np.double)
    small[11:13, 11:13] = 1
    nu_small = moments_normalized(moments_central(small, (11.5, 11.5)))
    assert_almost_equal(nu_big, nu_small, decimal=1)
def test_moments_normalized():
    """Normalized moments are invariant to translation, spatial scale
    and intensity scale."""
    reference = np.zeros((20, 20), dtype=np.float64)
    reference[13:17, 13:17] = 1
    nu_ref = moments_normalized(moments_central(reference, (14.5, 14.5)))
    # Shift by (-2, -2), halve the non-zero extent, scale amplitude by 0.7.
    transformed = np.zeros((20, 20), dtype=np.float64)
    transformed[11:13, 11:13] = 0.7
    nu_t = moments_normalized(moments_central(transformed, (11.5, 11.5)))
    assert_almost_equal(nu_ref, nu_t, decimal=1)
def test_moments_hu():
    """Hu moments are invariant to translation, scale and rotation."""
    image = np.zeros((20, 20), dtype=np.double)
    image[13:15, 13:17] = 1
    # Centre is a (row, col) tuple: the positional (cr, cc) form of
    # moments_central was deprecated and removed from scikit-image.
    mu = moments_central(image, (13.5, 14.5))
    nu = moments_normalized(mu)
    hu = moments_hu(nu)
    # shift image by dx=2, dy=3, scale by 0.5 and rotate by 90deg
    image2 = np.zeros((20, 20), dtype=np.double)
    image2[11, 11:13] = 1
    image2 = image2.T
    mu2 = moments_central(image2, (11.5, 11))
    nu2 = moments_normalized(mu2)
    hu2 = moments_hu(nu2)
    # central moments must be translation and scale invariant
    assert_almost_equal(hu, hu2, decimal=1)
def test_moments_hu():
    """Hu moments are invariant to translation, scale and rotation."""
    bar = np.zeros((20, 20), dtype=np.double)
    bar[13:15, 13:17] = 1
    hu_ref = moments_hu(moments_normalized(moments_central(bar, (13.5, 14.5))))
    # Shift by (2, 3), scale by 0.5 and rotate 90 degrees.
    rotated = np.zeros((20, 20), dtype=np.double)
    rotated[11, 11:13] = 1
    rotated = rotated.T
    hu_rot = moments_hu(
        moments_normalized(moments_central(rotated, (11.5, 11))))
    assert_almost_equal(hu_ref, hu_rot, decimal=1)
def collect(path, mean, std):
    """Segment characters in ``./images/<path>.bmp`` and return their
    bounding boxes and normalized Hu-moment feature vectors.

    Parameters
    ----------
    path : str
        Image base name (without directory or ``.bmp`` extension).
    mean, std : array-like
        Normalization statistics forwarded to ``normalize``.

    Returns
    -------
    (boxes, feature_arr) : tuple
        ``boxes`` is a list of [minr, maxr, minc, maxc]; ``feature_arr``
        are the normalized Hu-moment features, one row per box.
    """
    img = io.imread('./images/' + path + '.bmp')
    hist = exposure.histogram(img)
    th = get_threshold('./images/' + path + '.bmp')
    img_binary = (img < th).astype(np.double)
    img_label = label(img_binary, background=0)
    regions = regionprops(img_label)
    boxes = []
    features = []
    for props in regions:
        minr, minc, maxr, maxc = props.bbox
        # Discard components too small or too large to be characters.
        if maxc - minc < 10 or maxr - minr < 10 \
                or maxc - minc > 120 or maxr - minr > 120:
            continue
        boxes.append([minr, maxr, minc, maxc])
        roi = img_binary[minr:maxr, minc:maxc]
        m = moments(roi)
        # (row, col) centroid, passed as a tuple: the positional
        # (cr, cc) form of moments_central was removed from skimage.
        centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
        mu = moments_central(roi, center=centroid)
        nu = moments_normalized(mu)
        hu = moments_hu(nu)
        features.append(hu)
    feature_arr = normalize(features, mean, std)
    return (boxes, feature_arr)
def huMoment(self, img):
    """Return the central moments of the inverted image ``1 - img``.

    Parameters
    ----------
    img : ndarray
        Input image; presumably binary or in [0, 1] — TODO confirm.

    Returns
    -------
    ndarray
        Central moment matrix of the inverted image.
    """
    h = 1 - img
    m = moments(h)
    # (row, col) centroid as a tuple: the positional (cr, cc) form of
    # moments_central was removed from scikit-image.
    centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
    return moments_central(h, center=centroid)
def featuresExtractor_Hu(image):
    """Return the norms of the 7 Hu moment invariants of ``image``."""
    gray = rgb2gray(image)
    central = moments_central(gray)
    normalized = moments_normalized(central)
    hu = moments_hu(normalized)
    return [norm(component) for component in hu]
def get_data(x, y, img):
    """Build a per-pixel feature vector for the pixel at (x, y).

    The vector contains: the pixel location, the values of the
    ``img[x-2:x+2, y-2:y+2]`` window, the window's central moments and
    Hu moments, and the variance of the window values.
    """
    window = img[x - 2:x + 2, y - 2:y + 2]
    data = [x, y]
    # Raw window pixel values.
    data.extend(window.ravel())
    # Central moments of the window.
    central = measure.moments_central(window)
    data.extend(central.ravel())
    # Hu moment invariants derived from the central moments.
    data.extend(measure.moments_hu(measure.moments_normalized(central)))
    # Variance of the window values.
    data.append(np.var(window))
    return data
def get_hu_moment_from_image(image):
    """
    Compute the 7 Hu's moments from an image.
    This set of moments is proofed to be translation, scale and rotation
    invariant.

    Parameters
    ----------
    image: array-like
        a 2d array of double or uint8 corresponding to an image

    Returns
    -------
    (7, 1) array of double
        7 Hu's moments

    References
    ----------
    http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.moments
    """
    order = 7
    raw_moments = moments(image, order=order)
    # (row, col) centroid, passed as a tuple: the positional (cr, cc)
    # form of moments_central was removed from scikit-image.
    centroid = (raw_moments[1, 0] / raw_moments[0, 0],
                raw_moments[0, 1] / raw_moments[0, 0])
    central_moments = moments_central(image, center=centroid, order=order)
    normalized_moments = moments_normalized(central_moments, order)
    return moments_hu(normalized_moments)
def hist(image):
    """Return the Hu moment invariants of ``image``.

    NOTE(review): despite its name and original docstring ("Create
    histogram"), this computes Hu moments, not a histogram.
    """
    central = moments_central(image)
    normalized = moments_normalized(central)
    return moments_hu(normalized)
def get_hu_moments(samples):
    """Compute Hu-moment feature vectors for a list of image samples.

    Parameters
    ----------
    samples : iterable of array-like
        2-D image samples.

    Returns
    -------
    list of ndarray
        One 7-element Hu-moment vector per sample.
    """
    # print() form is valid on both Python 2.7 and 3; the original
    # `print "..."` statement is a syntax error on Python 3.
    print("getting hu moments...")
    features = []
    for sample in samples:
        arr = np.asarray(sample, dtype=np.double)
        m = moments(arr)
        # (row, col) centroid, passed as a tuple: the positional
        # (cr, cc) form of moments_central was removed from skimage.
        centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
        mu = moments_central(arr, center=centroid)
        nu = moments_normalized(mu)
        features.append(moments_hu(nu))
    return features
def extract_features(roi, props):
    """Feature vector for a region: 7 Hu moments plus five shape stats.

    Returns a (1, 12) array: Hu moments, aspect ratio, eccentricity,
    convex_area/area, orientation and Euler number.
    """
    m = moments(roi)
    cr = m[0, 1] / m[0, 0]
    cc = m[1, 0] / m[0, 0]
    nu = moments_normalized(moments_central(roi, (cr, cc)))
    hu = moments_hu(nu)

    feats = list(hu)
    feats.append(roi.shape[1] / roi.shape[0])      # aspect ratio
    feats.append(props.eccentricity)
    feats.append(props.convex_area / props.area)   # inverse solidity
    feats.append(props.orientation)
    feats.append(props.euler_number)
    return np.array([feats])
def __compute_moments(self, data, centroid, radius):
    """Compute central, Hu and Zernike moments of ``data``.

    Returns a tuple (flattened central moments, Hu moments, Zernike
    moments); Zernike moments fall back to -999 placeholders on error.
    """
    # Central moments up to 3rd order, then normalized and Hu moments.
    central = moments_central(data, center=centroid, order=3)
    normalized = moments_normalized(central, 3)
    hu = moments_hu(normalized)
    central = central.flatten()

    # Zernike moments via mahotas. NB: mahotas takes only positive
    # pixels and rescales the image by sum(pix) internally.
    poldeg = 4
    nmom_zernike = 9
    zernike = [-999] * nmom_zernike
    try:
        zernike = mahotas.features.zernike_moments(
            data, radius, degree=poldeg, cm=centroid)
    except Exception as e:
        logger.warn("Failed to compute Zernike moments (err=%s)!" % (str(e)))

    return (central, hu, zernike)
def extract_features(path, show, tag):
    """Segment ``./images/<path>.bmp`` and return Hu-moment features.

    Parameters
    ----------
    path : str
        Image base name; if it is a single character its ordinal is
        appended to ``tag``.
    show : int
        When 1, display intermediate images and bounding boxes.
    tag : list
        Output list of class labels (mutated in place).

    Returns
    -------
    list of ndarray
        One 7-element Hu-moment vector per accepted region.
    """
    img = io.imread('./images/' + path + '.bmp')
    hist = exposure.histogram(img)
    th = get_threshold('./images/' + path + '.bmp')
    img_binary = (img < th).astype(np.double)
    img_label = label(img_binary, background=0)
    # Show images
    if show == 1:
        io.imshow(img)
        plt.title('Original Image')
        io.show()
        plt.bar(hist[1], hist[0])
        plt.title('Histogram')
        plt.show()
        io.imshow(img_binary)
        plt.title('Binary Image')
        io.show()
        io.imshow(img_label)
        plt.title('Labeled Image')
        io.show()
    regions = regionprops(img_label)
    if show == 1:
        io.imshow(img_binary)
        ax = plt.gca()
    features = []
    for props in regions:
        minr, minc, maxr, maxc = props.bbox
        # Discard components too small or too large to be characters.
        if maxc - minc < 10 or maxr - minr < 10 \
                or maxc - minc > 120 or maxr - minr > 120:
            continue
        if show == 1:
            ax.add_patch(Rectangle((minc, minr), maxc - minc, maxr - minr,
                                   fill=False, edgecolor='red', linewidth=1))
        roi = img_binary[minr:maxr, minc:maxc]
        m = moments(roi)
        # (row, col) centroid, passed as a tuple: the positional
        # (cr, cc) form of moments_central was removed from skimage.
        centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
        mu = moments_central(roi, center=centroid)
        nu = moments_normalized(mu)
        hu = moments_hu(nu)
        features.append(hu)
        if (len(path) == 1):
            tag.append(ord(path))
    if show == 1:
        plt.title('Bounding Boxes')
        io.show()
    return features
def test_moments_central():
    """Central moments must be translation invariant."""
    image = np.zeros((20, 20), dtype=np.double)
    image[14, 14] = 1
    image[15, 15] = 1
    image[14, 15] = 0.5
    image[15, 14] = 0.5
    # Centre is a (row, col) tuple: the positional (cr, cc) form of
    # moments_central was deprecated and removed from scikit-image.
    mu = moments_central(image, (14.5, 14.5))
    # shift image by dx=2, dy=2
    image2 = np.zeros((20, 20), dtype=np.double)
    image2[16, 16] = 1
    image2[17, 17] = 1
    image2[16, 17] = 0.5
    image2[17, 16] = 0.5
    mu2 = moments_central(image2, (14.5 + 2, 14.5 + 2))
    # central moments must be translation invariant
    assert_equal(mu, mu2)
def compute_hu_moments(i):
    """Return the Hu moments of the i-th aligned, padded cell mask."""
    b = cells_aligned_padded[i].astype(np.uint8)
    m = moments(b, order=1)
    # (row, col) centroid as a tuple: the cr=/cc= keyword arguments of
    # moments_central were removed from scikit-image.
    centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
    hu = moments_hu(
        moments_normalized(moments_central(b, center=centroid)))
    return hu
def test_moments_hu_dtype(dtype):
    """moments_hu preserves the dtype of its input array."""
    bar = np.zeros((20, 20), dtype=np.double)
    bar[13:15, 13:17] = 1
    normalized = moments_normalized(moments_central(bar, (13.5, 14.5)))
    hu = moments_hu(normalized.astype(dtype))
    assert hu.dtype == dtype
def get_moments(image: np.ndarray, position: Tuple[int, int]):
    """Moment features of the 7x7 patch centred on ``position``.

    Returns the 25 order-4 central moments (as (1,)-arrays) followed by
    the patch's Hu values.

    NOTE(review): moments_hu is applied to the raw patch rather than to
    normalized moments — confirm this is intentional.
    """
    row, col = position[0], position[1]
    radius = 3
    patch = image[row - radius:row + radius + 1,
                  col - radius:col + radius + 1]
    hu = moments_hu(patch)
    central = moments_central(patch, order=4)
    return (*central.reshape((25, 1)), *hu)
def describe(self, image):
    """Return the Hu moment invariants of ``image``."""
    central = measure.moments_central(image)
    normalized = measure.moments_normalized(central)
    return measure.moments_hu(normalized)
def extractFeature(name, showall, showbb, flag):
    """Extract per-region feature vectors and bounding boxes.

    Each feature vector is the 7 Hu moments of the region plus a
    convexity term and a density term (9 values total).

    Returns
    -------
    (Features, boxes) : tuple of lists
        Feature vectors and [minr, minc, maxr, maxc] boxes.
    """
    (img, regions, ax, rthre, cthre) = extractImage(name, showall, showbb, flag)
    Features = []
    boxes = []
    for props in regions:
        minr, minc, maxr, maxc = props.bbox
        # Drop components outside the plausible character-size range.
        if maxc - minc < cthre or maxr - minr < rthre \
                or maxc - minc > cthre * 9 or maxr - minr > rthre * 9:
            continue
        boxes.append([minr, minc, maxr, maxc])
        if showbb == 1:
            ax.add_patch(Rectangle((minc, minr), maxc - minc, maxr - minr,
                                   fill=False, edgecolor='red', linewidth=1))
        # computing hu moments and removing small components
        roi = img[minr:maxr, minc:maxc]
        m = moments(roi)
        # (row, col) centroid, passed as a tuple: the positional
        # (cr, cc) form of moments_central was removed from skimage.
        centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
        mu = moments_central(roi, center=centroid)
        nu = moments_normalized(mu)
        hu = moments_hu(nu)
        area = (maxr - minr) * (maxc - minc)
        # add convexity
        p = perimeter(img[minr:maxr, minc:maxc])
        con = (area / (p * p)) * 4 * math.pi
        hu = np.concatenate((hu, np.array([con])))
        # add density
        den = area / float(props.convex_area)
        hu = np.concatenate((hu, np.array([den])))
        Features.append(hu)
    plt.title('Bounding Boxes')
    if showbb == 1:
        io.show()
    return Features, boxes,
def testKNN():
    """Train a KNN on extracted features and evaluate it on test images.

    Segments each test image by Otsu threshold, classifies every
    sufficiently large component by its Hu moments, draws labelled
    bounding boxes and prints the number and fraction of correct
    predictions.
    """
    trainFeatures, trainLebels = extractFeatures()
    knn = neighbors.KNeighborsClassifier()
    knn.fit(trainFeatures, trainLebels)
    testNames = ['test1', 'test2']
    testFeatures = []
    testLabels = []
    correct = 0
    for name in testNames:
        classes, locations = readPkl(name)
        img = io.imread(name + '.bmp')
        # Otsu threshold for binarization.
        ret, binary = cv.threshold(img, 0, 255,
                                   cv.THRESH_BINARY | cv.THRESH_OTSU)
        th = ret
        img_binary = (img < th).astype(np.double)
        img_label = label(img_binary, background=0)
        regions = regionprops(img_label)
        io.imshow(img_binary)
        ax = plt.gca()
        thresholdR = 15
        thresholdC = 15
        for props in regions:
            minr, minc, maxr, maxc = props.bbox
            # Skip components too small to be characters.
            if (maxr - minr) < thresholdR or (maxc - minc) < thresholdC:
                continue
            roi = img_binary[minr:maxr, minc:maxc]
            m = moments(roi)
            # (row, col) centroid, passed as a tuple: the positional
            # (cr, cc) form of moments_central was removed from skimage.
            centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
            mu = moments_central(roi, center=centroid)
            nu = moments_normalized(mu)
            hu = moments_hu(nu)
            testFeatures.append(hu)
            testLabels.append(knn.predict([testFeatures[-1]]))
            indexFix = locationFix(locations, minr, minc, maxr, maxc)
            if indexFix is not None and testLabels[-1] == classes[indexFix]:
                correct += 1
            plt.text(maxc, minr, testLabels[-1][0],
                     bbox=dict(facecolor='white', alpha=0.5))
            ax.add_patch(Rectangle((minc, minr), maxc - minc, maxr - minr,
                                   fill=False, edgecolor='red', linewidth=1))
        plt.title('Bounding Boxes')
        io.show()
    print(correct, len(testLabels))
    # Explicit float division: the original Python-2 `/` truncated the
    # rate to an integer.
    correctRate = correct / float(len(testLabels))
    print(correctRate)
def test_moments_normalized_3d():
    """3-D normalized moments; image- and coordinate-based paths agree."""
    ellipsoid = draw.ellipsoid(1, 1, 10)
    mu_image = moments_central(ellipsoid)
    nu = moments_normalized(mu_image)
    # Elongated axis dominates; the two short axes are symmetric.
    assert nu[0, 0, 2] > nu[0, 2, 0]
    assert_almost_equal(nu[0, 2, 0], nu[2, 0, 0])
    # Coordinate-based central moments must match the image-based ones.
    mu_coords = moments_coords_central(np.where(ellipsoid))
    assert_almost_equal(mu_coords, mu_image)
def moments_central(self):
    """
    Central moments (translation invariant) of the source up to 3rd
    order.
    """
    from skimage.measure import moments_central
    ycentroid, xcentroid = self.cutout_centroid.value
    # Centre is a (row, col) tuple and order a keyword: the positional
    # (cr, cc, order) form of moments_central was removed from skimage.
    return moments_central(self._data_cutout_maskzeroed_double,
                           center=(ycentroid, xcentroid), order=3)
def test_moments_normalized_spacing(anisotropic):
    """Normalized moments are invariant to absolute pixel spacing."""
    square = np.zeros((20, 20), dtype=np.double)
    square[13:17, 13:17] = 1
    if anisotropic:
        spacing_a, spacing_b = (1, 2), (2, 4)
    else:
        spacing_a, spacing_b = (1, 1), (3, 3)
    nu_a = moments_normalized(moments_central(square, spacing=spacing_a),
                              spacing=spacing_a)
    nu_b = moments_normalized(moments_central(square, spacing=spacing_b),
                              spacing=spacing_b)
    # Result should be invariant to the absolute scale of the spacing.
    assert_almost_equal(nu_a, nu_b)
def drawPictureWithContour(image, x):
    """Find bright-object contours in ``image``, draw them with their
    centres on top of the image, and save the figure as ``<x>.pdf``."""
    fig, ax = plt.subplots()
    # Rescale exposure between the 5th and the configured percentile.
    # (Renamed from `min`, which shadowed the builtin.)
    low = np.percentile(image, 5)
    perc = np.percentile(image, qthPercentile)
    resc = exposure.rescale_intensity(image, in_range=(low, perc))
    # Kernel used for dilation and erosion.
    kernel = np.ones((5, 5), np.uint8)
    # Work in HSV so the value (brightness) channel can be used directly.
    img = color.rgb2hsv(resc)
    height, width, channels = img.shape
    # Sky is a lot brighter than planes; the inverted value channel
    # makes the objects the bright features for edge finding.
    inv = np.zeros([height, width])
    for i in range(height):
        for j in range(width):
            inv[i][j] = 1 - img[i][j][2]
    # Erosion and dilation to patch up objects on the sky.
    inv = cv2.erode(inv, kernel, iterations=1)
    inv = cv2.dilate(inv, kernel, iterations=3)
    inv = cv2.erode(inv, kernel, iterations=1)
    # Actual contour finding. (The loop variable no longer shadows the
    # `contours` list it iterates.)
    contours = measure.find_contours(inv, contourLevelValue)
    for n, contour in enumerate(contours):
        # Moments about the origin; the centre must be given as a tuple
        # since the positional (cr, cc) signature was removed.
        M = measure.moments_central(contour, center=(0, 0))
        centerX = int(M[1, 0] / M[0, 0])
        centerY = int(M[0, 1] / M[0, 0])
        print("centerX: " + str(centerX))
        print("centerY: " + str(centerY))
        ax.add_artist(plt.Circle((centerX, centerY), 5, color='w'))
        plt.plot(contour[:, 1], contour[:, 0],
                 linewidth=contourWidth, color=rcg(myColors))
    # OpenCV images are BGR; convert to RGB for matplotlib display.
    plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
    fig.savefig(str(x) + ".pdf")
def _irafstarfind_moments(imgcutout, kernel, sky):
    """
    Find the properties of each detected source, as defined by IRAF's
    ``starfind``.

    Parameters
    ----------
    imgcutout : `_ImgCutout`
        The image cutout for a single detected source.

    kernel : `_FindObjKernel`
        The convolution kernel.  The dimensions should match those of
        ``imgcutout``.  ``kernel.gkernel`` should have a peak pixel
        value of 1.0 and not contain any masked pixels.

    sky : float
        The local sky level around the source.

    Returns
    -------
    result : dict
        A dictionary of the object parameters.
    """
    from skimage.measure import moments, moments_central
    result = defaultdict(list)
    img = np.array((imgcutout.data - sky) * kernel.mask)
    img = np.where(img > 0, img, 0)    # starfind discards negative pixels
    if np.count_nonzero(img) <= 1:
        return {}
    m = moments(img, 1)
    result['xcentroid'] = m[1, 0] / m[0, 0]
    result['ycentroid'] = m[0, 1] / m[0, 0]
    result['npix'] = float(np.count_nonzero(img))   # float for easier testing
    result['sky'] = sky
    result['peak'] = np.max(img)
    flux = img.sum()
    result['flux'] = flux
    result['mag'] = -2.5 * np.log10(flux)
    # Second-order central moments; the centre must be a (row, col)
    # tuple — the positional (cr, cc, order) form was removed from
    # scikit-image.
    mu = moments_central(
        img, center=(result['ycentroid'], result['xcentroid']),
        order=2) / m[0, 0]
    musum = mu[2, 0] + mu[0, 2]
    mudiff = mu[2, 0] - mu[0, 2]
    result['fwhm'] = 2.0 * np.sqrt(np.log(2.0) * musum)
    result['sharpness'] = result['fwhm'] / kernel.fwhm
    result['roundness'] = np.sqrt(mudiff**2 + 4.0 * mu[1, 1]**2) / musum
    pa = 0.5 * np.arctan2(2.0 * mu[1, 1], mudiff) * (180.0 / np.pi)
    if pa < 0.0:
        pa += 180.0
    result['pa'] = pa
    result['xcentroid'] += imgcutout.x0
    result['ycentroid'] += imgcutout.y0
    return result
def _irafstarfind_moments(imgcutout, kernel, sky):
    """
    Find the properties of each detected source, as defined by IRAF's
    ``starfind``.

    Parameters
    ----------
    imgcutout : `_ImgCutout`
        The image cutout for a single detected source.

    kernel : `_FindObjKernel`
        The convolution kernel.  The dimensions should match those of
        ``imgcutout``.  ``kernel.gkernel`` should have a peak pixel
        value of 1.0 and not contain any masked pixels.

    sky : float
        The local sky level around the source.

    Returns
    -------
    result : dict
        A dictionary of the object parameters.
    """
    from skimage.measure import moments, moments_central
    result = defaultdict(list)
    img = np.array((imgcutout.data - sky) * kernel.mask)
    img = np.where(img > 0, img, 0)    # starfind discards negative pixels
    if np.count_nonzero(img) <= 1:
        return {}
    m = moments(img, 1)
    result['xcentroid'] = m[1, 0] / m[0, 0]
    result['ycentroid'] = m[0, 1] / m[0, 0]
    result['npix'] = float(np.count_nonzero(img))   # float for easier testing
    result['sky'] = sky
    result['peak'] = np.max(img)
    flux = img.sum()
    result['flux'] = flux
    result['mag'] = -2.5 * np.log10(flux)
    # Second-order central moments; the centre must be a (row, col)
    # tuple — the positional (cr, cc, order) form was removed from
    # scikit-image.
    mu = moments_central(
        img, center=(result['ycentroid'], result['xcentroid']),
        order=2) / m[0, 0]
    musum = mu[2, 0] + mu[0, 2]
    mudiff = mu[2, 0] - mu[0, 2]
    result['fwhm'] = 2.0 * np.sqrt(np.log(2.0) * musum)
    result['sharpness'] = result['fwhm'] / kernel.fwhm
    result['roundness'] = np.sqrt(mudiff**2 + 4.0 * mu[1, 1]**2) / musum
    pa = 0.5 * np.arctan2(2.0 * mu[1, 1], mudiff) * (180.0 / np.pi)
    if pa < 0.0:
        pa += 180.0
    result['pa'] = pa
    result['xcentroid'] += imgcutout.x0
    result['ycentroid'] += imgcutout.y0
    return result
def test_moments_central():
    """Default-centroid path and translation invariance of central
    moments."""
    blob = np.zeros((20, 20), dtype=np.double)
    blob[14, 14] = 1
    blob[15, 15] = 1
    blob[14, 15] = 0.5
    blob[15, 14] = 0.5
    mu = moments_central(blob, (14.5, 14.5))
    # Omitting the centre must make skimage compute the centroid itself.
    assert_equal(mu, moments_central(blob))
    # Translate the blob by (2, 2); central moments must not change.
    shifted = np.zeros((20, 20), dtype=np.double)
    shifted[16, 16] = 1
    shifted[17, 17] = 1
    shifted[16, 17] = 0.5
    shifted[17, 16] = 0.5
    assert_equal(mu, moments_central(shifted, (16.5, 16.5)))
def _describe(self, binary, steps=None):
    """Hu-moment descriptor of a binary image.

    Parameters
    ----------
    binary : ndarray
        Input image; values are clipped to a maximum of 1.
    steps : unused
        Kept for interface compatibility.

    Returns
    -------
    ndarray
        The 7 Hu moments, with NaNs replaced by 0.
    """
    clipped = binary.clip(max=1)
    m = measure.moments(clipped)
    # (row, col) centroid, passed as a tuple: the positional (cr, cc)
    # form of moments_central was removed from scikit-image.
    centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
    central = measure.moments_central(clipped, center=centroid)
    normalized = measure.moments_normalized(central)
    moments = measure.moments_hu(normalized)
    # nan determines, that moment could not be described,
    # but is hard to handle in prediction, set to zero instead
    moments[np.isnan(moments)] = 0
    return moments
def extract_features(img):
    """Extract 8 geometric moment invariants from an image.

    Computes the raw moments, centralizes and normalizes them, derives
    the 7 Hu moments, and appends an 8th invariant I8 built from the
    normalized moments.
    """
    m = moments(img)
    # (row, col) centroid, passed as a tuple: the positional (cr, cc)
    # form of moments_central was removed from scikit-image.
    centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
    mc = moments_central(img, center=centroid)
    mn = moments_normalized(mc)
    hu = moments_hu(mn)
    # Eighth invariant (complements the 7 Hu moments).
    i8 = (mn[1, 1] * ((mn[3, 0] + mn[1, 2]) ** 2 - (mn[0, 3] + mn[2, 1]) ** 2)
          - (mn[2, 0] - mn[0, 2]) * (mn[3, 0] + mn[1, 2]) * (mn[0, 3] + mn[2, 1]))
    return append(hu, [i8])
def test_moments_central_coords():
    """Coordinate-based central moments match the image-based ones."""
    square = np.zeros((20, 20), dtype=np.double)
    square[13:17, 13:17] = 1
    mu_image = moments_central(square, (14.5, 14.5))
    coords = np.array([[r, c] for r in range(13, 17) for c in range(13, 17)],
                      dtype=np.double)
    mu_coords = moments_coords_central(coords, (14.5, 14.5))
    assert_almost_equal(mu_coords, mu_image)
    # Omitting the centre must compute the centroid internally.
    assert_almost_equal(moments_coords_central(coords), mu_coords)
    # Shift the square by (3, 3); moments about the same (now
    # off-centre) point must still agree between the two paths.
    shifted = np.zeros((20, 20), dtype=np.double)
    shifted[16:20, 16:20] = 1
    mu_image_shifted = moments_central(shifted, (14.5, 14.5))
    shifted_coords = np.array(
        [[r, c] for r in range(16, 20) for c in range(16, 20)],
        dtype=np.double)
    mu_coords_shifted = moments_coords_central(shifted_coords, (14.5, 14.5))
    assert_almost_equal(mu_coords_shifted, mu_image_shifted)
def momentos_hu(self):
    """Compute the 7 Hu moments of the grayscale image.

    Returns
    -------
    (nomes, tipos, valores) : tuple of lists
        Feature names ('hu_0'..'hu_6'), their types and their values.
    """
    m = measure.moments(self.imagemTonsDeCinza)
    # (row, col) centroid, passed as a tuple: the positional (row, col)
    # arguments of moments_central were removed from scikit-image.
    centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
    mu = measure.moments_central(self.imagemTonsDeCinza, center=centroid)
    nu = measure.moments_normalized(mu)
    hu = measure.moments_hu(nu)
    valores = list(hu)
    # Simpler name construction than the original zip/map dance.
    nomes = ['hu_' + str(i) for i in range(len(valores))]
    tipos = [numerico] * len(nomes)
    return nomes, tipos, valores
def get_moments(self):
    """
    Return moments from frame

    Returns
    -------
    moments : array with the following entries (NOTE(review): the
        original docstring said "pandas Series", but an ndarray is
        returned — confirm against callers)
        - m10 : row position of centroid
        - m01 : col position of centroid
        - mupr20 : higher moments
        - mupr02 : higher moments
        - mupr11 : higher moments
    """
    frame = ma.masked_invalid(self)
    frame.fill_value = 0
    frame = frame.filled()
    frame *= self.ap_weights  # Multiply frame by aperture weights
    # Compute the centroid from first-order raw moments.
    m = measure.moments(frame)
    centroid = np.array([m[1, 0], m[0, 1]]) / m[0, 0]
    # Second-order central moments; the centre must now be passed as a
    # tuple — the positional (cr, cc) form was removed from skimage.
    mu = measure.moments_central(frame, center=(centroid[0], centroid[1]))
    c_moments = np.array([mu[2, 0], mu[0, 2], mu[1, 1]]) / mu[0, 0]
    return np.hstack([centroid, c_moments])
def shape_params(data, data_mask=None):
    """
    Calculate the centroid and shape parameters for an object using
    image moments.

    Parameters
    ----------
    data : array_like
        The 2D image data.

    data_mask : array_like, bool, optional
        A boolean mask with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is
        invalid.  Masked pixels are zeroed **in place** in ``data``.

    Returns
    -------
    dict :  A dictionary containing the object shape parameters:

        * ``xcen, ycen``: object centroid (zero-based origin).
        * ``major_axis``: length of the major axis
        * ``minor_axis``: length of the minor axis
        * ``eccen``: eccentricity.  The ratio of half the distance
          between its two ellipse foci to the length of the semimajor
          axis.
        * ``pa``: position angle of the major axis.  Increases
          clockwise from the positive x axis.
        * ``covar``: corresponding covariance matrix for a 2D Gaussian
        * ``linear_eccen`` : distance between the object center and
          either of its two ellipse foci.
    """
    from skimage.measure import moments, moments_central

    if data_mask is not None:
        if data.shape != data_mask.shape:
            raise ValueError('data and data_mask must have the same shape')
        data[data_mask] = 0.

    result = {}
    xcen, ycen = centroid_com(data)
    m = moments(data, 1)
    # Second-order central moments about the centroid; the centre is a
    # (row, col) tuple — the positional (cr, cc, order) form of
    # moments_central was removed from scikit-image.
    mu = moments_central(data, center=(ycen, xcen), order=2) / m[0, 0]
    result['xcen'] = xcen
    result['ycen'] = ycen
    mudiff = mu[2, 0] - mu[0, 2]
    # Position angle in degrees, wrapped to [0, 180).
    pa = 0.5 * np.arctan2(2.0 * mu[1, 1], mudiff) * (180.0 / np.pi)
    if pa < 0.0:
        pa += 180.0
    result['pa'] = pa
    covar = np.array([[mu[2, 0], mu[1, 1]],
                      [mu[1, 1], mu[0, 2]]])
    result['covar'] = covar
    eigvals, eigvecs = np.linalg.eigh(covar)
    majsq = np.max(eigvals)
    minsq = np.min(eigvals)
    result['major_axis'] = np.sqrt(majsq)
    result['minor_axis'] = np.sqrt(minsq)
    result['eccen'] = np.sqrt(1.0 - (minsq / majsq))
    result['linear_eccen'] = np.sqrt(majsq - minsq)
    return result
def shape_params(data, mask=None):
    """
    Calculate the centroid and shape parameters of a 2D array (e.g., an
    image cutout of an object) using image moments.

    Parameters
    ----------
    data : array_like or `~astropy.nddata.NDData`
        The 2D array of the image.

    mask : array_like, bool, optional
        A boolean mask with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is
        invalid.  If ``mask`` is input it will override ``data.mask``
        for `~astropy.nddata.NDData` inputs.

    Returns
    -------
    params : dict
        A dictionary containing the object shape parameters:

        * ``xcen, ycen``: The object centroid (zero-based origin).
        * ``major_axis``: The length of the major axis of the ellipse
          that has the same second-order moments as the input image.
        * ``minor_axis``: The length of the minor axis of the ellipse
          that has the same second-order moments as the input image.
        * ``eccen``: The eccentricity of the ellipse that has the same
          second-order moments as the input image.  The eccentricity is
          the ratio of half the distance between the two ellipse foci
          to the length of the semimajor axis.
        * ``angle``: Angle in radians between the positive x axis and
          the major axis of the ellipse that has the same second-order
          moments as the input image.  The angle increases
          counter-clockwise.
        * ``covar``: The covariance matrix of the ellipse that has the
          same second-order moments as the input image.
        * ``linear_eccen`` : The linear eccentricity of the ellipse
          that has the same second-order moments as the input image.
          Linear eccentricity is the distance between the ellipse
          center and either of its two foci.
    """
    from skimage.measure import moments, moments_central

    data = _convert_image(data, mask=mask)
    xcen, ycen = centroid_com(data)
    m = moments(data, 1)
    # Second-order central moments about the centroid; the centre is a
    # (row, col) tuple — the positional (cr, cc, order) form of
    # moments_central was removed from scikit-image.
    mu = moments_central(data, center=(ycen, xcen), order=2) / m[0, 0]
    result = {}
    result['xcen'] = xcen
    result['ycen'] = ycen
    mudiff = mu[2, 0] - mu[0, 2]
    # BUG FIX: the angle is documented (and wrapped) in radians, but the
    # original multiplied by 180/pi before adding np.pi, mixing units.
    angle = 0.5 * np.arctan2(2.0 * mu[1, 1], mudiff)
    if angle < 0.0:
        angle += np.pi
    result['angle'] = angle
    covar = np.array([[mu[2, 0], mu[1, 1]],
                      [mu[1, 1], mu[0, 2]]])
    result['covar'] = covar
    eigvals, eigvecs = np.linalg.eigh(covar)
    majsq = np.max(eigvals)
    minsq = np.min(eigvals)
    result['major_axis'] = np.sqrt(majsq)
    result['minor_axis'] = np.sqrt(minsq)
    result['eccen'] = np.sqrt(1.0 - (minsq / majsq))
    result['linear_eccen'] = np.sqrt(majsq - minsq)
    return result
# Assignment 4 - Morphology
# - not done yet
img = array(Image.open('figure_problem_set_4.tiff'))
img = 1 * (img > 1)  # binarize
imshow(img, cmap=cm.Greys_r, interpolation='none')
equivTable, imgLabel = regionLabel(img)
imshow(imgLabel, cmap=cm.Greys_r, interpolation='none')

# HOMEWORK 6
# Assignment 3 - calculate moments
img = skimageIO.imread('img_moment.tif').astype(float)
imshow(img, cmap=cm.Greys_r, interpolation='none')
moments_deg1 = skimageMeasure.moments(img, order=1)
# Centroid under the modern skimage convention:
# row = m[1, 0] / m[0, 0], col = m[0, 1] / m[0, 0].
y0 = moments_deg1[1, 0] / moments_deg1[0, 0]
x0 = moments_deg1[0, 1] / moments_deg1[0, 0]
# Centre is a (row, col) tuple: the positional (cr, cc) form of
# moments_central was removed from scikit-image.
momentsCentral_deg2 = skimageMeasure.moments_central(
    img, center=(y0, x0), order=2)
m00 = momentsCentral_deg2[0, 0]
m11 = momentsCentral_deg2[1, 1]
m02 = momentsCentral_deg2[0, 2]
m20 = momentsCentral_deg2[2, 0]
# Orientation of the principal axis in degrees.
orientation = degrees(arctan2(2 * m11, (m20 - m02)) / 2)

# HOUGH TRANSFORM HOMEWORK
img = skimageIO.imread('img_hough_circle.tiff').astype(float)
# Enhance edges with the Sobel operator (horizontal + vertical kernels).
h1 = array([[1, 2, 1], [0, 0, 0], [-1, -2, -1]])
imgH1 = convolve2d(img, h1)
h2 = array([[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]])
imgH2 = convolve2d(img, h2)
img = 1.0 * (sqrt(imgH1 ** 2 + imgH2 ** 2) > 0)
def compute_hu_moments(i):
    """Hu moments of the i-th aligned, padded cell mask.

    The cr=/cc= keyword arguments of moments_central were removed from
    scikit-image; the centre is now a (row, col) tuple.
    """
    b = cells_aligned_padded[i].astype(np.uint8)
    m = moments(b, order=1)
    centroid = (m[1, 0] / m[0, 0], m[0, 1] / m[0, 0])
    return moments_hu(
        moments_normalized(moments_central(b, center=centroid)))
def central_geom_moments_sk(self, order):
    """Central geometric moments of ``self.image`` up to ``order``.

    The centroid is fetched once (the original called ``centroid_sk()``
    twice) and passed as a tuple because the positional (cr, cc)
    signature of ``moments_central`` was removed from scikit-image.
    NOTE(review): confirm that ('x', 'y') maps to skimage's (row, col)
    centre ordering for this class.
    """
    centroid = self.centroid_sk()
    return measure.moments_central(
        self.image, center=(centroid['x'], centroid['y']), order=order)