def dual_gradient_energy(img):
    '''
        Calculate the dual-gradient energy of each pixel
        @return: the energy representation of the img
        x_gradient = r_x**2 + g_x**2 + b_x**2
        y_gradient = r_y**2 + g_y**2 + b_y**2
        energy = x_gradient + y_gradient
    '''
    h, w = img.shape[:2]
    energy = [[0 for i in range(0, w)] for j in range(0, h)]
    R = img[:, :, 0]
    G = img[:, :, 1]
    B = img[:, :, 2]

    r_y = filter.hsobel(R)
    g_y = filter.hsobel(G)
    b_y = filter.hsobel(B)

    r_x = filter.vsobel(R)
    g_x = filter.vsobel(G)
    b_x = filter.vsobel(B)

    for i in range(0, h):
        for j in range(0, w):
            y_en = (r_y[i][j]**2) + (g_y[i][j]**2) + (b_y[i][j]**2)
            x_en = (r_x[i][j]**2) + (g_x[i][j]**2) + (b_x[i][j]**2)
            energy[i][j] = y_en + x_en
    return energy
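A minimal usage sketch for the function above (assuming the surrounding module imports `skimage.filter` as `filter`, as the snippet does; the random RGB array is only a stand-in for a real photo):

import numpy as np

# Any H x W x 3 image works; a small random one keeps the sketch self-contained.
img = (np.random.rand(64, 64, 3) * 255).astype(np.uint8)

energy = np.asarray(dual_gradient_energy(img))   # shape (64, 64)

# High-energy pixels sit on strong colour edges; a seam-carving pass would
# search for the vertical 8-connected path with the smallest summed energy.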
Example #2
    def initialize(self, start, stop, skip):
        self._height = self._img.shape[0]
        self._width = self._img.shape[1]
        size = self._width * self._height
        self._weights = [0.0] * size
        self._edgeTo = [0] * size
        self._distTo = [0.0] * size
        R = self._img[:, :, 0]
        G = self._img[:, :, 1]
        B = self._img[:, :, 2]

        self._vR = vsobel(R)
        self._vG = vsobel(G)
        self._vB = vsobel(B)

        self._hR = hsobel(R)
        self._hG = hsobel(G)
        self._hB = hsobel(B)
        for v in xrange(0, size, 1):
            if ((v >= start) and (v < stop) and (v - start) % skip == 0):
                self._distTo[v] = 0
            else:
                self._distTo[v] = sys.maxint
            self._edgeTo[v] = -1
            self._weights[v] = self.energy(self.col(v), self.row(v))
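The `col` and `row` helpers called in the last line are not part of this snippet; under the usual row-major flattening of pixel (row, col) to index row * width + col they would look like the sketch below (an assumption about the rest of the class, not code taken from it):

    def col(self, v):
        # Column of flattened vertex v, assuming row-major order.
        return v % self._width

    def row(self, v):
        # Row of flattened vertex v, assuming row-major order.
        return v // self._width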
Example #3
def calculate_residue_scaled_gradients(patches,
                                       old_frame,
                                       new_frame,
                                       patch_radius,
                                       w_func=lambda x, y: 1):
    #Calculate some gradients:
    gradx = hsobel(new_frame)
    grady = vsobel(new_frame)

    #Calculate the scaled gradients:
    diff = np.subtract(old_frame, new_frame)
    ex = np.multiply(diff, gradx)
    ey = np.multiply(diff, grady)

    w = np.array([[None] * (patch_radius * 2 + 1)] * (patch_radius * 2 + 1))

    #Calculate the w:
    for i in range(-patch_radius, patch_radius + 1):
        for j in range(-patch_radius, patch_radius + 1):
            w[i + patch_radius, j + patch_radius] = w_func(i, j)

    #Get the scaled gradient at each point:
    e_list = np.array([None] * len(patches))

    for i, (x, y) in enumerate(patches):
        e_list[i] = calculate_residue_gradient(ex, ey, x, y, patch_radius, w)

    return e_list
 def dual_gradient_energy(self, img):
     horizontal_gradient = filter.hsobel(img)
     horizontal_gradient = np.square(horizontal_gradient)
     vertical_gradient = filter.vsobel(img)
     vertical_gradient = np.square(vertical_gradient)
     energy = horizontal_gradient + vertical_gradient
     return energy
Example #5
def calculate_structure_tensors(frame,
                                feature_list,
                                patch_radius,
                                w_func=lambda x, y: 1):
    #Find gradients:
    gradx = hsobel(frame)
    grady = vsobel(frame)

    #Calculate the w:
    w = np.array([[None] * (patch_radius * 2 + 1)] * (patch_radius * 2 + 1))
    for i in range(-patch_radius, patch_radius + 1):
        for j in range(-patch_radius, patch_radius + 1):
            w[i + patch_radius, j + patch_radius] = w_func(i, j)

    #Precompute values through numpy:
    Gxx = np.multiply(gradx, gradx)
    Gxy = np.multiply(gradx, grady)
    Gyy = np.multiply(grady, grady)

    #Calculate the G matrix of every feature:
    G_list = np.array([None] * len(feature_list))
    for i, (x, y) in enumerate(feature_list):
        G_list[i] = calculate_structure_tensor(Gxx, Gxy, Gyy, x, y,
                                               patch_radius, w)

    return G_list
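The per-feature helper `calculate_structure_tensor` is not shown here; a plausible minimal sketch of it (an assumption about its signature and row/column convention, not the original implementation) accumulates the weighted 2x2 tensor over the patch:

import numpy as np

def calculate_structure_tensor(Gxx, Gxy, Gyy, x, y, patch_radius, w):
    # Reconstruction of the unshown helper: weighted sum of the 2x2 gradient
    # products over the (2r+1)x(2r+1) patch centred on the feature.
    # Here x is treated as the column index and y as the row index.
    G = np.zeros((2, 2))
    for i in range(-patch_radius, patch_radius + 1):
        for j in range(-patch_radius, patch_radius + 1):
            wk = w[i + patch_radius, j + patch_radius]
            G[0, 0] += wk * Gxx[y + i, x + j]
            G[0, 1] += wk * Gxy[y + i, x + j]
            G[1, 1] += wk * Gyy[y + i, x + j]
    G[1, 0] = G[0, 1]
    return G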
Example #6
 def frac_hedges__frac_vedges(self, image):
     '''
     mean vertical- and horizontal-edge (Sobel) response over the image,
     used as a proxy for the fraction of grayscale pixels that lie on
     vertical and horizontal edges in the image interior
     '''
     v = vsobel(image)
     h = hsobel(image)
     return [v.mean(), h.mean()]
Example #7
def sobelh(provider):
    """
    sobel horizontal
    """
    gray = provider.as_gray()
    dx = hsobel(gray)  # horizontal derivative

    return np.mean(dx)
Example #8
def test_hsobel_horizontal():
    """Horizontal Sobel on an edge should be a horizontal line"""
    i, j = np.mgrid[-5:6, -5:6]
    image = (i >= 0).astype(float)
    result = F.hsobel(image)
    # Fudge the eroded points
    i[np.abs(j) == 5] = 10000
    assert (np.all(result[i == 0] == 1))
    assert (np.all(result[np.abs(i) > 1] == 0))
Example #9
def test_hsobel_horizontal():
    """Horizontal Sobel on an edge should be a horizontal line."""
    i, j = np.mgrid[-5:6, -5:6]
    image = (i >= 0).astype(float)
    result = F.hsobel(image)
    # Fudge the eroded points
    i[np.abs(j) == 5] = 10000
    assert (np.all(result[i == 0] == 1))
    assert (np.all(result[np.abs(i) > 1] == 0))
Example #10
def sobel_features(image):
    h = hsobel(image)
    v = vsobel(image)
    Gx = np.sum(h)
    Gy = np.sum(v)
    m = math.sqrt(Gx**2 + Gy**2)
    # math.atan2 handles Gx == 0 directly and returns 0.0 when both sums are zero
    g = math.atan2(Gy, Gx)
    return [m, g, Gx, Gy]
Example #11
def dual_gradient_energy(img):
    R = img[:, :, 0]
    G = img[:, :, 1]
    B = img[:, :, 2]

    hR = filter.hsobel(R)
    hG = filter.hsobel(G)
    hB = filter.hsobel(B)

    vR = filter.vsobel(R)
    vG = filter.vsobel(G)
    vB = filter.vsobel(B)

    sumRG = np.add(np.square(hR), np.square(hG))
    x_square = np.add(sumRG, np.square(hB))

    sumRGv = np.add(np.square(vR), np.square(vG))
    y_square = np.add(sumRGv, np.square(vB))

    energy = np.add(x_square, y_square)
    return energy
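This is a vectorised counterpart of the per-pixel loop in the first example; a quick consistency check (a sketch, assuming the loop-based version has been renamed `dual_gradient_energy_loop` so the two can be imported side by side):

import numpy as np

img = (np.random.rand(32, 32, 3) * 255).astype(np.uint8)
loop_energy = np.asarray(dual_gradient_energy_loop(img))  # hypothetical rename of the loop version above
vec_energy = dual_gradient_energy(img)                    # this vectorised version
assert np.allclose(loop_energy, vec_energy)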
Example #12
def sobeld(provider):
    """
    sobel mean direction
    """
    gray = provider.as_gray()

    dx = hsobel(gray) # horizontal derivative
    dy = vsobel(gray) # vertical derivative

    dirs = np.arctan2(dy, dx)

    return np.mean(dirs)
Example #13
def percent_vert_horiz_lines(FilledBlobImg, props_area):
    v = filter.vsobel(FilledBlobImg)
    v_floor = filter.threshold_otsu(v)
    ind_v = np.where(v > v_floor)
    h = filter.hsobel(FilledBlobImg)
    h_floor = filter.threshold_otsu(h)
    ind_h = np.where(h > h_floor)

    vert_and_horiz = np.zeros(v.shape).astype("bool")
    vert_and_horiz[ind_v] = True
    vert_and_horiz[ind_h] = True
    ind = np.where(vert_and_horiz)[0]
    return float(ind.size) / props_area
def process(filename):
    imagepath = os.path.join(os.getcwd(), filename)
    orig_img = io.imread(filename, True, 'pil')
    img = orig_img > 0.9  # binary threshold
    lines = probabilistic_hough_line(hsobel(img), line_length=200)
    for l in lines:
        x0, x1 = l[0][0], l[1][0]
        y = l[0][1]
        for x in range(x0, x1):
            img[y + 1, x] = 1
            img[y, x] = 1
            img[y - 1, x] = 1
    erode_img = erosion(img, square(2))
    contours, lengths = compute_contours(erode_img, 0.8)
    lengths = pd.Series(lengths)
    lengths = lengths[lengths > 400]
    for i in lengths.index:
        contour = contours[i]
        box = get_boundingboxes([contour])[0]
        x_sum = sum(map(abs, np.gradient(contour[:, 1])))
        y_sum = sum(map(abs, np.gradient(contour[:, 0])))
        area = (box[2] - box[0]) * (box[3] - box[1])
        plt.plot(contour[:, 1], contour[:, 0])
    contours = [contours[i] for i in lengths.index]
    newboxes = set(link_contours(contours))
    retboxes = []
    for box in newboxes:
        minx, miny, maxx, maxy = box
        x = (minx, maxx, maxx, minx, minx)
        y = (miny, miny, maxy, maxy, miny)
        area = (maxx - minx) * (maxy - miny)
        if area > 10000:
            retboxes.append(box)
            plt.plot(x, y, '-b', linewidth=2)
    imshow(erode_img)
    return retboxes, contours
def gradient_map(image):
    h = hsobel(image)
    v = vsobel(image)
    return np.dstack((h,v))
Example #17
					graycheck = False
					if pix<thresh:
						if not graycheck:
							graycheck = True
							horcount = horcount + 1
					else:
						graycheck = False
				for pix in vertslice:
					graycheck = False
					if pix<thresh:
						if not graycheck:
							graycheck = True
							vertcount = vertcount + 1
					else:
						graycheck = False
				features['hsobel'] = np.nanmean(filter.hsobel(image[j]))
				features['vsobel'] = np.nanmean(filter.vsobel(image[j]))
				features['peaklocalmax'] = np.nanmean(peak_local_max(image[j]))
				features['felzen'] = np.nanmean(segmentation.felzenszwalb(image[j]))
				if np.isnan(features['peaklocalmax']):
					features['peaklocalmax'] = 0.0
				if np.isnan(features['felzen']):
					features['felzen'] = 0.0
				hormirror = image[j][:maxPixel/2]-image[j][maxPixel-1:maxPixel/2:-1]
				vertmirror = image[j][:,:maxPixel/2]-image[j][:,maxPixel-1:maxPixel/2:-1]
				#__End moved region
				image[j] = resize(image[j], (maxPixel, maxPixel))
				
				#From http://scikit-image.org/docs/dev/auto_examples/plot_local_binary_pattern.html
				lbp = local_binary_pattern(image[j], n_points, radius, METHOD)
				n_bins = lbp.max()+1
Example #18
def gradient_orientation_map(image):
    h = hsobel(image)
    v = vsobel(image)
    return np.arctan2(h, v)
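Together with `gradient_map` above, this gives a per-pixel magnitude/angle decomposition of the Sobel gradient; a small usage sketch (assuming a 2-D float image and the same bare `hsobel`/`vsobel` imports these helpers rely on):

import numpy as np

gray = np.random.rand(64, 64)            # stand-in grayscale image
grads = gradient_map(gray)               # shape (64, 64, 2): (h, v) responses
angles = gradient_orientation_map(gray)  # per-pixel orientation in radians
magnitude = np.hypot(grads[..., 0], grads[..., 1])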
Example #19
def test_hsobel_zeros():
    """Horizontal sobel on an array of all zeros"""
    result = F.hsobel(np.zeros((10, 10)), np.ones((10, 10), bool))
    assert (np.all(result == 0))
Example #20
def test_hsobel_vertical():
    """Horizontal Sobel on a vertical edge should be zero"""
    i, j = np.mgrid[-5:6, -5:6]
    image = (j >= 0).astype(float)
    result = F.hsobel(image)
    assert (np.all(result == 0))
Example #21
"""
This example illustrates the use of the horizontal Sobel filter, to compute
horizontal gradients.
"""

from skimage import data, filter
import matplotlib.pyplot as plt

text = data.text()
hsobel_text = filter.hsobel(text)

plt.figure(figsize=(12, 3))

plt.subplot(121)
plt.imshow(text, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(122)
plt.imshow(hsobel_text, cmap='jet', interpolation='nearest')
plt.axis('off')
plt.tight_layout()
plt.show()
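In later scikit-image releases the `skimage.filter` module was renamed `skimage.filters`, and `hsobel`/`vsobel` became `sobel_h`/`sobel_v`; an equivalent of the snippet above under the newer names would look roughly like this (an adaptation, not part of the original example):

# adapted to the renamed skimage.filters API
from skimage import data, filters
import matplotlib.pyplot as plt

text = data.text()
hsobel_text = filters.sobel_h(text)

plt.imshow(hsobel_text, cmap='jet', interpolation='nearest')
plt.axis('off')
plt.show()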
Example #22
def prep():
	header = "acantharia_protist_big_center,acantharia_protist_halo,acantharia_protist,amphipods,appendicularian_fritillaridae,appendicularian_s_shape,appendicularian_slight_curve,appendicularian_straight,artifacts_edge,artifacts,chaetognath_non_sagitta,chaetognath_other,chaetognath_sagitta,chordate_type1,copepod_calanoid_eggs,copepod_calanoid_eucalanus,copepod_calanoid_flatheads,copepod_calanoid_frillyAntennae,copepod_calanoid_large_side_antennatucked,copepod_calanoid_large,copepod_calanoid_octomoms,copepod_calanoid_small_longantennae,copepod_calanoid,copepod_cyclopoid_copilia,copepod_cyclopoid_oithona_eggs,copepod_cyclopoid_oithona,copepod_other,crustacean_other,ctenophore_cestid,ctenophore_cydippid_no_tentacles,ctenophore_cydippid_tentacles,ctenophore_lobate,decapods,detritus_blob,detritus_filamentous,detritus_other,diatom_chain_string,diatom_chain_tube,echinoderm_larva_pluteus_brittlestar,echinoderm_larva_pluteus_early,echinoderm_larva_pluteus_typeC,echinoderm_larva_pluteus_urchin,echinoderm_larva_seastar_bipinnaria,echinoderm_larva_seastar_brachiolaria,echinoderm_seacucumber_auricularia_larva,echinopluteus,ephyra,euphausiids_young,euphausiids,fecal_pellet,fish_larvae_deep_body,fish_larvae_leptocephali,fish_larvae_medium_body,fish_larvae_myctophids,fish_larvae_thin_body,fish_larvae_very_thin_body,heteropod,hydromedusae_aglaura,hydromedusae_bell_and_tentacles,hydromedusae_h15,hydromedusae_haliscera_small_sideview,hydromedusae_haliscera,hydromedusae_liriope,hydromedusae_narco_dark,hydromedusae_narco_young,hydromedusae_narcomedusae,hydromedusae_other,hydromedusae_partial_dark,hydromedusae_shapeA_sideview_small,hydromedusae_shapeA,hydromedusae_shapeB,hydromedusae_sideview_big,hydromedusae_solmaris,hydromedusae_solmundella,hydromedusae_typeD_bell_and_tentacles,hydromedusae_typeD,hydromedusae_typeE,hydromedusae_typeF,invertebrate_larvae_other_A,invertebrate_larvae_other_B,jellies_tentacles,polychaete,protist_dark_center,protist_fuzzy_olive,protist_noctiluca,protist_other,protist_star,pteropod_butterfly,pteropod_theco_dev_seq,pteropod_triangle,radiolarian_chain,radiolarian_colony,shrimp_caridean,shrimp_sergestidae,shrimp_zoea,shrimp-like_other,siphonophore_calycophoran_abylidae,siphonophore_calycophoran_rocketship_adult,siphonophore_calycophoran_rocketship_young,siphonophore_calycophoran_sphaeronectes_stem,siphonophore_calycophoran_sphaeronectes_young,siphonophore_calycophoran_sphaeronectes,siphonophore_other_parts,siphonophore_partial,siphonophore_physonect_young,siphonophore_physonect,stomatopod,tornaria_acorn_worm_larvae,trichodesmium_bowtie,trichodesmium_multiple,trichodesmium_puff,trichodesmium_tuft,trochophore_larvae,tunicate_doliolid_nurse,tunicate_doliolid,tunicate_partial,tunicate_salp_chains,tunicate_salp,unknown_blobs_and_smudges,unknown_sticks,unknown_unclassified".split(',')
	with open('namesClasses.dat','rb') as f:
		namesClasses = cPickle.load(f)
	labels = map(lambda s: s.split('\\')[-1], namesClasses)
	for i in range(len(namesClasses)):
		currentClass = namesClasses[i]
		root_class.data_search(currentClass).id_list.append(i)
	#get the total test images
	#Full set
	#fnames = glob.glob(os.path.join("competition_data", "test", "*.jpg"))
	#Smaller set
	fnames = glob.glob(os.path.join("smaller_comp", "test", "*.jpg"))
	
	numberofTestImages = len(fnames)

	X = np.zeros((numberofTestImages, num_features), dtype=float)

	#Get filename separate from prefix path
	images = map(lambda fileName: fileName.split('\\')[-1], fnames)

	i = 0
	# report progress for each 1% done  
	report = [int((j+1)*numberofTestImages/100.) for j in range(100)]
	for fileName in fnames:
		# Read in the images and create the features
		image = imread(fileName, as_grey=True)
		image = rotImage(image)
		# Added from https://github.com/Newmu/Stroke-Prediction/blob/master/startPredictingGenCode.py
		thresh = 0.9*255
		# if image.min < 0.75*255:
			# img = image < thresh
		# else:
			# img = image
		# if img.sum() != 0:
			# imgX,imgY = np.nonzero(img)
			# imgW = imgX.max()-imgX.min()
			# imgH = imgY.max()-imgY.min()
			# if (imgW>1 and imgH>1):
				# image = image[imgX.min():imgX.max(),imgY.min():imgY.max()]
		# #----------------------------------
		cvimage = cv2.imread(fileName)
		features = getMinorMajorRatio(image)
		#__Begin moved region
		#From http://nbviewer.ipython.org/github/kqdtran/AY250-F13/blob/master/hw4/hw4.ipynb
		pca = decomposition.PCA(n_components=25)
		PCAFeatures = pca.fit_transform(image)
		PCAevr = pca.explained_variance_ratio_
		for evr in range(len(PCAevr)):
			if np.isnan(PCAevr[evr]):
				PCAevr[evr] = 0.0
		#_____________________________________________________________
		corners = getCorners(cvimage,features['orientation'])
		horslice = image[:,maxPixel/2]
		vertslice = image[maxPixel/2]
		#correlation = signal.correlate(image,image)
		#horcorrelation = signal.correlate(horslice,horslice)
		#vertcorrelation = signal.correlate(vertslice,vertslice)
		#crosscorrelation = signal.correlate(horslice,vertslice)
		#correlation = correlation/correlation[correlation.shape[0]/2,correlation.shape[0]/2]
		#horcorrelation = horcorrelation/horcorrelation[horcorrelation.shape[0]/2]
		#vertcorrelation = vertcorrelation/vertcorrelation[vertcorrelation.shape[0]/2]
		#crosscorrelation = crosscorrelation/crosscorrelation[horcorrelation.shape[0]/2]
		hormean = np.mean(horslice)
		horstd = np.std(horslice)
		vertmean = np.mean(vertslice)
		vertstd = np.std(vertslice)
		horcount = vertcount = 0
		for pix in horslice:
			graycheck = False
			if pix<thresh:
				if not graycheck:
					graycheck = True
					horcount = horcount + 1
			else:
				graycheck = False
		for pix in vertslice:
			graycheck = False
			if pix<thresh:
				if not graycheck:
					graycheck = True
					vertcount = vertcount + 1
			else:
				graycheck = False
		features['hsobel'] = np.nanmean(filter.hsobel(image))
		features['vsobel'] = np.nanmean(filter.vsobel(image))
		features['peaklocalmax'] = np.nanmean(peak_local_max(image))
		features['felzen'] = np.nanmean(segmentation.felzenszwalb(image))
		if np.isnan(features['peaklocalmax']):
			features['peaklocalmax'] = 0.0
		if np.isnan(features['felzen']):
			features['felzen'] = 0.0
		#hormirror = image[:maxPixel/2]-image[maxPixel-1:maxPixel/2:-1]
		#vertmirror = image[:,:maxPixel/2]-image[:,maxPixel-1:maxPixel/2:-1]		
		image = resize(image, (maxPixel, maxPixel))
					
		#From http://scikit-image.org/docs/dev/auto_examples/plot_local_binary_pattern.html
		lbp = local_binary_pattern(image, n_points, radius, METHOD)
		n_bins = lbp.max()+1
		lbpcounts = np.histogram(lbp,n_bins,normed=True,range=(0, n_bins))[0]
		#_____________________________________________________________
		#__Moved region was here
		#dist_trans = ndimage.distance_transform_edt(image[0])
		# Store the rescaled image pixels and the axis ratio
		#fd, hog_image = hog(image[0], orientations=8, pixels_per_cell=(2, 2),
		#    cells_per_block=(1, 1), visualise=True)
		#X[i, 0:imageSize] = np.reshape(dist_trans, (1, imageSize))
		#X[i, 0:imageSize] = np.reshape(hog_image, (1, imageSize))
					
		# Store the rescaled image pixels and the axis ratio
		X[i, 0:imageSize] = np.reshape(image, (1, imageSize))
		#X[i, imageSize:imageSize+corr2dsize] = np.reshape(correlation, (1,corr2dsize))
		#X[i, imageSize+corr2dsize:imageSize+corr2dsize+corrsize] = np.reshape(horcorrelation, (1,corrsize))
		#X[i, imageSize+corr2dsize+corrsize:imageSize+corr2dsize+2*corrsize] = np.reshape(vertcorrelation, (1,corrsize))
		#X[i, imageSize+corr2dsize+2*corrsize:imageSize+corr2dsize+3*corrsize] = np.reshape(crosscorrelation, (1,corrsize))
		featcount = imageSize+3*corrsize+corr2dsize
		for k,v in features.items():
			try:
				X[i, featcount:featcount+len(v)] = v
				featcount = featcount + len(v)
			except TypeError, te:
				X[i, featcount] = v
				featcount = featcount + 1
		X[i, featcount:featcount+lbpcounts.size] = lbpcounts
		X[i, featcount+lbpcounts.size:featcount+lbpcounts.size+PCAsize] = PCAevr
		X[i, featcount+lbpcounts.size+PCAsize] = np.mean(PCAFeatures)
		i += 1
		if i in report: print np.ceil(i *100.0 / numberofTestImages), "% done"
Example #23
                    graycheck = False
                    if pix < thresh:
                        if not graycheck:
                            graycheck = True
                            horcount = horcount + 1
                    else:
                        graycheck = False
                for pix in vertslice:
                    graycheck = False
                    if pix < thresh:
                        if not graycheck:
                            graycheck = True
                            vertcount = vertcount + 1
                    else:
                        graycheck = False
                features["hsobel"] = np.nanmean(filter.hsobel(image[j]))
                features["vsobel"] = np.nanmean(filter.vsobel(image[j]))
                features["peaklocalmax"] = np.nanmean(peak_local_max(image[j]))
                features["felzen"] = np.nanmean(segmentation.felzenszwalb(image[j]))
                if np.isnan(features["peaklocalmax"]):
                    features["peaklocalmax"] = 0.0
                if np.isnan(features["felzen"]):
                    features["felzen"] = 0.0
                hormirror = image[j][: maxPixel / 2] - image[j][maxPixel - 1 : maxPixel / 2 : -1]
                vertmirror = image[j][:, : maxPixel / 2] - image[j][:, maxPixel - 1 : maxPixel / 2 : -1]
                # __End moved region
                image[j] = resize(image[j], (maxPixel, maxPixel))

                # From http://scikit-image.org/docs/dev/auto_examples/plot_local_binary_pattern.html
                lbp = local_binary_pattern(image[j], n_points, radius, METHOD)
                n_bins = lbp.max() + 1
Example #24
def test_hsobel_mask():
    """Horizontal Sobel on a masked array should be zero."""
    np.random.seed(0)
    result = F.hsobel(np.random.uniform(size=(10, 10)),
                      np.zeros((10, 10), bool))
    assert (np.all(result == 0))
Example #25
def test_hsobel_vertical():
    """Horizontal Sobel on a vertical edge should be zero."""
    i, j = np.mgrid[-5:6, -5:6]
    image = (j >= 0).astype(float) * np.sqrt(2)
    result = F.hsobel(image)
    assert_allclose(result, 0, atol=1e-10)
Example #26
import numpy as np
import skimage.io as io
from skimage import filter
import matplotlib.pyplot as plt
from scipy.signal import argrelextrema
from scipy.signal import medfilt

img = io.imread('../../data/cbct/fp200_x70_3s_5fps_60kV_150uA_00001.tif')
#img=io.imread('../../data/cbct/fp150_x350_3s_5fps_60kV_150uA_00001.tif')

#plt.figure(figsize=(10,2/3*10))

plt.subplot(321)
plt.imshow(img)
##
# Prepare
res = np.abs(filter.gaussian_filter(filter.hsobel(img), 5))
res = np.clip(res, 0.0, 0.2)
plt.subplot(322)
plt.imshow(res)
plt.colorbar()

# Identify the left and right sides of the circles
#vsum=medfilt(np.sum(res,0),7)
vsum = np.sum(res, 0)
idx = argrelextrema(vsum, np.greater)
peaks = vsum[idx]
p1 = np.argmax(peaks)
peaks = np.delete(peaks, p1)
p2 = np.argmax(peaks)
p2 = p2 + 1 if p1 < p2 else p2
p1 = idx[0][p1]
Example #27
 def edge_sobel_h(img2d):
     "gray input"
     imgsize = float((img2d.shape[0]*img2d.shape[1]))
     med_filter = ndimg.median_filter(img2d, size = (5,5))
     edges_h = filter.hsobel(med_filter/255.)
     return edges_h.sum()/imgsize
Example #28
def test_hsobel_vertical():
    """Horizontal Sobel on a vertical edge should be zero"""
    i, j = np.mgrid[-5:6, -5:6]
    image = (j >= 0).astype(float)
    result = F.hsobel(image)
    assert (np.all(result == 0))
def gradient_orientation_map(image):
    h = hsobel(image)
    v = vsobel(image)
    return np.arctan2(h,v)
Example #30
def main():
    files = []
    # Generate training data
    i = 0
    label = 0
    # List of string of class names
    namesClasses = list()
    features = {}
    features = features.copy()
    print "Reading images"
    # Navigate through the list of directories
    for folder in directory_names:
        #Get name of class directory separate from prefix path
        currentClass = folder.split(os.sep)[-1]
        namesClasses.append(currentClass)
        root_class.data_search(currentClass).id_list.append(
            len(namesClasses) - 1)
        for fileNameDir in os.walk(folder):
            for fileName in fileNameDir[2]:
                # Only read in the images
                if fileName[-4:] != ".jpg":
                    continue

                # Read in the images and create the features
                nameFileImage = "{0}{1}{2}".format(fileNameDir[0], os.sep,
                                                   fileName)
                image = []
                image.append(imread(nameFileImage, as_grey=True))
                features['original_size'] = image[0].size
                #image[0] = equalize_hist(image[0])
                image[0] = rotImage(image[0])
                # Added from https://github.com/Newmu/Stroke-Prediction/blob/master/startPredictingGenCode.py
                thresh = 0.9 * 255
                # if image.min < 0.75*255:
                # img = image < thresh
                # else:
                # img = image
                # if img.sum() != 0:
                # imgX,imgY = np.nonzero(img)
                # imgW = imgX.max()-imgX.min()
                # imgH = imgY.max()-imgY.min()
                # if (imgW>1 and imgH>1):
                # image = image[imgX.min():imgX.max(),imgY.min():imgY.max()]
                #----------------------------------
                cvimage = cv2.imread(nameFileImage)
                #image[0] = gaussian_filter(image[0],sigma=2)
                files.append(nameFileImage)
                image.append(np.fliplr(image[0]))
                image.append(np.flipud(image[0]))
                image.append(np.fliplr(image[2]))
                # image.append(np.rot90(image[0]))
                # image.append(np.fliplr(image[4]))
                # image.append(np.flipud(image[4]))
                # image.append(np.fliplr(image[6]))
                for j in range(len(image)):
                    features = getMinorMajorRatio(image[j])
                    #__Begin moved region
                    #From http://nbviewer.ipython.org/github/kqdtran/AY250-F13/blob/master/hw4/hw4.ipynb
                    pca = decomposition.PCA(n_components=25)
                    PCAFeatures = pca.fit_transform(image[0])
                    #_____________________________________________________________
                    corners = getCorners(cvimage, features['orientation'])
                    horslice = image[j][:, maxPixel / 2]
                    vertslice = image[j][maxPixel / 2]
                    #correlation = signal.correlate(image,image)
                    #horcorrelation = signal.correlate(horslice,horslice)
                    #vertcorrelation = signal.correlate(vertslice,vertslice)
                    #crosscorrelation = signal.correlate(horslice,vertslice)
                    #correlation = correlation/correlation[correlation.shape[0]/2,correlation.shape[0]/2]
                    #horcorrelation = horcorrelation/horcorrelation[horcorrelation.shape[0]/2]
                    #vertcorrelation = vertcorrelation/vertcorrelation[vertcorrelation.shape[0]/2]
                    #crosscorrelation = crosscorrelation/crosscorrelation[horcorrelation.shape[0]/2]
                    hormean = np.mean(horslice)
                    horstd = np.std(horslice)
                    vertmean = np.mean(vertslice)
                    vertstd = np.std(vertslice)
                    horcount = vertcount = 0
                    for pix in horslice:
                        graycheck = False
                        if pix < thresh:
                            if not graycheck:
                                graycheck = True
                                horcount = horcount + 1
                        else:
                            graycheck = False
                    for pix in vertslice:
                        graycheck = False
                        if pix < thresh:
                            if not graycheck:
                                graycheck = True
                                vertcount = vertcount + 1
                        else:
                            graycheck = False
                    features['hsobel'] = np.nanmean(filter.hsobel(image[j]))
                    features['vsobel'] = np.nanmean(filter.vsobel(image[j]))
                    features['peaklocalmax'] = np.nanmean(
                        peak_local_max(image[j]))
                    features['felzen'] = np.nanmean(
                        segmentation.felzenszwalb(image[j]))
                    if np.isnan(features['peaklocalmax']):
                        features['peaklocalmax'] = 0.0
                    if np.isnan(features['felzen']):
                        features['felzen'] = 0.0
                    #hormirror = image[j][:maxPixel/2]-image[j][maxPixel-1:maxPixel/2:-1]
                    #vertmirror = image[j][:,:maxPixel/2]-image[j][:,maxPixel-1:maxPixel/2:-1]
                    #__End moved region
                    image[j] = resize(image[j], (maxPixel, maxPixel))

                    #From http://scikit-image.org/docs/dev/auto_examples/plot_local_binary_pattern.html
                    lbp = local_binary_pattern(image[j], n_points, radius,
                                               METHOD)
                    n_bins = lbp.max() + 1
                    lbpcounts = np.histogram(lbp,
                                             n_bins,
                                             normed=True,
                                             range=(0, n_bins))[0]
                    #_____________________________________________________________
                    #__Moved region was here
                    #dist_trans = ndimage.distance_transform_edt(image[0])
                    # Store the rescaled image pixels and the axis ratio
                    #fd, hog_image = hog(image[0], orientations=8, pixels_per_cell=(2, 2),
                    #    cells_per_block=(1, 1), visualise=True)
                    #X[i*imagesperfile+j, 0:imageSize] = np.reshape(dist_trans, (1, imageSize))
                    #X[i*imagesperfile+j, 0:imageSize] = np.reshape(hog_image, (1, imageSize))
                    X[i * imagesperfile + j,
                      0:imageSize] = np.reshape(image[j], (1, imageSize))
                    #X[i*imagesperfile+j, imageSize:imageSize+corr2dsize] = np.reshape(correlation, (1,corr2dsize))
                    #X[i*imagesperfile+j, imageSize+corr2dsize:imageSize+corr2dsize+corrsize] = np.reshape(horcorrelation, (1,corrsize))
                    #X[i*imagesperfile+j, imageSize+corr2dsize+corrsize:imageSize+corr2dsize+2*corrsize] = np.reshape(vertcorrelation, (1,corrsize))
                    #X[i*imagesperfile+j, imageSize+corr2dsize+2*corrsize:imageSize+corr2dsize+3*corrsize] = np.reshape(crosscorrelation, (1,corrsize))
                    featcount = imageSize + 3 * corrsize + corr2dsize
                    for k, v in features.items():
                        try:
                            X[i * imagesperfile + j,
                              featcount:featcount + len(v)] = v
                            featcount = featcount + len(v)
                        except TypeError, te:
                            X[i * imagesperfile + j, featcount] = v
                            featcount = featcount + 1
                    X[i * imagesperfile + j,
                      featcount:featcount + lbpcounts.size] = lbpcounts
                    X[i * imagesperfile + j,
                      featcount + lbpcounts.size:featcount + lbpcounts.size +
                      PCAsize] = pca.explained_variance_ratio_
                    X[i * imagesperfile + j, featcount + lbpcounts.size +
                      PCAsize] = np.mean(PCAFeatures)
                    # Store the classlabel
                    y[i * imagesperfile + j] = label
                    if i * imagesperfile + j in report:
                        print np.ceil((i * imagesperfile + j) * 100.0 /
                                      (num_rows)), "% done"
                i += 1
        label += 1
Example #31
def extract_features(im_cat_path_list):
    '''
    Extract Features takes a list of 2-element lists:

        [category of image, path to image]

    and extracts 17 features from each image. The output is a list of 3-element lists:

        [category of image, path to image, [list of features]]

    The 17 features are:
        
        1     Product of image pixel dimensions (image size)
        2     Mean of Grayscale Image
        3     Area of Sobel Edges Normalized By Image Size
        4     Area of Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        5     Area of Canny Edges (Sum of booleans) Normalized By Image Size
        6     Number of Harris Corners
        7     Unique Felzenszwalb Image Segmentation Lines
        8     Area of Vertical Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        9     Area of Horizontal Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        10-12 Mean of Red/Green/Blue Channels (if grayscale: mean of the only color channel)
        13    Maximum Pixel Value of the Histogram of Oriented Gradients
        14    Percent of image that is light versus dark with adaptive thresholding
        15-17 Percent of image that is red/green/blue with adaptive thresholding
    '''

    cat_path_features = []

    for im_cat, im_path in im_cat_path_list:

        #RAW IMAGE
        im_raw = imread(im_path)  #image matrix

        #RAW IMAGE FLATTENED IF NOT ALREADY FLAT
        if len(np.shape(im_raw)) == 3:
            im_raw_flat = np.median(im_raw, axis=2)
        else:
            im_raw_flat = im_raw

        #Size of image
        im_y = float(im_raw.shape[0])
        im_x = float(im_raw.shape[1])
        im_size = im_y * im_x

        #LIST OF FEATURES
        features = []

        #FEATURE 1: Product of image pixel dimensions (image size)
        features.append(float(im_size))

        #FEATURE 2: Mean of Grayscale Image
        features.append(im_raw_flat.mean())

        #FEATURE 3: Area of Sobel Edges Normalized By Image Size
        im_edge_sobel = filter.sobel(im_raw_flat)
        features.append(im_edge_sobel.sum() / im_size)

        #FEATURE 4: Area of Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        features.append(
            float((im_edge_sobel > im_edge_sobel.mean() * 2).sum()) / im_size)

        #FEATURE 5: Area of Canny Edges (Sum of booleans) Normalized By Image Size
        im_canny = filter.canny(im_raw_flat, sigma=8)
        features.append(im_canny.sum().astype(float) / im_size)

        #FEATURE 6: Number of Harris Corners
        im_corners = feature.corner_peaks(feature.corner_harris(im_raw_flat),
                                          min_distance=5)
        features.append(float(len(im_corners)))

        #FEATURE 7: Unique Felzenszwalb Image Segmentation Lines
        im_raw_float = util.img_as_float(im_raw[::2, ::2])
        im_felzen_segments = segmentation.felzenszwalb(im_raw_float,
                                                       scale=100,
                                                       sigma=0.5,
                                                       min_size=50)
        features.append(float(len(np.unique(im_felzen_segments))))

        #FEATURE 8: Area of Vertical Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        im_edge_vsobel = filter.vsobel(im_raw_flat)
        features.append(
            float(
                (im_edge_vsobel > im_edge_vsobel.mean() * 2).sum()) / im_size)

        #FEATURE 9: Area of Horizontal Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        im_edge_hsobel = filter.hsobel(im_raw_flat)
        features.append(
            float(
                (im_edge_hsobel > im_edge_hsobel.mean() * 2).sum()) / im_size)

        #FEATURE 10-12: Mean of Red/Green/Blue Channels (if grayscale: mean of the only color channel)
        if len(np.shape(im_raw)) == 3:
            features.append(im_raw[..., 0].mean())
            features.append(im_raw[..., 1].mean())
            features.append(im_raw[..., 2].mean())
        else:
            features.append(im_raw_flat.mean())
            features.append(im_raw_flat.mean())
            features.append(im_raw_flat.mean())

        #FEATURE 13: Maximum Pixel Value of the Histogram of Oriented Gradients
        im_fd, im_hog = feature.hog(im_raw_flat,
                                    orientations=8,
                                    pixels_per_cell=(16, 16),
                                    cells_per_block=(1, 1),
                                    visualise=True)
        features.append(im_hog.max())

        #FEATURE 14: Percent of image that is light versus dark with adaptive thresholding
        im_thres_flat = filter.threshold_adaptive(im_raw_flat, 100, 'mean')
        features.append(im_thres_flat.sum() / im_size)

        #FEATURE 15-17: Percent of image that is red/green/blue with adaptive thresholding
        im_thres_red = filter.threshold_adaptive(im_raw[..., 0], 100, 'mean')
        im_thres_green = filter.threshold_adaptive(im_raw[..., 1], 100, 'mean')
        im_thres_blue = filter.threshold_adaptive(im_raw[..., 2], 100, 'mean')
        features.append(im_thres_red.sum() / im_size)
        features.append(im_thres_green.sum() / im_size)
        features.append(im_thres_blue.sum() / im_size)

        #BUILD OUTPUT LIST FOR THIS IMAGE
        cat_path_features.append([im_cat, im_path, features])

        #CLEAR IMAGE PROC DATA
        del im_raw
        del im_raw_flat
        del im_raw_float
        del im_edge_sobel
        del im_canny
        del im_corners
        del im_felzen_segments
        del im_edge_vsobel
        del im_edge_hsobel
        del im_fd
        del im_hog

    return cat_path_features
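A minimal driver for `extract_features` (a sketch; the category labels and file paths are placeholders, and the module is assumed to have already imported `imread`, `filter`, `feature`, `segmentation`, `util`, and `numpy` as the function requires):

im_cat_path_list = [
    ['cat', 'images/cat_001.jpg'],   # placeholder categories and paths
    ['dog', 'images/dog_001.jpg'],
]

rows = extract_features(im_cat_path_list)
for im_cat, im_path, feats in rows:
    # 17 numeric features per image, in the order documented above
    print im_cat, len(feats)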
Example #32
def main():
	files = []
	# Generate training data
	i = 0    
	label = 0
	# List of string of class names
	namesClasses = list()
	features = {}
	features = features.copy()
	print "Reading images"
	# Navigate through the list of directories
	for folder in directory_names:
		#Get name of class directory separate from prefix path
		currentClass = folder.split(os.sep)[-1] 
		namesClasses.append(currentClass)
		root_class.data_search(currentClass).id_list.append(len(namesClasses)-1)
		for fileNameDir in os.walk(folder):   
			for fileName in fileNameDir[2]:
				# Only read in the images
				if fileName[-4:] != ".jpg":
					continue
				
				# Read in the images and create the features
				nameFileImage = "{0}{1}{2}".format(fileNameDir[0], os.sep, fileName)
				image = []
				image.append(imread(nameFileImage, as_grey=True))
				features['original_size'] = image[0].size
				#image[0] = equalize_hist(image[0])
				image[0] = rotImage(image[0])
				# Added from https://github.com/Newmu/Stroke-Prediction/blob/master/startPredictingGenCode.py
				thresh = 0.9*255
				# if image.min < 0.75*255:
					# img = image < thresh
				# else:
					# img = image
				# if img.sum() != 0:
					# imgX,imgY = np.nonzero(img)
					# imgW = imgX.max()-imgX.min()
					# imgH = imgY.max()-imgY.min()
					# if (imgW>1 and imgH>1):
						# image = image[imgX.min():imgX.max(),imgY.min():imgY.max()]
				#----------------------------------
				cvimage = cv2.imread(nameFileImage)
				#image[0] = gaussian_filter(image[0],sigma=2)
				files.append(nameFileImage)
				image.append(np.fliplr(image[0]))
				image.append(np.flipud(image[0]))
				image.append(np.fliplr(image[2]))
				# image.append(np.rot90(image[0]))
				# image.append(np.fliplr(image[4]))
				# image.append(np.flipud(image[4]))
				# image.append(np.fliplr(image[6]))
				for j in range(len(image)):
					features = getMinorMajorRatio(image[j])
					#__Begin moved region
					#From http://nbviewer.ipython.org/github/kqdtran/AY250-F13/blob/master/hw4/hw4.ipynb
					pca = decomposition.PCA(n_components=25)
					PCAFeatures = pca.fit_transform(image[0])
					#_____________________________________________________________
					corners = getCorners(cvimage,features['orientation'])
					horslice = image[j][:,maxPixel/2]
					vertslice = image[j][maxPixel/2]
					#correlation = signal.correlate(image,image)
					#horcorrelation = signal.correlate(horslice,horslice)
					#vertcorrelation = signal.correlate(vertslice,vertslice)
					#crosscorrelation = signal.correlate(horslice,vertslice)
					#correlation = correlation/correlation[correlation.shape[0]/2,correlation.shape[0]/2]
					#horcorrelation = horcorrelation/horcorrelation[horcorrelation.shape[0]/2]
					#vertcorrelation = vertcorrelation/vertcorrelation[vertcorrelation.shape[0]/2]
					#crosscorrelation = crosscorrelation/crosscorrelation[horcorrelation.shape[0]/2]
					hormean = np.mean(horslice)
					horstd = np.std(horslice)
					vertmean = np.mean(vertslice)
					vertstd = np.std(vertslice)
					horcount = vertcount = 0
					for pix in horslice:
						graycheck = False
						if pix<thresh:
							if not graycheck:
								graycheck = True
								horcount = horcount + 1
						else:
							graycheck = False
					for pix in vertslice:
						graycheck = False
						if pix<thresh:
							if not graycheck:
								graycheck = True
								vertcount = vertcount + 1
						else:
							graycheck = False
					features['hsobel'] = np.nanmean(filter.hsobel(image[j]))
					features['vsobel'] = np.nanmean(filter.vsobel(image[j]))
					features['peaklocalmax'] = np.nanmean(peak_local_max(image[j]))
					features['felzen'] = np.nanmean(segmentation.felzenszwalb(image[j]))
					if np.isnan(features['peaklocalmax']):
						features['peaklocalmax'] = 0.0
					if np.isnan(features['felzen']):
						features['felzen'] = 0.0
					#hormirror = image[j][:maxPixel/2]-image[j][maxPixel-1:maxPixel/2:-1]
					#vertmirror = image[j][:,:maxPixel/2]-image[j][:,maxPixel-1:maxPixel/2:-1]
					#__End moved region
					image[j] = resize(image[j], (maxPixel, maxPixel))
					
					#From http://scikit-image.org/docs/dev/auto_examples/plot_local_binary_pattern.html
					lbp = local_binary_pattern(image[j], n_points, radius, METHOD)
					n_bins = lbp.max()+1
					lbpcounts = np.histogram(lbp,n_bins,normed=True,range=(0, n_bins))[0]
					#_____________________________________________________________
					#__Moved region was here
					#dist_trans = ndimage.distance_transform_edt(image[0])
					# Store the rescaled image pixels and the axis ratio
					#fd, hog_image = hog(image[0], orientations=8, pixels_per_cell=(2, 2),
					#    cells_per_block=(1, 1), visualise=True)
					#X[i*imagesperfile+j, 0:imageSize] = np.reshape(dist_trans, (1, imageSize))
					#X[i*imagesperfile+j, 0:imageSize] = np.reshape(hog_image, (1, imageSize))
					X[i*imagesperfile+j, 0:imageSize] = np.reshape(image[j], (1, imageSize))
					#X[i*imagesperfile+j, imageSize:imageSize+corr2dsize] = np.reshape(correlation, (1,corr2dsize))
					#X[i*imagesperfile+j, imageSize+corr2dsize:imageSize+corr2dsize+corrsize] = np.reshape(horcorrelation, (1,corrsize))
					#X[i*imagesperfile+j, imageSize+corr2dsize+corrsize:imageSize+corr2dsize+2*corrsize] = np.reshape(vertcorrelation, (1,corrsize))
					#X[i*imagesperfile+j, imageSize+corr2dsize+2*corrsize:imageSize+corr2dsize+3*corrsize] = np.reshape(crosscorrelation, (1,corrsize))
					featcount = imageSize+3*corrsize+corr2dsize
					for k,v in features.items():
						try:
							X[i*imagesperfile+j, featcount:featcount+len(v)] = v
							featcount = featcount + len(v)
						except TypeError, te:
							X[i*imagesperfile+j, featcount] = v
							featcount = featcount + 1
					X[i*imagesperfile+j, featcount:featcount+lbpcounts.size] = lbpcounts
					X[i*imagesperfile+j, featcount+lbpcounts.size:featcount+lbpcounts.size+PCAsize] = pca.explained_variance_ratio_
					X[i*imagesperfile+j, featcount+lbpcounts.size+PCAsize] = np.mean(PCAFeatures)
					# Store the classlabel
					y[i*imagesperfile+j] = label
					if i*imagesperfile+j in report: print np.ceil((i*imagesperfile+j) *100.0 / (num_rows)), "% done"
				i += 1
		label += 1
Example #33
 X[i, imageSize + 3 * corrsize + corr2dsize + 11] = perimratio
 X[i, imageSize + 3 * corrsize + corr2dsize + 12] = arearatio
 X[i, imageSize + 3 * corrsize + corr2dsize + 13] = cornercenter
 X[i, imageSize + 3 * corrsize + corr2dsize +
   14] = cornercentercoords[0]
 X[i, imageSize + 3 * corrsize + corr2dsize +
   15] = cornercentercoords[1]
 X[i, imageSize + 3 * corrsize + corr2dsize + 16] = cornerstd
 X[i,
   imageSize + 3 * corrsize + corr2dsize + 17] = cornerstdcoords[0]
 X[i,
   imageSize + 3 * corrsize + corr2dsize + 18] = cornerstdcoords[1]
 X[i, imageSize + 3 * corrsize + corr2dsize + 19] = np.nanmean(
     filter.vsobel(image))
 X[i, imageSize + 3 * corrsize + corr2dsize + 20] = np.nanmean(
     filter.hsobel(image))
 X[i, imageSize + 3 * corrsize + corr2dsize + 21] = felzen
 X[i, imageSize + 3 * corrsize + corr2dsize + 22] = peaklocalmax
 X[i, imageSize + 3 * corrsize + corr2dsize + 23] = lrdiff
 X[i, imageSize + 3 * corrsize + corr2dsize + 24] = tbdiff
 X[i, imageSize + 3 * corrsize + corr2dsize + 25] = whu1
 X[i, imageSize + 3 * corrsize + corr2dsize + 26] = whu2
 X[i, imageSize + 3 * corrsize + corr2dsize + 27] = whu3
 X[i, imageSize + 3 * corrsize + corr2dsize + 28] = whu12
 X[i, imageSize + 3 * corrsize + corr2dsize + 29] = whu13
 X[i, imageSize + 3 * corrsize + corr2dsize + 30] = whu23
 X[i, imageSize + 3 * corrsize + corr2dsize + 31] = extent
 X[i, imageSize + 3 * corrsize + corr2dsize + 32] = minintensity
 X[i, imageSize + 3 * corrsize + corr2dsize + 33] = meanintensity
 X[i, imageSize + 3 * corrsize + corr2dsize + 34] = maxintensity
 X[i, imageSize + 3 * corrsize + corr2dsize + 35] = intensityratio1
Example #34
def sharpness(image, edge_threshold=0.0001, w=5, t=1.0):
    """
    Code implemented by the following article:
    Sharpness Estimation for Document and Scene Images,
    Kumar, Chen, Doermann
    """
    # Median filtering
    w_size = (w * 2) + 1
    image = util.img_as_float(image)
    image_m = util.img_as_float(median(image, mph.square(3)))

    # Window functions
    def dom_func(window):
        # import pdb; pdb.set_trace()
        return abs(
            abs(window[4] - window[2]) - abs(window[2] - window[0])
        )

    def contrast_func(window):
        # print window
        s = 0.0
        for i in xrange(0, len(window) - 1):
            # print i
            s += abs(window[i] - window[i+1])
        return s

    # Delta DoM in horizontal direction
    dom_x_values = generic_filter(image_m, dom_func,
        size=(1, 5),
        mode='reflect')
    # Delta DoM in vertical direction
    dom_y_values = generic_filter(image_m, dom_func,
        size=(5, 1),
        mode='reflect')

    dom_x = generic_filter(
        dom_x_values, lambda w: sum(w),
        size=(1, w_size), mode='reflect')
    dom_y = generic_filter(
        dom_y_values, lambda w: sum(w),
        size=(w_size, 1), mode='reflect')


    edges_x = vsobel(image)
    # Normalize
    edges_x *= (1.0 / edges_x.max())
    edges_x_pixels = len(edges_x[edges_x > edge_threshold].ravel())

    edges_y = hsobel(image)
    # Normalize
    edges_y *= (1.0 / edges_y.max())
    edges_y_pixels = len(edges_y[edges_y > edge_threshold].ravel())

    # Contrast in horizontal direction
    contrast_x = generic_filter(image, contrast_func,
        size=(1, w_size + 1),
        mode='reflect')
    # Contrast in vertical direction
    contrast_y = generic_filter(image, contrast_func,
        size=(w_size + 1, 1),
        mode='reflect')

    sharpness_x = dom_x / contrast_x
    sharpness_y = dom_y / contrast_y

    # import pdb; pdb.set_trace()

    sharp_x_pixels = len(np.where(
        sharpness_x[edges_x > edge_threshold] > t
    )[0])
    sharp_y_pixels = len(np.where(
        sharpness_y[edges_y > edge_threshold] > t
    )[0])

    # import pdb; pdb.set_trace()

    if edges_x_pixels > 0:
        rx = (float(sharp_x_pixels) / edges_x_pixels)
    else:
        rx = 1

    if edges_y_pixels > 0:
        ry = (float(sharp_y_pixels) / edges_y_pixels)
    else:
        ry = 1

    final_estimate = np.sqrt(
        (rx ** 2) + (ry ** 2)
    )    
    return final_estimate
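A small usage sketch for the sharpness estimator above (assuming a grayscale input and the same bare `vsobel`/`hsobel`, `median`, `mph`, `util`, and `generic_filter` names the function relies on; the image path is a placeholder):

from skimage import io, color

im = io.imread('page_scan.png')   # placeholder path
if im.ndim == 3:
    im = color.rgb2gray(im)       # the estimator expects a 2-D image

score = sharpness(im)
# Higher scores indicate sharper edges relative to local contrast.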
Example #35
def gradient_map(image):
    h = hsobel(image)
    v = vsobel(image)
    return np.dstack((h, v))
Example #36
def test_hsobel_zeros():
    """Horizontal sobel on an array of all zeros."""
    result = F.hsobel(np.zeros((10, 10)), np.ones((10, 10), bool))
    assert (np.all(result == 0))
Example #37
def test_hsobel_mask():
    """Horizontal Sobel on a masked array should be zero"""
    np.random.seed(0)
    result = F.hsobel(np.random.uniform(size=(10, 10)), np.zeros((10, 10),
                                                                 bool))
    assert (np.all(result == 0))
Example #38
def prep():
    header = "acantharia_protist_big_center,acantharia_protist_halo,acantharia_protist,amphipods,appendicularian_fritillaridae,appendicularian_s_shape,appendicularian_slight_curve,appendicularian_straight,artifacts_edge,artifacts,chaetognath_non_sagitta,chaetognath_other,chaetognath_sagitta,chordate_type1,copepod_calanoid_eggs,copepod_calanoid_eucalanus,copepod_calanoid_flatheads,copepod_calanoid_frillyAntennae,copepod_calanoid_large_side_antennatucked,copepod_calanoid_large,copepod_calanoid_octomoms,copepod_calanoid_small_longantennae,copepod_calanoid,copepod_cyclopoid_copilia,copepod_cyclopoid_oithona_eggs,copepod_cyclopoid_oithona,copepod_other,crustacean_other,ctenophore_cestid,ctenophore_cydippid_no_tentacles,ctenophore_cydippid_tentacles,ctenophore_lobate,decapods,detritus_blob,detritus_filamentous,detritus_other,diatom_chain_string,diatom_chain_tube,echinoderm_larva_pluteus_brittlestar,echinoderm_larva_pluteus_early,echinoderm_larva_pluteus_typeC,echinoderm_larva_pluteus_urchin,echinoderm_larva_seastar_bipinnaria,echinoderm_larva_seastar_brachiolaria,echinoderm_seacucumber_auricularia_larva,echinopluteus,ephyra,euphausiids_young,euphausiids,fecal_pellet,fish_larvae_deep_body,fish_larvae_leptocephali,fish_larvae_medium_body,fish_larvae_myctophids,fish_larvae_thin_body,fish_larvae_very_thin_body,heteropod,hydromedusae_aglaura,hydromedusae_bell_and_tentacles,hydromedusae_h15,hydromedusae_haliscera_small_sideview,hydromedusae_haliscera,hydromedusae_liriope,hydromedusae_narco_dark,hydromedusae_narco_young,hydromedusae_narcomedusae,hydromedusae_other,hydromedusae_partial_dark,hydromedusae_shapeA_sideview_small,hydromedusae_shapeA,hydromedusae_shapeB,hydromedusae_sideview_big,hydromedusae_solmaris,hydromedusae_solmundella,hydromedusae_typeD_bell_and_tentacles,hydromedusae_typeD,hydromedusae_typeE,hydromedusae_typeF,invertebrate_larvae_other_A,invertebrate_larvae_other_B,jellies_tentacles,polychaete,protist_dark_center,protist_fuzzy_olive,protist_noctiluca,protist_other,protist_star,pteropod_butterfly,pteropod_theco_dev_seq,pteropod_triangle,radiolarian_chain,radiolarian_colony,shrimp_caridean,shrimp_sergestidae,shrimp_zoea,shrimp-like_other,siphonophore_calycophoran_abylidae,siphonophore_calycophoran_rocketship_adult,siphonophore_calycophoran_rocketship_young,siphonophore_calycophoran_sphaeronectes_stem,siphonophore_calycophoran_sphaeronectes_young,siphonophore_calycophoran_sphaeronectes,siphonophore_other_parts,siphonophore_partial,siphonophore_physonect_young,siphonophore_physonect,stomatopod,tornaria_acorn_worm_larvae,trichodesmium_bowtie,trichodesmium_multiple,trichodesmium_puff,trichodesmium_tuft,trochophore_larvae,tunicate_doliolid_nurse,tunicate_doliolid,tunicate_partial,tunicate_salp_chains,tunicate_salp,unknown_blobs_and_smudges,unknown_sticks,unknown_unclassified".split(
        ',')
    with open('namesClasses.dat', 'rb') as f:
        namesClasses = cPickle.load(f)
    labels = map(lambda s: s.split('\\')[-1], namesClasses)
    for i in range(len(namesClasses)):
        currentClass = namesClasses[i]
        root_class.data_search(currentClass).id_list.append(i)
    #get the total test images
    #Full set
    #fnames = glob.glob(os.path.join("competition_data", "test", "*.jpg"))
    #Smaller set
    fnames = glob.glob(os.path.join("smaller_comp", "test", "*.jpg"))

    numberofTestImages = len(fnames)

    X = np.zeros((numberofTestImages, num_features), dtype=float)

    #Get filename separate from prefix path
    images = map(lambda fileName: fileName.split('\\')[-1], fnames)

    i = 0
    # report progress for each 1% done
    report = [int((j + 1) * numberofTestImages / 100.) for j in range(100)]
    for fileName in fnames:
        # Read in the images and create the features
        image = imread(fileName, as_grey=True)
        image = rotImage(image)
        # Added from https://github.com/Newmu/Stroke-Prediction/blob/master/startPredictingGenCode.py
        thresh = 0.9 * 255
        # if image.min < 0.75*255:
        # img = image < thresh
        # else:
        # img = image
        # if img.sum() != 0:
        # imgX,imgY = np.nonzero(img)
        # imgW = imgX.max()-imgX.min()
        # imgH = imgY.max()-imgY.min()
        # if (imgW>1 and imgH>1):
        # image = image[imgX.min():imgX.max(),imgY.min():imgY.max()]
        # #----------------------------------
        cvimage = cv2.imread(fileName)
        features = getMinorMajorRatio(image)
        #__Begin moved region
        #From http://nbviewer.ipython.org/github/kqdtran/AY250-F13/blob/master/hw4/hw4.ipynb
        pca = decomposition.PCA(n_components=25)
        PCAFeatures = pca.fit_transform(image)
        PCAevr = pca.explained_variance_ratio_
        for evr in range(len(PCAevr)):
            if np.isnan(PCAevr[evr]):
                PCAevr[evr] = 0.0
        #_____________________________________________________________
        corners = getCorners(cvimage, features['orientation'])
        horslice = image[:, maxPixel / 2]
        vertslice = image[maxPixel / 2]
        #correlation = signal.correlate(image,image)
        #horcorrelation = signal.correlate(horslice,horslice)
        #vertcorrelation = signal.correlate(vertslice,vertslice)
        #crosscorrelation = signal.correlate(horslice,vertslice)
        #correlation = correlation/correlation[correlation.shape[0]/2,correlation.shape[0]/2]
        #horcorrelation = horcorrelation/horcorrelation[horcorrelation.shape[0]/2]
        #vertcorrelation = vertcorrelation/vertcorrelation[vertcorrelation.shape[0]/2]
        #crosscorrelation = crosscorrelation/crosscorrelation[horcorrelation.shape[0]/2]
        hormean = np.mean(horslice)
        horstd = np.std(horslice)
        vertmean = np.mean(vertslice)
        vertstd = np.std(vertslice)
        horcount = vertcount = 0
        for pix in horslice:
            graycheck = False
            if pix < thresh:
                if not graycheck:
                    graycheck = True
                    horcount = horcount + 1
            else:
                graycheck = False
        for pix in vertslice:
            graycheck = False
            if pix < thresh:
                if not graycheck:
                    graycheck = True
                    vertcount = vertcount + 1
            else:
                graycheck = False
        features['hsobel'] = np.nanmean(filter.hsobel(image))
        features['vsobel'] = np.nanmean(filter.vsobel(image))
        features['peaklocalmax'] = np.nanmean(peak_local_max(image))
        features['felzen'] = np.nanmean(segmentation.felzenszwalb(image))
        if np.isnan(features['peaklocalmax']):
            features['peaklocalmax'] = 0.0
        if np.isnan(features['felzen']):
            features['felzen'] = 0.0
        #hormirror = image[:maxPixel/2]-image[maxPixel-1:maxPixel/2:-1]
        #vertmirror = image[:,:maxPixel/2]-image[:,maxPixel-1:maxPixel/2:-1]
        image = resize(image, (maxPixel, maxPixel))

        #From http://scikit-image.org/docs/dev/auto_examples/plot_local_binary_pattern.html
        lbp = local_binary_pattern(image, n_points, radius, METHOD)
        n_bins = lbp.max() + 1
        lbpcounts = np.histogram(lbp, n_bins, normed=True,
                                 range=(0, n_bins))[0]
        #_____________________________________________________________
        #__Moved region was here
        #dist_trans = ndimage.distance_transform_edt(image[0])
        # Store the rescaled image pixels and the axis ratio
        #fd, hog_image = hog(image[0], orientations=8, pixels_per_cell=(2, 2),
        #    cells_per_block=(1, 1), visualise=True)
        #X[i, 0:imageSize] = np.reshape(dist_trans, (1, imageSize))
        #X[i, 0:imageSize] = np.reshape(hog_image, (1, imageSize))

        # Store the rescaled image pixels and the axis ratio
        X[i, 0:imageSize] = np.reshape(image, (1, imageSize))
        #X[i, imageSize:imageSize+corr2dsize] = np.reshape(correlation, (1,corr2dsize))
        #X[i, imageSize+corr2dsize:imageSize+corr2dsize+corrsize] = np.reshape(horcorrelation, (1,corrsize))
        #X[i, imageSize+corr2dsize+corrsize:imageSize+corr2dsize+2*corrsize] = np.reshape(vertcorrelation, (1,corrsize))
        #X[i, imageSize+corr2dsize+2*corrsize:imageSize+corr2dsize+3*corrsize] = np.reshape(crosscorrelation, (1,corrsize))
        featcount = imageSize + 3 * corrsize + corr2dsize
        for k, v in features.items():
            try:
                X[i, featcount:featcount + len(v)] = v
                featcount = featcount + len(v)
            except TypeError, te:
                X[i, featcount] = v
                featcount = featcount + 1
        X[i, featcount:featcount + lbpcounts.size] = lbpcounts
        X[i, featcount + lbpcounts.size:featcount + lbpcounts.size +
          PCAsize] = PCAevr
        X[i, featcount + lbpcounts.size + PCAsize] = np.mean(PCAFeatures)
        i += 1
        if i in report: print np.ceil(i * 100.0 / numberofTestImages), "% done"
Exemplo n.º 39
            X[i, imageSize + 5] = hu1
            X[i, imageSize + 6] = hu2
            X[i, imageSize + 7] = hu3
            X[i, imageSize + 8] = hu12
            X[i, imageSize + 9] = hu13
            X[i, imageSize + 10] = hu23
            X[i, imageSize + 11] = perimratio
            X[i, imageSize + 12] = arearatio
            X[i, imageSize + 13] = cornercenter
            X[i, imageSize + 14] = cornercentercoords[0]
            X[i, imageSize + 15] = cornercentercoords[1]
            X[i, imageSize + 16] = cornerstd
            X[i, imageSize + 17] = cornerstdcoords[0]
            X[i, imageSize + 18] = cornerstdcoords[1]
            X[i, imageSize + 19] = np.nanmean(filter.vsobel(image))
            X[i, imageSize + 20] = np.nanmean(filter.hsobel(image))
            X[i, imageSize + 21] = felzen
            X[i, imageSize + 22] = peaklocalmax
            # Store the classlabel
            y[i] = label
            i += 1
            # report progress for each 5% done
            report = [int((j + 1) * num_rows / 20.) for j in range(20)]
            if i in report: print np.ceil(i * 100.0 / num_rows), "% done"
    label += 1

# Loop through the classes two at a time and compare their distributions of the Width/Length Ratio

#Create a DataFrame object to make subsetting the data by class label easier
df = pd.DataFrame({"class": y[:], "ratio": X[:, num_features - 1]})
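
# A minimal sketch (not from the original script) of the pairwise comparison described in
# the comment above: scipy.stats.ks_2samp is an assumed choice of test for comparing the
# Width/Length ratio distributions of two classes at a time.
from scipy import stats

class_labels = np.unique(df["class"])
for a_idx, class_a in enumerate(class_labels):
    for class_b in class_labels[a_idx + 1:]:
        ratios_a = df[df["class"] == class_a]["ratio"]
        ratios_b = df[df["class"] == class_b]["ratio"]
        ks_stat, p_value = stats.ks_2samp(ratios_a, ratios_b)
        print class_a, class_b, ks_stat, p_value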
Exemplo n.º 40
			X[i, imageSize+3*corrsize+corr2dsize+5] = hu1
			X[i, imageSize+3*corrsize+corr2dsize+6] = hu2
			X[i, imageSize+3*corrsize+corr2dsize+7] = hu3
			X[i, imageSize+3*corrsize+corr2dsize+8] = hu12
			X[i, imageSize+3*corrsize+corr2dsize+9] = hu13
			X[i, imageSize+3*corrsize+corr2dsize+10] = hu23
			X[i, imageSize+3*corrsize+corr2dsize+11] = perimratio
			X[i, imageSize+3*corrsize+corr2dsize+12] = arearatio
			X[i, imageSize+3*corrsize+corr2dsize+13] = cornercenter
			X[i, imageSize+3*corrsize+corr2dsize+14] = cornercentercoords[0]
			X[i, imageSize+3*corrsize+corr2dsize+15] = cornercentercoords[1]
			X[i, imageSize+3*corrsize+corr2dsize+16] = cornerstd
			X[i, imageSize+3*corrsize+corr2dsize+17] = cornerstdcoords[0]
			X[i, imageSize+3*corrsize+corr2dsize+18] = cornerstdcoords[1]
			X[i, imageSize+3*corrsize+corr2dsize+19] = np.nanmean(filter.vsobel(image))
			X[i, imageSize+3*corrsize+corr2dsize+20] = np.nanmean(filter.hsobel(image))
			X[i, imageSize+3*corrsize+corr2dsize+21] = felzen
			X[i, imageSize+3*corrsize+corr2dsize+22] = peaklocalmax
			X[i, imageSize+3*corrsize+corr2dsize+23] = lrdiff
			X[i, imageSize+3*corrsize+corr2dsize+24] = tbdiff
			X[i, imageSize+3*corrsize+corr2dsize+25] = whu1
			X[i, imageSize+3*corrsize+corr2dsize+26] = whu2
			X[i, imageSize+3*corrsize+corr2dsize+27] = whu3
			X[i, imageSize+3*corrsize+corr2dsize+28] = whu12
			X[i, imageSize+3*corrsize+corr2dsize+29] = whu13
			X[i, imageSize+3*corrsize+corr2dsize+30] = whu23
			X[i, imageSize+3*corrsize+corr2dsize+31] = extent 
			X[i, imageSize+3*corrsize+corr2dsize+32] = minintensity
			X[i, imageSize+3*corrsize+corr2dsize+33] = meanintensity
			X[i, imageSize+3*corrsize+corr2dsize+34] = maxintensity
			X[i, imageSize+3*corrsize+corr2dsize+35] = intensityratio1
Exemplo n.º 41
def extract_features(im_cat_path_list):
    '''
    Extract Features takes a list of 2-element lists:

        [category of image, path to image]

    and extracts 17 features from each image. The output is a list of 3-element lists:

        [category of image, path to image, [list of features]]

    The 17 features are:

        1     Product of image pixel dimensions (image size)
        2     Mean of Grayscale Image
        3     Area of Sobel Edges Normalized By Image Size
        4     Area of Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        5     Area of Canny Edges (Sum of booleans) Normalized By Image Size
        6     Number of Harris Corners
        7     Unique Felzenszwalb Image Segmentation Lines
        8     Area of Vertical Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        9     Area of Horizontal Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        10-12 Mean of Red/Green/Blue Channels (if grayscale: mean of the only color channel)
        13    Maximum Pixel Value of the Histogram of Oriented Gradients
        14    Percent of image that is light versus dark with adaptive thresholding
        15-17 Percent of image that is red/green/blue with adaptive thresholding
    '''

    cat_path_features = []
    
    for im_cat, im_path in im_cat_path_list:
        
        #RAW IMAGE
        im_raw = imread(im_path) #image matrix
        
        #RAW IMAGE FLATTENED IF NOT ALREADY FLAT
        if len(np.shape(im_raw)) == 3:
            im_raw_flat = np.median(im_raw, axis=2)
        else:
            im_raw_flat = im_raw    
        
        #Size of image
        im_y = float(im_raw.shape[0])
        im_x = float(im_raw.shape[1])
        im_size = im_y*im_x
        
        #LIST OF FEATURES
        features = []
        
        #FEATURE 1: Product of image pixel dimensions (image size)
        features.append(float(im_size))
        
        #FEATURE 2: Mean of Grayscale Image
        features.append(im_raw_flat.mean())
        
        #FEATURE 3: Area of Sobel Edges Normalized By Image Size
        im_edge_sobel = filter.sobel(im_raw_flat)
        features.append(im_edge_sobel.sum()/im_size)
        
        #FEATURE 4: Area of Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        features.append(float((im_edge_sobel > im_edge_sobel.mean()*2).sum())/im_size)
        
        #FEATURE 5: Area of Canny Edges (Sum of booleans) Normalized By Image Size
        im_canny = filter.canny(im_raw_flat, sigma=8)
        features.append(im_canny.sum().astype(float)/im_size)
        
        #FEATURE 6: Number of Harris Corners
        im_corners = feature.corner_peaks(feature.corner_harris(im_raw_flat), min_distance=5)
        features.append(float(len(im_corners)))
        
        #FEATURE 7: Unique Felzenszwalb Image Segmentation Lines
        im_raw_float = util.img_as_float(im_raw[::2, ::2])
        im_felzen_segments = segmentation.felzenszwalb(im_raw_float, scale=100, sigma=0.5, min_size=50)
        features.append(float(len(np.unique(im_felzen_segments))))
        
        #FEATURE 8: Area of Vertical Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        im_edge_vsobel = filter.vsobel(im_raw_flat)
        features.append(float((im_edge_vsobel > im_edge_vsobel.mean()*2).sum())/im_size)
        
        #FEATURE 9: Area of Horizontal Sobel Edges Above 2x Mean of Sobel Edges Normalized By Image Size
        im_edge_hsobel = filter.hsobel(im_raw_flat)
        features.append(float((im_edge_hsobel > im_edge_hsobel.mean()*2).sum())/im_size)
        
        #FEATURE 10-12: Mean of Red/Green/Blue Channels (if grayscale: mean of the only color channel)
        if len(np.shape(im_raw)) == 3:
            features.append(im_raw[...,0].mean())
            features.append(im_raw[...,1].mean())
            features.append(im_raw[...,2].mean())
        else:
            features.append(im_raw_flat.mean())
            features.append(im_raw_flat.mean())
            features.append(im_raw_flat.mean())
        
        #FEATURE 13: Maximum Pixel Value of the Histogram of Oriented Gradients
        im_fd, im_hog = feature.hog(im_raw_flat, orientations=8, pixels_per_cell=(16 , 16), cells_per_block=(1, 1), visualise=True)
        features.append(im_hog.max())
        
        #FEATURE 14: Percent of image that is light versus dark with adaptive thresholding
        im_thres_flat = filter.threshold_adaptive(im_raw_flat, 100 , 'mean')
        features.append(im_thres_flat.sum()/im_size)
        
        #FEATURE 15-17: Percent of image that is red/green/blue with adaptive thresholding
        #(fall back to the grayscale threshold when the image has no color channels,
        # mirroring the handling of features 10-12)
        if len(np.shape(im_raw)) == 3:
            im_thres_red = filter.threshold_adaptive(im_raw[..., 0], 100, 'mean')
            im_thres_green = filter.threshold_adaptive(im_raw[..., 1], 100, 'mean')
            im_thres_blue = filter.threshold_adaptive(im_raw[..., 2], 100, 'mean')
            features.append(im_thres_red.sum()/im_size)
            features.append(im_thres_green.sum()/im_size)
            features.append(im_thres_blue.sum()/im_size)
        else:
            features.append(im_thres_flat.sum()/im_size)
            features.append(im_thres_flat.sum()/im_size)
            features.append(im_thres_flat.sum()/im_size)
        
        #BUILD OUTPUT LIST FOR THIS IMAGE
        cat_path_features.append([im_cat, im_path, features])
        
        #CLEAR IMAGE PROC DATA
        del im_raw
        del im_raw_flat
        del im_raw_float
        del im_edge_sobel
        del im_canny
        del im_corners
        del im_felzen_segments
        del im_edge_vsobel
        del im_edge_hsobel
        del im_fd
        del im_hog
        
    return cat_path_features
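
# A hypothetical usage sketch for extract_features; the category labels and image paths
# below are placeholders only, not files that ship with this example.
if __name__ == "__main__":
    sample_inputs = [
        ["cat", "images/cat_001.jpg"],
        ["dog", "images/dog_001.jpg"],
    ]
    for category, path, feature_vector in extract_features(sample_inputs):
        print category, path, len(feature_vector)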