def preprocess_frame(frame):
    # resize to the network's expected input size
    _frame = resize(frame, output_shape=(224, 224))
    # reverse the channel order (e.g. RGB -> BGR)
    _frame = np.flip(_frame, axis=2)
    # histogram-equalize to normalize contrast
    _frame = equalize(_frame)
    # add a leading batch dimension
    _frame = np.expand_dims(_frame, axis=0)
    # rescale to 0-255 and apply the model's own preprocessing
    _frame = preprocess_input(_frame * 255)
    return _frame
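A minimal usage sketch (assumptions: numpy, skimage.transform.resize, a Keras-style preprocess_input, and the equalize helper used throughout these examples are already imported; the frame below is synthetic):

import numpy as np

# synthetic RGB frame standing in for one grabbed from a video stream
frame = np.random.rand(480, 640, 3)

batch = preprocess_frame(frame)
print(batch.shape)  # expected (1, 224, 224, 3), ready for a Keras-style model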
Example #2
def loadDCM(f, no_preprocess=False, dicom=False):
    wLoc = 448
    p = 32   # border padding (not defined in the snippet; inferred from 576 + 2*32 = 640 rows, 448 + 2*32 = 512 cols)
    ### Load input dicom
    if dicom:
        dcmFile = dcmread(f)
        dcm = dcmFile.pixel_array
        dcm = dcm / dcm.max()
        if dcmFile.PhotometricInterpretation == 'MONOCHROME1':
            ### https://dicom.innolitics.com/ciods/ct-image/image-pixel/00280004 ###
            ### When MONOCHROME1, 0->bright, 1->dark intensities
            dcm = 1 - dcm
    else:
        ## Load input image
        dcm = imread(f)
        dcm = dcm / dcm.max()
    if not no_preprocess:
        dcm = equalize(dcm)

    if len(dcm.shape) > 2:
        dcm = rgb2gray(dcm[:, :, :3])

    ### Resize (preserving aspect ratio), then pad to 640x512
    hLoc = int((dcm.shape[0] / (dcm.shape[1] / wLoc)))
    if hLoc > 576:
        hLoc = 576
        wLoc = int((dcm.shape[1] / (dcm.shape[0] / hLoc)))

    img = resize(dcm, (hLoc, wLoc))
    img = torch.Tensor(img)
    pImg = torch.zeros((640, 512))
    h = (int((576 - hLoc) / 2)) + p
    w = int((448 - wLoc) / 2) + p
    roi = torch.zeros(pImg.shape)
    if w == p:
        pImg[np.abs(h):(h + img.shape[0]), p:-p] = img
        roi[np.abs(h):(h + img.shape[0]), p:-p] = 1.0
    else:
        pImg[p:-p, np.abs(w):(w + img.shape[1])] = img
        roi[p:-p, np.abs(w):(w + img.shape[1])] = 1.0

    imH = dcm.shape[0]
    imW = dcm.shape[1]
    pImg = pImg.unsqueeze(0).unsqueeze(0)
    return pImg, roi, h, w, hLoc, wLoc, imH, imW
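A hedged usage sketch (the file name is hypothetical; pydicom's dcmread, skimage's imread/resize/rgb2gray/equalize, numpy and torch are assumed to be imported as in the snippet above):

pImg, roi, h, w, hLoc, wLoc, imH, imW = loadDCM('chest_xray.dcm', dicom=True)
print(pImg.shape)  # torch.Size([1, 1, 640, 512]) -- padded, network-ready tensor
print(roi.sum())   # number of pixels inside the valid (non-padding) region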
Example #3
def loadDCM(f, preprocess=False, dicom=False):

    wLoc = 448
    p = 32   # border padding (not defined in the snippet; inferred from 576 + 2*32 = 640 rows, 448 + 2*32 = 512 cols)
    ### Load input dicom
    if dicom:
        dcm = dcmread(f).pixel_array
    else:
        ## Load input image
        dcm = imread(f)

    if preprocess:
        dcm = dcm / dcm.max()
        dcm = equalize(dcm)

    if len(dcm.shape) > 2:
        dcm = rgb2gray(dcm[:, :, :3])

    ### Resize (preserving aspect ratio), then pad to 640x512
    hLoc = int((dcm.shape[0] / (dcm.shape[1] / wLoc)))
    if hLoc > 576:
        hLoc = 576
        wLoc = int((dcm.shape[1] / (dcm.shape[0] / hLoc)))

    img = resize(dcm, (hLoc, wLoc))
    img = torch.Tensor(img)
    pImg = torch.zeros((640, 512))
    h = (int((576 - hLoc) / 2)) + p
    w = int((448 - wLoc) / 2) + p
    roi = torch.zeros(pImg.shape)
    if w == p:
        pImg[np.abs(h):(h + img.shape[0]), p:-p] = img
        roi[np.abs(h):(h + img.shape[0]), p:-p] = 1.0
    else:
        pImg[p:-p, np.abs(w):(w + img.shape[1])] = img
        roi[p:-p, np.abs(w):(w + img.shape[1])] = 1.0

    imH = dcm.shape[0]
    imW = dcm.shape[1]
    pImg = pImg.unsqueeze(0).unsqueeze(0)
    return pImg, roi, h, w, hLoc, wLoc, imH, imW
Example #4
def align(cfa_LR,config={}):
    # configure
    bayer_pattern = my(config,BAYER_PATTERN)
    # average green channels together to estimate y_LR
    # this also downscales image 2x
    if re.match('.gg.',bayer_pattern,re.I):
        g_LR = (cfa_LR[::2,1::2] + cfa_LR[1::2,::2]) / 2.
    else:
        g_LR = (cfa_LR[::2,::2] + cfa_LR[1::2,1::2]) / 2.
    # jack up contrast to exaggerate texture
    g_LR = equalize(g_LR)
    # gather metrics
    (h,w) = g_LR.shape
    h2 = h // 2   # half the height (center of image)
    w2 = w // 2   # half the width (split between image pair)
    w4 = w // 4   # 1/4 the width (center of left image)
    w34 = w2 + w4 # 3/4 the width (center of right image)
    template_size = 32
    ts = template_size
    ts2 = template_size // 2
    out = np.zeros((h, w2)) # FIXME
    for ox in [0, ts, -ts]:
        for i in range(h // ts):
            y = (i * ts)
            # select the center pixels of the left image
            template = g_LR[y:y+ts,(w4+ox)-ts2:(w4+ox)+ts2]
            # now match the template to the corresponding horizontal strip of the right image
            # and accumulate into an output "strips" image
            strip = g_LR[y:y+ts,w2:]
            out[y:y+ts,:] += np.roll(match_template(strip,template,pad_input=True),0-ox,axis=1)
    # sum to horizontal scanline
    scanline = np.sum(out,axis=0)
    padding = w2 // 8 # ignore image edges
    max_x = np.argmax(scanline[padding:-padding]) + padding
    # offset is difference from half the width of each image in the pair
    # upscaled by a factor of 2
    dx = (w4 - max_x) * 2
    return dx
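The green-channel averaging at the top of align() is worth seeing in isolation: for an RGGB (or BGGR) mosaic the two green samples of every 2x2 block sit on opposite corners, so averaging the two slices yields a half-resolution luminance estimate. A tiny self-contained sketch with a synthetic 4x4 RGGB mosaic (not from the original code):

import numpy as np

# synthetic RGGB mosaic: rows alternate R G R G ... and G B G B ...
cfa = np.array([[ 10, 100,  20, 110],
                [200,  30, 210,  40],
                [ 50, 120,  60, 130],
                [220,  70, 230,  80]], dtype=float)

# greens live at [::2, 1::2] and [1::2, ::2]; averaging them
# downscales the image 2x and approximates luminance
g = (cfa[::2, 1::2] + cfa[1::2, ::2]) / 2.0
print(g)  # [[150. 160.]
          #  [170. 180.]]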
Example #5
def equalized(self):
    return equalize(self.rescaled)
The equalized image [2]_ has a roughly linear cumulative distribution function
for each pixel neighborhood. The local version [3]_ of histogram
equalization emphasizes local gray-level variations.

.. [2] http://en.wikipedia.org/wiki/Histogram_equalization
.. [3] http://en.wikipedia.org/wiki/Adaptive_histogram_equalization

"""

import numpy as np
import matplotlib.pyplot as plt

from skimage import data, exposure
from skimage.filter import rank
from skimage.morphology import disk

ima = data.camera()
# equalize globally and locally
glob = exposure.equalize(ima) * 255
loc = rank.equalize(ima, disk(20))

# extract histogram for each image
hist = np.histogram(ima, bins=np.arange(0, 256))
glob_hist = np.histogram(glob, bins=np.arange(0, 256))
loc_hist = np.histogram(loc, bins=np.arange(0, 256))

plt.figure(figsize=(10, 10))
plt.subplot(321)
plt.imshow(ima, cmap=plt.cm.gray, interpolation='nearest')
plt.axis('off')
plt.subplot(322)
plt.plot(hist[1][:-1], hist[0], lw=2)
plt.title('histogram of grey values')
plt.subplot(323)
Example #7
def test_equalize_float():
    img = skimage.img_as_float(test_img)
    img_eq = exposure.equalize(img)

    cdf, bin_edges = exposure.cumulative_distribution(img_eq)
    check_cdf_slope(cdf)
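check_cdf_slope is not shown in these snippets; a plausible sketch of such a helper (an assumption based on its name and on the claim that an equalized image has a roughly linear CDF):

import numpy as np

def check_cdf_slope(cdf):
    """Fit a line through the CDF of the equalized image and require
    its slope to be close to 1 (i.e. the CDF is roughly linear)."""
    norm_intensity = np.linspace(0, 1, len(cdf))
    slope, intercept = np.polyfit(norm_intensity, cdf, 1)
    assert 0.9 < slope < 1.1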
Example #8
def test_equalize_ubyte():
    img_eq = exposure.equalize(test_img)

    cdf, bin_edges = exposure.cumulative_distribution(img_eq)
    check_cdf_slope(cdf)
Example #9
Histogram Equalization
"""

from skimage.util.dtype import dtype_range
from skimage import exposure
import numpy as np
import matplotlib.pyplot as plt

img = oilCropNorth

# Contrast stretching
pl = np.percentile(img, 5)
ph = np.percentile(img, 95)
img_rescale = exposure.rescale_intensity(img, in_range=(pl, ph))

# Equalization
img_eq = exposure.equalize(img)

# display results
plt.subplot(131)
plt.imshow(img, cmap=plt.cm.gray)
plt.axis('off')
plt.title('noisy image', fontsize=20)
plt.subplot(132)
plt.imshow(img_rescale, cmap=plt.cm.gray)
plt.axis('off')
plt.title('Contrast stretching', fontsize=20)
plt.subplot(133)
plt.imshow(img_eq, cmap=plt.cm.gray)
plt.axis('off')
plt.title('Histogram equalization', fontsize=20)
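For reference, the contrast-stretching step above maps the 5th-95th percentile window linearly onto the output range and clips everything outside it; a tiny worked sketch with synthetic values (out_range made explicit, which the example relies on implicitly):

import numpy as np
from skimage import exposure

x = np.array([0.0, 0.2, 0.5, 0.8, 1.0])
pl, ph = np.percentile(x, 5), np.percentile(x, 95)   # 0.04 and 0.96 here
stretched = exposure.rescale_intensity(x, in_range=(pl, ph), out_range=(0.0, 1.0))
# values <= pl map to 0, values >= ph map to 1, the rest is linear:
print(stretched)  # [0.    0.174 0.5   0.826 1.   ] (approximately)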
Example #10
	def eq(self):
		'''
		performs image equalization based on skimage's algorithm
		'''
		self.img = exposure.equalize(self.img)
		self.refreshimg()
The equalized image [2]_ has a roughly linear cumulative distribution function
for each pixel neighborhood. The local version [3]_ of histogram
equalization emphasizes local gray-level variations.

.. [2] http://en.wikipedia.org/wiki/Histogram_equalization
.. [3] http://en.wikipedia.org/wiki/Adaptive_histogram_equalization

"""

import numpy as np
import matplotlib.pyplot as plt

from skimage import data, exposure, img_as_ubyte
from skimage.filter import rank
from skimage.morphology import disk

noisy_image = img_as_ubyte(data.camera())

# equalize globally and locally
glob = exposure.equalize(noisy_image) * 255
loc = rank.equalize(noisy_image, disk(20))

# extract histogram for each image
hist = np.histogram(noisy_image, bins=np.arange(0, 256))
glob_hist = np.histogram(glob, bins=np.arange(0, 256))
loc_hist = np.histogram(loc, bins=np.arange(0, 256))

plt.figure(figsize=(10, 10))

plt.subplot(321)
plt.imshow(noisy_image, interpolation='nearest')
plt.axis('off')

plt.subplot(322)
plt.plot(hist[1][:-1], hist[0], lw=2)
Example #13
def extract_features(image_path_list):
    '''
    Our feature extraction function. It takes in a list of image paths, 
    does some measurement on each image, then returns a list of the image paths
    paired with the results of the feature measurement.
    '''
    feature_list = []
    for image_path in image_path_list:
        image_array = imread(image_path)
        if len(image_array.shape) != 3:
            image_array = color.gray2rgb(image_array)

        # Create a sub-image of the center. Useful for color of subject.
        height, width = image_array.shape[0], image_array.shape[1]
        central_y, central_x = int(round(height/2.0)), int(round(width/2.0))
        central_image_array = image_array[central_y - height // 10 : central_y + height // 10,
                                          central_x - width // 10 : central_x + width // 10, :]

        # crop to a centered square whose side is a multiple of 16
        new_height, new_width = 16 * (height // 16), 16 * (width // 16)
        if new_height < new_width:
            new_width = new_height
        elif new_height > new_width:
            new_height = new_width
        cropped_image_array = image_array[central_y - new_height // 2 : central_y + new_height // 2,
                                          central_x - new_width // 2 : central_x + new_width // 2, :]
        #Equalize the image
        image_array_gray_equalized = exposure.equalize(color.rgb2gray(cropped_image_array))
        # Now zoom it to 128x128 for easier shape processing
        small_square_gray_image = ndimage.interpolation.zoom(image_array_gray_equalized, 128.0/new_height)

        # Create sobel filtered image
        sobel_image = ndimage.filters.sobel(image_array)
        # (1) Number of pixels in the image (super simple)
        n_pixels = image_array.size
        # (2) Median red value
        med_red_val = median(central_image_array[:,:,0])
        # (3) Median green value
        med_green_val = median(central_image_array[:,:,1])
        # (4) Median blue value
        med_blue_val = median(central_image_array[:,:,2])
        # (5) Standard Deviation of red value
        std_red_val = central_image_array[:,:,0].std()
        # (6) Standard Deviation of green value
        std_green_val = central_image_array[:,:,1].std()
        # (7) Standard Deviation of blue value
        std_blue_val = central_image_array[:,:,2].std()
        # (8) Image aspect ratio (width/height)
        aspect_ratio = float(width)/height
        # (9) Otsu histogram threshold
        otsu_thresh = filter.threshold_otsu(image_array)
        # (10, 11, 12) Center of Mass
        center_of_mass_x, center_of_mass_y, center_of_mass_value = ndimage.measurements.center_of_mass(image_array)
        # (13) Ratio of sum of bottom half vs top half of pixels
        vertical_ratio = small_square_gray_image[:64,:].sum()/small_square_gray_image[64:,:].sum()
        # (14) Ratio of sum of left half vs right half of pixels
        horizontal_ratio = small_square_gray_image[:,:64].sum()/small_square_gray_image[:,64:].sum()
        # (15, 16, 17) Location of maximum
        max_x, max_y, max_band = ndimage.measurements.maximum_position(image_array)
        # (18, 19, 20) Location of minimum
        min_x, min_y, min_band = ndimage.measurements.minimum_position(image_array)
        # (21) Median sobel red value
        med_sobel_red_val = median(sobel_image[:,:,0])
        # (22) Median sobel green value
        med_sobel_green_val = median(sobel_image[:,:,1])
        # (23) Median sobel blue value
        med_sobel_blue_val = median(sobel_image[:,:,2])
        # (24) Standard Deviation of sobel red value
        std_sobel_red_val = sobel_image[:,:,0].std()
        # (25) Standard Deviation of sobel green value
        std_sobel_green_val = sobel_image[:,:,1].std()
        # (26) Standard Deviation of sobel blue value
        std_sobel_blue_val = sobel_image[:,:,2].std()
        # (27) Number of Harris points
        n_harris_points = feature.harris(small_square_gray_image, min_distance=6).size
        # (28) Mean of Histogram of Gradients (HOG)
        fd = feature.hog(image_array_gray_equalized, orientations=8, pixels_per_cell=(16, 16), cells_per_block=(1, 1), visualise=False)
        mean_hog = fd.mean()
        # (29) Standard Deviation of Histogram of Gradients (HOG)
        std_hog = fd.std()
        # (30) Ratio of white pixels in skeletonized image
        edge_image = filter.canny(image_array_gray_equalized, sigma=1)
        skeleton_image = morphology.skeletonize(edge_image)
        skeleton_ratio = float(skeleton_image.sum())/skeleton_image.size

        feature_list.append([image_path, 
            n_pixels,
            med_red_val,
            med_green_val,
            med_blue_val,
            std_red_val,
            std_green_val,
            std_blue_val,
            aspect_ratio,
            otsu_thresh,
            center_of_mass_x,
            center_of_mass_y,
            center_of_mass_value,
            vertical_ratio,
            horizontal_ratio,
            max_x, 
            max_y, 
            max_band,
            min_x,
            min_y,
            min_band,
            med_sobel_red_val,
            med_sobel_green_val,
            med_sobel_blue_val,
            std_sobel_red_val,
            std_sobel_green_val,
            std_sobel_blue_val,
            n_harris_points,
            mean_hog,
            std_hog,
            skeleton_ratio])            
    return feature_list
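A minimal usage sketch (the image paths are hypothetical; numpy, scipy.ndimage and the skimage modules referenced in the function are assumed to be imported):

image_paths = ['photos/cat.jpg', 'photos/boat.jpg']   # hypothetical files
rows = extract_features(image_paths)
for row in rows:
    path, n_pixels = row[0], row[1]
    print(path, n_pixels, row[2:5])   # path, pixel count, median R/G/B of the central patch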
    img_cdf, bins = exposure.cumulative_distribution(img, bins)
    ax_cdf.plot(bins, img_cdf, 'r')

    return ax_img, ax_hist, ax_cdf


# Load an example image
img = data.moon()

# Contrast stretching
p2 = np.percentile(img, 2)
p98 = np.percentile(img, 98)
img_rescale = exposure.rescale_intensity(img, in_range=(p2, p98))

# Equalization
img_eq = exposure.equalize(img)


# Display results
f, axes = plt.subplots(2, 3, figsize=(8, 4))

ax_img, ax_hist, ax_cdf = plot_img_and_hist(img, axes[:, 0])
ax_img.set_title('Low contrast image')
ax_hist.set_ylabel('Number of pixels')

ax_img, ax_hist, ax_cdf = plot_img_and_hist(img_rescale, axes[:, 1])
ax_img.set_title('Contrast stretching')

ax_img, ax_hist, ax_cdf = plot_img_and_hist(img_eq, axes[:, 2])
ax_img.set_title('Histogram equalization')
ax_cdf.set_ylabel('Fraction of total intensity')
The equalized image [2]_ has a roughly linear cumulative distribution function
for each pixel neighborhood. The local version [3]_ of histogram
equalization emphasizes local gray-level variations.

.. [2] http://en.wikipedia.org/wiki/Histogram_equalization
.. [3] http://en.wikipedia.org/wiki/Adaptive_histogram_equalization

"""

import numpy as np
import matplotlib.pyplot as plt

from skimage import data, exposure, img_as_ubyte
from skimage.filter import rank
from skimage.morphology import disk

noisy_image = img_as_ubyte(data.camera())

# equalize globally and locally
glob = exposure.equalize(noisy_image) * 255
loc = rank.equalize(noisy_image, disk(20))

# extract histogram for each image
hist = np.histogram(noisy_image, bins=np.arange(0, 256))
glob_hist = np.histogram(glob, bins=np.arange(0, 256))
loc_hist = np.histogram(loc, bins=np.arange(0, 256))

plt.figure(figsize=(10, 10))

plt.subplot(321)
plt.imshow(noisy_image, interpolation='nearest')
plt.axis('off')

plt.subplot(322)
plt.plot(hist[1][:-1], hist[0], lw=2)
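The preprocessing script below reads parser, args.data, args.ext and args.no_equalize without showing how the parser was built; a minimal argparse setup consistent with those attributes (an assumption, not the original code):

import argparse

parser = argparse.ArgumentParser(description='Equalize a folder of images/DICOMs and save them as PNG')
parser.add_argument('--data', required=True, help='input directory, ending with a slash')
parser.add_argument('--ext', default='dcm', help='file extension to look for (e.g. dcm, png)')
parser.add_argument('--no_equalize', action='store_true', help='skip histogram equalization')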
args = parser.parse_args()
t = time.strftime("%Y%m%d_%H_%M")

save_dir = args.data + 'preprocessed_' + t + '/'
if not os.path.exists(save_dir):
    os.mkdir(save_dir)

files = sorted(glob.glob(args.data + '*.' + args.ext))
N = len(files)
print("Found %d files with " % N + args.ext + ' extension')
if args.ext is 'dcm' or 'DCM':
    dicom = True

for fIdx in range(N):
    f = files[fIdx]
    if dicom:
        dcm = pydicom.dcmread(f)
        img = dcm.pixel_array
        img = img / img.max()
        if dcm.PhotometricInterpretation == 'MONOCHROME1':
            img = 1 - img  ### DICOM and PNG have inverted intensities!
    else:
        img = imread(f)
        img = rgb2gray(img)
        img = img / img.max()
    if not args.no_equalize:
        img = equalize(img)
    f = save_dir + f.split('/')[-1].replace(args.ext, 'png')
    imsave(f, img_as_ubyte(img))
    print("Processed %d/%d" % (fIdx + 1, N))
Example #17
"""

"""

from skimage import data, exposure
import matplotlib.pyplot as plt

camera = data.camera()
camera_equalized = exposure.equalize(camera) 



plt.figure(figsize=(7, 3))

plt.subplot(121)
plt.imshow(camera, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(122)
plt.imshow(camera_equalized, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.tight_layout()
plt.show()
The equalized image [2]_ has a roughly linear cumulative distribution function
for each pixel neighborhood. The local version [3]_ of histogram
equalization emphasizes local gray-level variations.

.. [2] http://en.wikipedia.org/wiki/Histogram_equalization
.. [3] http://en.wikipedia.org/wiki/Adaptive_histogram_equalization

"""

import numpy as np
import matplotlib.pyplot as plt

from skimage import data, exposure
from skimage.filter import rank
from skimage.morphology import disk

ima = data.camera()
# equalize globally and locally
glob = exposure.equalize(ima) * 255
loc = rank.equalize(ima, disk(20))

# extract histogram for each image
hist = np.histogram(ima, bins=np.arange(0, 256))
glob_hist = np.histogram(glob, bins=np.arange(0, 256))
loc_hist = np.histogram(loc, bins=np.arange(0, 256))

plt.figure(figsize=(10, 10))
plt.subplot(321)
plt.imshow(ima, cmap=plt.cm.gray, interpolation='nearest')
plt.axis('off')
plt.subplot(322)
plt.plot(hist[1][:-1], hist[0], lw=2)
plt.title('histogram of grey values')
plt.subplot(323)
    ax_hist.set_xlim(xmin, xmax)

    # Display cumulative distribution
    img_cdf, bins = exposure.cumulative_distribution(img, bins)
    ax_cdf.plot(bins, img_cdf, 'r')

    return ax_img, ax_hist, ax_cdf


# Load an example image
img = data.moon()

# Global equalization
img_rescale = exposure.equalize(img)

# Local equalization
selem = disk(30)
img_eq = rank.equalize(img, selem=selem)


# Display results
f, axes = plt.subplots(2, 3, figsize=(8, 4))

ax_img, ax_hist, ax_cdf = plot_img_and_hist(img, axes[:, 0])
ax_img.set_title('Low contrast image')
ax_hist.set_ylabel('Number of pixels')

ax_img, ax_hist, ax_cdf = plot_img_and_hist(img_rescale, axes[:, 1])
ax_img.set_title('Global equalise')