Example #1
#!/usr/bin/env python

# Python 2/3 compatibility
from __future__ import print_function

import cv2

if __name__ == '__main__':
    import sys
    try:
        fn = sys.argv[1]
    except IndexError:
        fn = '../data/fruits.jpg'

    img = cv2.imread(fn)
    if img is None:
        print('Failed to load image file:', fn)
        sys.exit(1)

    # Note: the transform center is (x, y) = (width/2, height/2).
    img2 = cv2.logPolar(img, (img.shape[1]/2, img.shape[0]/2), 40, cv2.WARP_FILL_OUTLIERS)
    img3 = cv2.linearPolar(img, (img.shape[1]/2, img.shape[0]/2), 40, cv2.WARP_FILL_OUTLIERS)

    cv2.imshow('before', img)
    cv2.imshow('logpolar', img2)
    cv2.imshow('linearpolar', img3)

    cv2.waitKey(0)
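
For reference, newer OpenCV releases (the 3.4 series and later) also expose this remapping as cv2.warpPolar, which takes an explicit maxRadius instead of the legacy magnitude scale M. A minimal sketch under that assumption (not part of the sample above; the fixed M=40 used there has no direct maxRadius equivalent):

import cv2

img = cv2.imread('../data/fruits.jpg')
if img is not None:
    h, w = img.shape[:2]
    center = (w / 2, h / 2)                 # (x, y)
    max_radius = 0.5 * min(w, h)            # radius of the disc that gets unwrapped
    log_polar = cv2.warpPolar(img, (w, h), center, max_radius,
                              cv2.INTER_LINEAR + cv2.WARP_POLAR_LOG)
    lin_polar = cv2.warpPolar(img, (w, h), center, max_radius,
                              cv2.INTER_LINEAR)   # linear mapping is the default mode
    cv2.imshow('warpPolar log', log_polar)
    cv2.imshow('warpPolar linear', lin_polar)
    cv2.waitKey(0)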
Example #2
def dilationEstimator(i, subreg01, subreg02, win2D, maxrad, errthresh,
                      iterthresh, dispx, dispy, sclPix):
    # initialize iterations & error
    errval = float(100)
    iteration = 0
    Trev = np.eye(3, dtype=float)
    Tfor = np.eye(3, dtype=float)
    while all((errval > errthresh, iteration < iterthresh)):
        # Reconstruct images based on transform matrices
        sr01 = cv2.warpAffine(subreg01.astype(float), Tfor[0:2, :],
                              (subreg01.shape[1], subreg01.shape[0]),
                              flags=cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS).astype(float)
        sr01 = np.nan_to_num(sr01)
        sr01 -= sr01.mean(axis=(0, 1))
        sr01 = multiply(sr01, win2D)
        sr02 = cv2.warpAffine(subreg02.astype(float), Trev[0:2, :],
                              (subreg01.shape[1], subreg01.shape[0]),
                              flags=cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS).astype(float)
        sr02 = np.nan_to_num(sr02)
        sr02 -= sr02.mean(axis=(0, 1))
        sr02 = multiply(sr02, win2D)
        # Calculate FFTs for image pair
        fft01 = sp.fftpack.fftshift(fft01FMCObj(sr01))
        fft02 = sp.fftpack.fftshift(fft02FMCObj(sr02))
        # Run FMT on FFTs
        fmt01 = multiply(win2D, cv2.logPolar(abs(fft01).astype(float),
                                             (fft01.shape[1] / 2, fft01.shape[0] / 2),
                                             fft01.shape[1] / log(maxrad),
                                             flags=cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS)).astype(float)
        fmt02 = multiply(win2D, cv2.logPolar(abs(fft02).astype(float),
                                             (fft02.shape[1] / 2, fft02.shape[0] / 2),
                                             fft02.shape[1] / log(maxrad),
                                             flags=cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS)).astype(float)
        # Calculate FFTs of FMTs
        fmc01 = fft01FMCObj(fmt01)
        fmc02 = fft02FMCObj(fmt02)
        # Run Translation Subfunc
        trnsdisp = subPixel2D(
            abs(sp.fftpack.fftshift(ifftFMCObj(multiply(fft01, conj(fft02))))))
        # Store new displacement
        dispx[i] += trnsdisp[1]
        dispy[i] += trnsdisp[0]
        # Run Scaling Subfunc
        fmcdisp = subPixel2D(
            abs(sp.fftpack.fftshift(ifftFMCObj(multiply(fmc01, conj(fmc02))))))
        # Store Scale from FMC algorithm
        sclPix[i] += fmcdisp[1]
        # Update Warping Matrix
        Trev[0, 0] = np.sqrt(1 / pow(maxrad, -sclPix[i] / subreg01.shape[1]))
        Trev[1, 1] = np.sqrt(1 / pow(maxrad, -sclPix[i] / subreg01.shape[1]))
        Trev[0, 2] = (1 - Trev[0, 0]) * subreg01.shape[1] / 2 - dispx[i] / 2
        Trev[1, 2] = (1 - Trev[1, 1]) * subreg01.shape[0] / 2 - dispy[i] / 2

        Tfor[0, 0] = np.sqrt(1 * pow(maxrad, -sclPix[i] / subreg02.shape[1]))
        Tfor[1, 1] = np.sqrt(1 * pow(maxrad, -sclPix[i] / subreg02.shape[1]))
        Tfor[0, 2] = (1 - Tfor[0, 0]) * subreg02.shape[1] / 2 + dispx[i] / 2
        Tfor[1, 2] = (1 - Tfor[1, 1]) * subreg02.shape[0] / 2 + dispy[i] / 2
        # Update iteration & error value
        errval = max([sqrt(trnsdisp[1]**2 + trnsdisp[0]**2), abs(fmcdisp[1])])
        iteration += 1
    print("Registering frame %03i, Iter %03i, DispX %03.2f, DispY %03.2f, Scale %03.3f, Error %03.3f" \
              % (i,iteration,np.float(dispx[i]),np.float(dispy[i]),pow(maxrad,-sclPix[i]/subreg01.shape[1]),errval))
    return sclPix, dispx, dispy, sr01, sr02
Example #3
def aplicalogpolar(img, radius=40, x=0, y=0):

    #src2 = cv2.logPolar(img (x, y), radius, cv2.WARP_FILL_OUTLIERS)
    dst = cv2.logPolar(img, (x, y), radius, cv2.WARP_FILL_OUTLIERS)
    src2 = cv2.logPolar(dst, (x, y), radius,
                        cv2.WARP_FILL_OUTLIERS + cv2.WARP_INVERSE_MAP)
    #print ('x:',x)
    #print ('y:',y)
    #print ('r:',radius)
    return src2
Example #4
def convertPolar(zone,_heart,type,zone2=None):
    zoneshape = zone.shape
    zone = cv2.resize(zone, (zoneshape[1] * 8, zoneshape[0] * 8))
    # cv2.imwrite('./v3/6_cut_numZoneCanny.jpg', zone)
    M = zoneshape[1] * 4 / math.log(_heart[2] * 4)
    print(M)
    # Convert to log-polar coordinates
    polar = cv2.logPolar(zone, (_heart[0] * 8, _heart[1] * 8), M, cv2.WARP_FILL_OUTLIERS)
    polar = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)

    if type==2:

        zone2 = cv2.resize(zone2, (zoneshape[1] * 8, zoneshape[0] * 8))
        polar2 = cv2.logPolar(zone2, (_heart[0] * 8, _heart[1] * 8), M, cv2.WARP_FILL_OUTLIERS)
        polar2 = cv2.rotate(polar2, cv2.ROTATE_90_COUNTERCLOCKWISE)

        non_area, area, unused1 = findContours(cv2.dilate(polar, kernel5, iterations=1), polar, dst=polar2)
        # print(non_area)

    else:
        unused, area, unused1 = findContours(cv2.dilate(polar, kernel5, iterations=1), polar)

    cv2.imwrite('./v3/6_cut_polar.jpg', polar)


    numsShape = area.shape
    numsCanny = cv2.resize(area, (numsShape[1] * 4, numsShape[0] * 4))

    threshold, area = cv2.threshold(numsCanny, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    cv2.imwrite('./v3/6_nums_canny.jpg', area)  # straightened strip with tick marks and digits



    if type==1:

        border, ho = getLineBorder(area, 20)
        _kedu = area[border[2]:, :]
        _num = area[border[0]:border[1],:]
    else:
        non_numsCanny = cv2.resize(non_area, (numsShape[1] * 4, numsShape[0] * 4))
        threshold, non_area = cv2.threshold(non_numsCanny, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
        border, ho = getLineBorder(non_area, 20)
        cv2.imwrite('./v3/6_nums_canny.jpg', non_area)  # straightened strip with tick marks and digits
        _num = area[border[2]:border[3], :]
        _kedu = area[border[0]:, :].copy()
        _kedu[border[2]-border[0]:border[3]-border[0], :]=0

    cv2.imwrite('./v3/6_nums_'+str(type)+'.jpg', _num)  # straightened strip with tick marks and digits
    cv2.imwrite('./v3/6_kedu_'+str(type)+'.jpg', _kedu)  # straightened strip with tick marks and digits
    return _kedu,_num
Example #5
def polar_area(src, heart_):
    scale = 4
    res_img = rotateImg(src, 50, heart_)

    zoneshape = src.shape
    zone = cv2.resize(res_img, (zoneshape[1] * scale, zoneshape[0] * scale))
    # cv2.imwrite('./v3/6_cut_numZoneCanny.jpg', zone)
    M = zoneshape[1] * 3 / math.log(heart_[2] * 3)
    polar = cv2.logPolar(zone, (heart_[0] * scale, heart_[1] * scale), M,
                         cv2.WARP_FILL_OUTLIERS)
    polar = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)
    cv2.imwrite('./pointer3/3_polar1.jpg', polar)

    polar_mask = cv2.dilate(polar, kernel7)

    _, contours, hierarchy = cv2.findContours(polar_mask, cv2.RETR_EXTERNAL,
                                              cv2.CHAIN_APPROX_SIMPLE)

    c = sorted(contours, key=cv2.contourArea, reverse=True)[0]
    x, y, w, h = cv2.boundingRect(c)

    area = polar[y:y + h, x:x + w]
    # area = cv2.dilate(area, kernel4)
    cv2.imwrite('./pointer3/3_area.jpg', area)
    return area, [x, y, w, h]
Example #6
def unfold_and_fuse(files,
                    indices,
                    tol,
                    gap,
                    edge,
                    sample_radius,
                    reverse=False):
    left = edge - gap - tol
    right = edge
    if reverse:
        indices.reverse()
    for i, indx in enumerate(indices):
        fl = files[0].replace("00001", str(indx).zfill(5))
        img = cv.imread(fl)
        center = (img.shape[1] / 2, img.shape[0] / 2)
        img2 = cv.logPolar(img, center, sample_radius, cv.WARP_FILL_OUTLIERS)
        if i == 0:
            base_img = img2.copy()
        else:
            section = img2[:, left:right].copy()
            cv.imwrite(fl.replace("frame", "Test"), section)
            base_img = np.append(base_img, base_img[:, :gap], axis=1)
            base_img[:, (left + i * gap):(right + i * gap)] = section[:, :]

    return base_img
Example #7
def getBigArea(none_pointer_img, hearts, need_cut):
    eroded2 = cv2.erode(none_pointer_img, kernel3, iterations=2)
    eroded2 = cv2.dilate(eroded2, kernel5, iterations=5)
    cv2.imwrite('./v1/cut_dilate2.jpg', eroded2)
    unused, numZone_adp, hearts = findContours(eroded2,
                                               need_cut,
                                               hearts,
                                               offset1=85,
                                               offset2=70)
    zoneshape = numZone_adp.shape
    numZone_adp = cv2.resize(numZone_adp, (zoneshape[1] * 8, zoneshape[0] * 8))
    cv2.imwrite('./v1/cut_numZoneCanny.jpg', numZone_adp)
    # Convert to log-polar coordinates
    polar = cv2.logPolar(numZone_adp, (hearts[0][0] * 8, hearts[0][1] * 8),
                         600, cv2.WARP_FILL_OUTLIERS)
    polar = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)
    cv2.imwrite('./v1/cut_polar.jpg', polar)

    unused, numsCanny, unused1 = findContours(cv2.dilate(polar,
                                                         kernel5,
                                                         iterations=1),
                                              polar,
                                              offset1=10,
                                              offset2=20)

    numsShape = numsCanny.shape
    numsCanny = cv2.resize(numsCanny, (numsShape[1] * 4, numsShape[0] * 4))
    threshold, numsCanny = cv2.threshold(numsCanny, 0, 255,
                                         cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    cv2.imwrite('./v1/nums_canny.jpg', numsCanny)  # straightened strip with tick marks and digits
    return numZone_adp, hearts, numsCanny, eroded2
Example #8
def processNonPointer(non_p, cut_mask, one_heart):
    unused, non_numZone_adp, unused = findContours(cut_mask,
                                                   non_p,
                                                   offset1=85,
                                                   offset2=70)
    zoneshape = non_numZone_adp.shape
    non_numZone_adp = cv2.resize(non_numZone_adp,
                                 (zoneshape[1] * 8, zoneshape[0] * 8))
    cv2.imwrite('./v1/cut_non_numZoneCanny.jpg', non_numZone_adp)
    polar_ = cv2.logPolar(non_numZone_adp,
                          (one_heart[0] * 8, one_heart[1] * 8), 600,
                          cv2.WARP_FILL_OUTLIERS)
    polar_ = cv2.rotate(polar_, cv2.ROTATE_90_COUNTERCLOCKWISE)
    cv2.imwrite('./v1/cut_polar_1_non.jpg', polar_)
    unused, numsCanny_non, unused1 = findContours(cv2.dilate(polar_,
                                                             kernel5,
                                                             iterations=2),
                                                  polar_,
                                                  offset1=40,
                                                  offset2=30,
                                                  big_index=0)
    numsShape = numsCanny_non.shape
    numsCanny_non = cv2.resize(numsCanny_non,
                               (numsShape[1] * 4, numsShape[0] * 4))
    threshold, numsCanny_non = cv2.threshold(
        numsCanny_non, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    numsCanny_non = cv2.erode(numsCanny_non, kernel3, iterations=1)
    cv2.imwrite('./v1/nums_canny1_non.jpg', numsCanny_non)
    line_border_, ho = getLineBorder(numsCanny_non, 20)
    kedu_ = numsCanny_non[line_border_[0]:line_border_[1], :]
    # print(ho)
    return numsShape, polar_, line_border_, kedu_, numsCanny_non, ho
Example #9
def polar2linear(center):
    image = cv2.imread('temp/final.png')
    image_polar = cv2.logPolar(image, (center[0], center[1]), 50,
                               cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS)
    image_polar_rotate = rotateImage(image_polar, 90)
    cv2.imwrite("temp/polar.png", image_polar_rotate)
    cv2.waitKey(0)
Example #10
def unwrap_iris(img, inner_center, inner_radius, outer_center, outer_radius):
    height, width = img.shape
    M = width / np.log(outer_radius)

    unwrapped_img = cv2.logPolar(img, outer_center, M, cv2.WARP_FILL_OUTLIERS)

    center_shift_x = np.absolute(outer_center[0] - inner_center[0])
    center_shift_y = np.absolute(outer_center[1] - inner_center[1])

    if center_shift_x > center_shift_y:
        center_shift = center_shift_x
    else:
        center_shift = center_shift_y

    useful_width = (outer_radius - inner_radius) + center_shift

    unwrapped_img = unwrapped_img[:, (width - useful_width):width + 1]

    histo_equilized_img = cv2.equalizeHist(unwrapped_img)

    scaled_img = cv2.resize(histo_equilized_img, (64, 512))

    rotated_img = cv2.rotate(scaled_img, cv2.ROTATE_90_CLOCKWISE)

    return rotated_img
Example #11
def cut_area(src, heart_, type_):

    radio = heart_[2]
    if type_ == 0:
        hei = radio / 3.5
        k = kernel6
        scale = 4
    else:
        hei = radio / 2
        k = kernel5
        scale = 4

    mask = np.zeros(src.shape, np.uint8)
    cv2.circle(mask, (heart_[0], heart_[1]), int(radio + hei), 255, -1)
    cv2.circle(mask, (heart_[0], heart_[1]), radio - 30, 0, -1)

    res_img = cv2.bitwise_and(mask, src)
    # res_img = cv2.dilate(res_img0, kernel3)
    cv2.imwrite('./pointer3/3_mask.jpg', res_img)
    if type_ == 0:
        mask = np.zeros(src.shape, np.uint8)
        cv2.circle(mask, (heart_[0], heart_[1]), int(radio + hei), 255, -1)
        cut = cv2.bitwise_and(mask, src)
        cv2.imwrite('./pointer3/3_mask0.jpg', cut)
    # _, contours, hierarchy = cv2.findContours(res_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    #
    # conts = sorted(contours, key=cv2.contourArea, reverse=True)[0]
    # mask = np.zeros(src.shape, np.uint8)
    # cv2.drawContours(mask,[conts],0,255,-1)
    # cv2.imwrite('./pointer3/3_conts.jpg', mask)
    #
    # res_img = cv2.bitwise_and(mask,res_img0)
    res_img = rotateImg(res_img, 50, heart_)

    zoneshape = src.shape
    zone = cv2.resize(res_img, (zoneshape[1] * scale, zoneshape[0] * scale))
    # cv2.imwrite('./v3/6_cut_numZoneCanny.jpg', zone)
    M = zoneshape[1] * 3 / math.log(radio * 3)
    polar = cv2.logPolar(zone, (heart_[0] * scale, heart_[1] * scale), M,
                         cv2.WARP_FILL_OUTLIERS)
    polar = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)
    cv2.imwrite('./pointer3/3_polar1.jpg', polar)

    polar_mask = cv2.dilate(polar, kernel7)

    _, contours, hierarchy = cv2.findContours(polar_mask, cv2.RETR_EXTERNAL,
                                              cv2.CHAIN_APPROX_SIMPLE)

    c = sorted(contours, key=cv2.contourArea, reverse=True)[0]
    x, y, w, h = cv2.boundingRect(c)

    area = polar[y:y + h, x:x + w]
    # area = cv2.dilate(area, kernel4)
    cv2.imwrite('./pointer3/3_area.jpg', area)
    if type_ == 0:
        return area, cut
    else:
        return area
Example #12
    def to_log_polar(image_gray):
        """
        Convert gray scale image to log polar image.

        :param image_gray: target gray scale image
        :return: log polar image
        """
        (rows, cols) = image_gray.shape
        center = (cols // 2, rows // 2)
        M = rows / (math.log(round(min(rows, cols) / 2)))
        flags = cv2.INTER_LINEAR | cv2.WARP_FILL_OUTLIERS
        return cv2.logPolar(np.float32(image_gray), center, M, flags)
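
A short usage sketch for the helper above, assuming it is reachable as a plain function (it takes no self); the image file name is a placeholder:

import cv2
import math   # used inside to_log_polar

gray = cv2.imread('sample.png', cv2.IMREAD_GRAYSCALE)  # placeholder file name
if gray is not None:
    log_polar = to_log_polar(gray)           # assumes the helper is in scope
    print(log_polar.shape, log_polar.dtype)  # same height/width as the input, float32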
Example #13
    def compute(self):
        self.roi = self._crop_roi()

        # Retina image transforms (Parvo & Magnocellular).
        self.retina.run(self.roi)
        parvo = self.retina.getParvo()
        magno = self.retina.getMagno()

        # Log Polar Transform.
        center = self.retina_diameter / 2
        M = self.retina_diameter * self.fovea_scale
        parvo = cv2.logPolar(parvo,
                             center=(center, center),
                             M=M,
                             flags=cv2.WARP_FILL_OUTLIERS)
        magno = cv2.logPolar(magno,
                             center=(center, center),
                             M=M,
                             flags=cv2.WARP_FILL_OUTLIERS)
        # scipy.misc.imresize was removed from SciPy; cv2.resize is the closest
        # replacement (note it preserves the input dtype instead of returning uint8).
        parvo = cv2.resize(parvo, (self.output_diameter, self.output_diameter))
        magno = cv2.resize(magno, (self.output_diameter, self.output_diameter))

        # Apply rotation by rolling the images along axis 0.
        rotation = self.output_diameter * self.orientation / (2 * math.pi)
        rotation = int(round(rotation))
        self.parvo = np.roll(parvo, rotation, axis=0)
        self.magno = np.roll(magno, rotation, axis=0)

        # Encode images into SDRs.
        p = self.parvo_enc.encode(self.parvo)
        pr, pg, pb = np.dsplit(p, 3)
        p = np.logical_and(np.logical_and(pr, pg), pb)
        p = np.expand_dims(np.squeeze(p), axis=2)
        m = self.magno_enc.encode(self.magno)
        sdr = np.concatenate([p, m], axis=2)
        self.output_sdr.dense = sdr
        return self.output_sdr
Example #14
    def match_new(self, newImg):
        self.cmp = newImg
        height, width = self.cmp.shape
        cy, cx = height / 2, width / 2
        G_b = np.fft.fft2(self.cmp * self.hanw)
        self.LB = np.fft.fftshift(np.log(np.absolute(G_b) + 1))
        self.LPB = cv2.logPolar(self.LB, (cy, cx),
                                self.Mag,
                                flags=cv2.INTER_LINEAR +
                                cv2.WARP_FILL_OUTLIERS)
        self.LPB_filt = self.LPB * self.Mask
        # 1.4: Phase Correlate to Get Rotation and Scaling
        Diff, peak, self.r_rotatescale = self.PhaseCorrelation(
            self.LPA_filt, self.LPB_filt)
        theta1 = 2 * math.pi * Diff[1] / height  # rotation estimate, in radians
        theta2 = theta1 + math.pi  # theta is ambiguous by pi
        invscale = math.exp(Diff[0] / self.Mag)
        # 2.1: Correct rotation and scaling
        b1 = self.Warp_4dof(self.cmp, [0, 0, theta1, invscale])
        b2 = self.Warp_4dof(self.cmp, [0, 0, theta2, invscale])

        # 2.2 : Translation estimation
        diff1, peak1, self.r1 = self.PhaseCorrelation(
            self.ref, b1)  #diff1, peak1 = PhaseCorrelation(a,b1)
        diff2, peak2, self.r2 = self.PhaseCorrelation(
            self.ref, b2)  #diff2, peak2 = PhaseCorrelation(a,b2)
        # Use cv2.phaseCorrelate(a,b1) because it is much faster

        # 2.3: Compare peaks and choose true rotational error
        if peak1 > peak2:
            Trans = diff1
            peak = peak1
            theta = -theta1
        else:
            Trans = diff2
            peak = peak2
            theta = -theta2

        if theta > math.pi:
            theta -= math.pi * 2
        elif theta < -math.pi:
            theta += math.pi * 2

        # Results
        self.param = [Trans[0], Trans[1], theta, 1 / invscale]
        self.peak = peak
        self.perspective = self.poc2warp(self.center, self.param)
        self.affine = self.perspective[0:2, :]
Example #15
def polarkoordinatensystem_konverter(src):
    src = cv2.imread(src)

    maxRadius = math.sqrt(
        math.pow(src.shape[0], 2) + math.pow(src.shape[1], 2)) / 2
    magnitude = src.shape[0] / math.log(maxRadius)
    center = (src.shape[0] / 2, src.shape[1] / 2)
    polar = cv2.logPolar(src, center, magnitude, cv2.INTER_AREA)

    cv2.imshow('Polarkoordinatensystem', polar)
    cv2.waitKey(0)
    cv2.destroyAllWindows()

    cv2.imwrite('log_polar_kovertiert.bmp', polar)  # save
Example #16
def get_img_fft(imagedir):
    imgs_path = imagedir
    #glob.glob(os.path.join(imagedir, '*.jpg'))
    for img_path in imgs_path:
        img = cv2.imread(img_path, 0)
        fft = np.fft.fft2(img)
        fft_shift = np.fft.fftshift(fft)
        s2 = np.log(np.abs(fft_shift))
        y, x = img.shape
        polar = cv2.logPolar(
            s2, (y // 2, x // 2), 40,
            cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS + cv2.WARP_INVERSE_MAP)
        plt.subplot(131), plt.imshow(img, 'gray'), plt.title('original')
        plt.subplot(132), plt.imshow(s2, 'gray'), plt.title('center')
        plt.subplot(133), plt.imshow(polar, 'gray'), plt.title('polar')
        plt.show()
Example #17
    def init(self, first_frame, bbox):
        first_frame = first_frame.astype(np.float32)
        bbox = np.array(bbox).astype(np.int64)
        x, y, w, h = tuple(bbox)
        self._center = (x + w / 2, y + h / 2)
        self.w, self.h = w, h
        self.crop_size = (int(w * (1 + self.padding)),
                          int(h * (1 + self.padding)))
        self.base_target_size = (self.w, self.h)
        self.target_sz = (self.w, self.h)
        self._window = cos_window(self.crop_size)
        output_sigma = np.sqrt(self.w * self.h) * self.output_sigma_factor
        self.y = gaussian2d_labels(self.crop_size, output_sigma)
        self._init_response_center = np.unravel_index(
            np.argmax(self.y, axis=None), self.y.shape)
        self.yf = fft2(self.y)

        patch = self.get_sub_window(first_frame, self._center, self.crop_size,
                                    self.sc)
        xl = self.get_feature_map(patch)
        xl = xl * self._window[:, :, None]
        self.xlf = fft2(xl)
        self.hf_den = np.sum(self.xlf * np.conj(self.xlf), axis=2)
        self.hf_num = self.yf[:, :, None] * np.conj(self.xlf)

        avg_dim = (w + h) / 2.5
        self.scale_sz = ((w + avg_dim) / self.sc, (h + avg_dim) / self.sc)
        self.scale_sz0 = self.scale_sz
        self.cos_window_scale = cos_window(
            (self.scale_sz_window[0], self.scale_sz_window[1]))
        self.mag = self.cos_window_scale.shape[0] / np.log(
            np.sqrt((self.cos_window_scale.shape[0]**2 +
                     self.cos_window_scale.shape[1]**2) / 4))

        # scale lp
        patchL = cv2.getRectSubPix(first_frame,
                                   (int(np.floor(self.sc * self.scale_sz[0])),
                                    int(np.floor(self.sc * self.scale_sz[1]))),
                                   self._center)
        patchL = cv2.resize(patchL, self.scale_sz_window)
        patchLp = cv2.logPolar(patchL.astype(np.float32),
                               ((patchL.shape[1] - 1) / 2,
                                (patchL.shape[0] - 1) / 2),
                               self.mag,
                               flags=cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS)

        self.model_patchLp = extract_hog_feature(patchLp, cell_size=4)
Example #18
def curvilinear_to_linear(c_image):  # TODO Rewrite hacky code
    """Convert curvilinear data to linear data to aid analysis"""
    color = [0]  # black border
    # border widths (top, bottom, left, right)
    top, bottom, left, right = [0, 2000, 0, 2000]
    img_with_border = cv2.copyMakeBorder(c_image,
                                         top,
                                         bottom,
                                         left,
                                         right,
                                         cv2.BORDER_CONSTANT,
                                         value=color)
    dst = cv2.logPolar(img_with_border, (392.72189105, -307.7655977), 400,
                       cv2.WARP_FILL_OUTLIERS)
    # toimage(dst).show()
    rows, cols = dst.shape
    M = cv2.getRotationMatrix2D((cols / 2, rows / 2), 90, 1)
    dst = cv2.warpAffine(dst, M, (cols, rows))
    dst = dst[353:468, 553:980]
    return dst
Example #19
    def update(self, im, pos, base_target_sz, current_scale_factor):
        patchL = cv2.getRectSubPix(
            im, (int(np.floor(current_scale_factor * self.scale_sz[0])),
                 int(np.floor(current_scale_factor * self.scale_sz[1]))), pos)
        patchL = cv2.resize(patchL, self.scale_sz_window)
        # convert into log-polar coordinates
        patchLp = cv2.logPolar(patchL.astype(np.float32),
                               ((patchL.shape[1] - 1) / 2,
                                (patchL.shape[0] - 1) / 2),
                               self.mag,
                               flags=cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS)
        patchLp = extract_hog_feature(patchLp, cell_size=4)
        tmp_sc, _, _ = self.estimate_scale(self.model_patchLp, patchLp,
                                           self.mag)
        tmp_sc = np.clip(tmp_sc, a_min=0.6, a_max=1.4)
        scale_factor = current_scale_factor * tmp_sc
        self.model_patchLp = (
            1 - self.learning_rate_scale
        ) * self.model_patchLp + self.learning_rate_scale * patchLp
        return scale_factor
Example #20
def logPolar(im,center=None,radius=None,M=None,size=(64,128)):
    '''
    Produce a log polar transform of the image.  See OpenCV for details.
    The scale space is calculated based on radius or M.  If both are given 
    M takes priority.
    '''
    #M=1.0
    w,h = im.size
    if radius is None:
        radius = 0.5*min(w,h)

    if M is None:
        #rho=M*log(sqrt(x2+y2))
        #size[0] = M*log(r)
        M = size[0]/np.log(radius)

    if center is None:
        center = pv.Point(0.5*w,0.5*h)
    src = im.asOpenCV2()
    dst = cv2.logPolar( src, center.asOpenCV(), M, cv2.INTER_LINEAR+cv2.WARP_FILL_OUTLIERS )
    return pv.Image(dst)
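
A hypothetical usage sketch for the PyVision wrapper above; it assumes the pyvision package (imported as pv in the surrounding module) and a placeholder image file:

import pyvision as pv

im = pv.Image('face.jpg')           # placeholder file name
lp = logPolar(im, size=(64, 128))   # M defaults to size[0]/log(0.5*min(w, h))
print(lp.size)                      # pv.Image exposes .size, as the wrapper itself relies on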
Example #21
def centers_to_square(file_name):
    file = path.join(
        train_path,
        file_name  #'c4482b38-bbbc-11e8-b2ba-ac1f6b6435d0'
        #'004d8a0e-bbc4-11e8-b2bc-ac1f6b6435d0'
    )
    image_blue_ch = np.array(Image.open(file + '_blue.png'))
    image_red_ch = np.array(Image.open(file + '_red.png'))
    dtr = 64
    centers = find_centers(image_blue_ch, dtr)

    for center in centers:
        # skip centers closer than dtr to any image border
        if center[1] < dtr or center[1] > 512 - dtr or center[0] < dtr or center[0] > 512 - dtr:
            continue
        img = image_red_ch[center[1] - dtr:center[1] + dtr,
                           center[0] - dtr:center[0] + dtr]
        cv2.imshow("Image", img)
        cv2.waitKey(0)
        dst = cv2.logPolar(img, (dtr, dtr), dtr / 3, cv2.INTER_LINEAR)
        cv2.imshow("Image", dst)
        cv2.waitKey(0)
Example #22
def main():
    import sys
    try:
        fn = sys.argv[1]
    except IndexError:
        fn = 'fruits.jpg'

    img = cv.imread(cv.samples.findFile(fn))
    if img is None:
        print('Failed to load image file:', fn)
        sys.exit(1)

    # Note: the transform center is (x, y) = (width/2, height/2).
    img2 = cv.logPolar(img, (img.shape[1]/2, img.shape[0]/2), 40, cv.WARP_FILL_OUTLIERS)
    img3 = cv.linearPolar(img, (img.shape[1]/2, img.shape[0]/2), 40, cv.WARP_FILL_OUTLIERS)

    cv.imshow('before', img)
    cv.imshow('logpolar', img2)
    cv.imshow('linearpolar', img3)

    cv.waitKey(0)
    print('Done')
Example #23
def main():
    import sys
    try:
        fn = sys.argv[1]
    except IndexError:
        fn = 'data/fruits.jpg'

    img = cv.imread(cv.samples.findFile(fn))
    if img is None:
        print('Failed to load image file:', fn)
        sys.exit(1)

    # Note: the transform center is (x, y) = (width/2, height/2).
    img2 = cv.logPolar(img, (img.shape[1]/2, img.shape[0]/2), 40, cv.WARP_FILL_OUTLIERS)
    img3 = cv.linearPolar(img, (img.shape[1]/2, img.shape[0]/2), 40, cv.WARP_FILL_OUTLIERS)

    cv.imshow('before', img)
    cv.imshow('logpolar', img2)
    cv.imshow('linearpolar', img3)

    cv.waitKey(0)
    print('Done')
Example #24
def straightenCircle(img):
    circles = cv2.HoughCircles(img,
                               method=cv2.HOUGH_GRADIENT,
                               dp=1,
                               minDist=3,
                               circles=None,
                               param1=200,
                               param2=100,
                               minRadius=200,
                               maxRadius=0)

    # Get the mean of centers and do offset
    circles = np.int0(np.array(circles))
    x, y, r = 0, 0, 0
    canvas = img.copy()
    for ptx, pty, radius in circles[0]:
        cv2.circle(canvas, (ptx, pty), radius, (0, 255, 0), 1, 16)
        x += ptx
        y += pty
        r += radius

    cnt = len(circles[0])
    x = x // cnt
    y = y // cnt
    r = r // cnt
    x += 5
    y -= 7

    # Draw concentric reference circles in red
    for r in range(100, r, 20):
        cv2.circle(canvas, (x, y), r, (0, 0, 255), 3, cv2.LINE_AA)
    cv2.circle(canvas, (x, y), 3, (0, 0, 255), -1)
    # (5) Crop the image
    dr = r + 20
    croped = img[y - dr:y + dr + 1, x - dr:x + dr + 1].copy()
    # (6) logPolar and rotate
    polar = cv2.logPolar(croped, (dr, dr), 60, cv2.WARP_FILL_OUTLIERS)
    rotated = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)
    rotated = rotated[100:170, 0:490].copy()
    return rotated
Example #25
def polar_num(src, heart_, sets, index):
    scale = 4
    res_img = rotateImg(src, 50, heart_)

    zoneshape = src.shape
    zone = cv2.resize(res_img, (zoneshape[1] * scale, zoneshape[0] * scale))
    # cv2.imwrite('./v3/6_cut_numZoneCanny.jpg', zone)
    M = zoneshape[1] * 3 / math.log(heart_[2] * 3)
    polar = cv2.logPolar(zone, (heart_[0] * scale, heart_[1] * scale), M,
                         cv2.WARP_FILL_OUTLIERS)
    polar = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)
    x, y, w, h = sets
    polar = polar[y - int(h / 3):y + int(h / 3), x:x + w + 100]
    polar_mask = cv2.dilate(polar, kernel9)
    cv2.imwrite('./pointer3/4_polar_num.jpg', polar_mask)
    _, contours, hierarchy = cv2.findContours(polar_mask, cv2.RETR_EXTERNAL,
                                              cv2.CHAIN_APPROX_SIMPLE)
    nums_set = []
    for c in contours:
        x, y, w, h = cv2.boundingRect(c)
        nums_set.append([x, y, w, h])
    # print(nums_set)
    nums_set = sorted(nums_set, key=lambda a: a[0])
    # print(nums_set)
    x, y, w, h = nums_set[0]
    num1 = polar[y:y + h, x:x + w]

    x, y, w, h = nums_set[-1]
    num2 = polar[y:y + h, x:x + w]

    if index == 1:
        num1 = rotateImg(num1, 180)
        num2 = rotateImg(num2, 180)

    cv2.imwrite('./pointer3/4_num1.jpg', num1)
    cv2.imwrite('./pointer3/4_num2.jpg', num2)
    recog_num(num1)
    # recog_num(num2)
    return nums_set, num1, num2
Example #26
def logPolar(im,center=None,radius=None,M=None,size=(64,128)):
    '''
    Produce a log polar transform of the image.  See OpenCV for details.
    The scale space is calculated based on radius or M.  If both are given 
    M takes priority.
    '''
    #M=1.0
    w,h = im.size
    if radius is None:
        radius = 0.5*min(w,h)

    if M is None:
        #rho=M*log(sqrt(x2+y2))
        #size[0] = M*log(r)
        M = size[0]/np.log(radius)

    if center is None:
        center = pv.Point(0.5*w,0.5*h)
    src = im.asOpenCV2()
    import cv2
    dst = cv2.logPolar( src, center.asOpenCV(), M, cv2.INTER_LINEAR+cv2.WARP_FILL_OUTLIERS )
    return pv.Image(dst)
Example #27
def processSmallKedu(zone2polar, one_heart, numsShape, polar_, line_border_):
    polar = cv2.logPolar(zone2polar, (one_heart[0] * 8, one_heart[1] * 8), 600,
                         cv2.WARP_FILL_OUTLIERS)
    polar = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)
    cv2.imwrite('./v1/cut_polar_1.jpg', polar)
    unused, numsCanny_1, unused1 = findContours(cv2.dilate(polar_,
                                                           kernel5,
                                                           iterations=2),
                                                polar,
                                                offset1=40,
                                                offset2=30,
                                                big_index=0)
    numsCanny_1 = cv2.resize(numsCanny_1, (numsShape[1] * 4, numsShape[0] * 4))
    cv2.imwrite('./v1/nums_canny1.jpg', numsCanny_1)
    kedu_1_pointer = numsCanny_1[line_border_[0]:line_border_[1] + 60, :]
    kedu_1_pointer = cv2.erode(kedu_1_pointer, kernel3)
    cv2.imwrite('./v1/nums_kedu_1_pointer.jpg', kedu_1_pointer)
    ver_ = projectVertical(kedu_1_pointer)
    (h1, w1) = kedu_1_pointer.shape
    newHorizon_ = np.zeros([h1, w1], np.uint8)

    for i in range(0, w1):
        for j in range(0, ver_[i]):
            newHorizon_[j, i] = 255

    cv2.imwrite('./v1/nums_kedu_111.jpg', newHorizon_)
    maxtab, mintab = peakdetective.peakdet(ver_, 30)
    # from matplotlib.pyplot import plot, scatter, show
    # plot(ver)
    # scatter(np.array(maxtab)[:, 0], np.array(maxtab)[:, 1], color='blue')
    # scatter(np.array(mintab)[:, 0], np.array(mintab)[:, 1], color='red')
    # show()
    k_res = list(maxtab[:, 1])
    # print(res.index(max(res)),len(res))
    k_pos = k_res.index(max(k_res))
    k_len = len(k_res)
    print('position', k_res.index(max(k_res)), 'total tick marks:', (len(k_res) - 1))
    return k_pos, k_len
Example #28
def polar_transform(images, transform_type='linearpolar'):
    """
    This function takes a batch of images and applies a polar coordinate conversion to each of them.
    """

    (N, C, H, W) = images.shape

    for i in range(images.shape[0]):

        img = images[i].numpy()  # [C,H,W]
        img = np.transpose(img, (1, 2, 0))  # [H,W,C]

        if transform_type == 'logpolar':
            img = cv.logPolar(img, (H // 2, W // 2), W / math.log(W / 2),
                              cv.WARP_FILL_OUTLIERS).reshape(H, W, C)
        elif transform_type == 'linearpolar':
            img = cv.linearPolar(img, (H // 2, W // 2), W / 2,
                                 cv.WARP_FILL_OUTLIERS).reshape(H, W, C)
        img = np.transpose(img, (2, 0, 1))

        images[i] = torch.from_numpy(img)

    return images
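
A minimal usage sketch for polar_transform, assuming the imports the snippet itself relies on (cv2 as cv, math, numpy, torch); the batch shape is illustrative:

import math
import cv2 as cv
import numpy as np
import torch

batch = torch.rand(4, 3, 64, 64)   # [N, C, H, W] float32 images in [0, 1]
warped = polar_transform(batch, transform_type='logpolar')
print(warped.shape)                # torch.Size([4, 3, 64, 64]); the batch is modified in place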
Example #29
def cut_area2(src, heart_, type_):
    # src = cv2.cvtColor(src,cv2.COLOR_BGR2GRAY)
    radio = heart_[2]
    hei = radio / 2
    k = kernel5
    scale = 4
    mask = np.zeros(src.shape, np.uint8)
    cv2.circle(mask, (heart_[0], heart_[1]), int(radio + hei), 255, -1)
    cv2.circle(mask, (heart_[0], heart_[1]), radio - 30, 0, -1)

    res_img = cv2.bitwise_and(mask, src)
    # res_img = cv2.dilate(res_img0, kernel3)
    cv2.imwrite('./pointer3/3_0_mask.jpg', res_img)
    res_img = rotateImg(res_img, 50, heart_)

    zoneshape = src.shape
    zone = cv2.resize(res_img, (zoneshape[1] * scale, zoneshape[0] * scale))
    # cv2.imwrite('./v3/6_cut_numZoneCanny.jpg', zone)
    M = zoneshape[1] * 3 / math.log(radio * 3)
    polar = cv2.logPolar(zone, (heart_[0] * scale, heart_[1] * scale), M,
                         cv2.WARP_FILL_OUTLIERS)
    polar = cv2.rotate(polar, cv2.ROTATE_90_COUNTERCLOCKWISE)
    polar = cv2.Canny(polar, 80, 100)
    cv2.imwrite('./pointer3/3_0_polar1.jpg', polar)
Example #30
    def init(self, im, pos, base_target_sz, current_scale_factor):
        w, h = base_target_sz
        avg_dim = (w + h) / 2.5
        self.scale_sz = ((w + avg_dim) / current_scale_factor,
                         (h + avg_dim) / current_scale_factor)
        self.scale_sz0 = self.scale_sz
        self.cos_window_scale = cos_window(
            (self.scale_sz_window[0], self.scale_sz_window[1]))
        self.mag = self.cos_window_scale.shape[0] / np.log(
            np.sqrt((self.cos_window_scale.shape[0]**2 +
                     self.cos_window_scale.shape[1]**2) / 4))

        # scale lp
        patchL = cv2.getRectSubPix(
            im, (int(np.floor(current_scale_factor * self.scale_sz[0])),
                 int(np.floor(current_scale_factor * self.scale_sz[1]))), pos)
        patchL = cv2.resize(patchL, self.scale_sz_window)
        patchLp = cv2.logPolar(patchL.astype(np.float32),
                               ((patchL.shape[1] - 1) / 2,
                                (patchL.shape[0] - 1) / 2),
                               self.mag,
                               flags=cv2.INTER_LINEAR + cv2.WARP_FILL_OUTLIERS)

        self.model_patchLp = extract_hog_feature(patchLp, cell_size=4)
Example #31
def logpolar(src, center, magnitude_scale=40):
    return cv2.logPolar(
        src, center, magnitude_scale, cv2.INTER_CUBIC + cv2.WARP_FILL_OUTLIERS
    )
Example #32
#!/usr/bin/env python
#coding:utf-8
# Note: this script tests the phaseCorrelate algorithm built into OpenCV
import cv2
import numpy as np

if __name__ == '__main__':
    img_src = cv2.imread('img_raw.png', 0)
    img_dst = cv2.imread("img_r_t.png", 0)
    rows, cols = img_src.shape

    polar_src = img_src
    polar_dst = img_dst

    polar_src = cv2.logPolar(img_src,
                             (img_src.shape[0] / 2, img_src.shape[1] / 2), 70,
                             cv2.WARP_FILL_OUTLIERS + cv2.INTER_LINEAR)
    polar_dst = cv2.logPolar(img_dst,
                             (img_dst.shape[0] / 2, img_dst.shape[1] / 2), 70,
                             cv2.WARP_FILL_OUTLIERS + cv2.INTER_LINEAR)
    polar_src = np.float32(polar_src)
    polar_dst = np.float32(polar_dst)
    r = cv2.phaseCorrelate(polar_src, polar_dst)
    yaw = r[0][1] * 180 / (img_src.shape[1] / 2)
    print "yaw:", yaw
    M = cv2.getRotationMatrix2D((cols / 2, rows / 2), yaw, 1)
    img_r = cv2.warpAffine(img_src, M, (cols, rows))
    img_r = np.float32(img_r)
    img_dst = np.float32(img_dst)
    t = cv2.phaseCorrelate(img_r, img_dst)
    print "translation:", t
Example #33
(x, y), radius = cv2.minEnclosingCircle(cnt)
center = (int(x), int(y))
radius = int(radius)
img = cv2.imread('oko01.png')
img = cv2.circle(img, center, radius, (0, 255, 0), 2)
print(radius)
contours, _ = cv2.findContours(bin_img_teczowka, 1, 2)
cnt = contours[1]
(x, y), radius = cv2.minEnclosingCircle(cnt)
center = (int(x), int(y))
radius = int(radius)
print(radius)

img = cv2.circle(img, center, radius, (0, 0, 255), 2)
#first option
img2 = cv2.linearPolar(img,
                       center=(x, y),
                       maxRadius=52,
                       flags=cv2.WARP_FILL_OUTLIERS)
#second option
img3 = cv2.logPolar(img, center=(x, y), M=52, flags=cv2.WARP_FILL_OUTLIERS)

plt.imshow(img)
plt.show()

plt.imshow(img2)
plt.show()

plt.imshow(img3)
plt.show()
def logpolar(src, center, magnitude_scale=40):
    return cv2.logPolar(src, center, magnitude_scale,
                        cv2.INTER_CUBIC + cv2.WARP_FILL_OUTLIERS)