Example #1
def fmatrix(xy1, xy2):
    # Normalize the coordinates of the features
    T1 = normalization_matrix(xy1)
    T2 = normalization_matrix(xy2)
    xy1_T1 = dot(xy1, T1.T)  # T1 * X
    xy2_T2 = dot(xy2, T2.T)  # T2 * X'

    # RANSAC to reject outliers and LM initialized with RANSAC output
    F0_T, inliers = ransac(FundamentalMatrixModel(), xy1_T1, xy2_T2, 8, 1e-3)
    F0 = dot(T2.T, dot(F0_T, T1))

    Fp0, P1, P2 = params_from_fmatrix(F0)

    xy1_inliers = dot(xy1, P1.T)[inliers]
    xy2_inliers = dot(xy2, P2.T)[inliers]
    PZPT1 = dot(P1, dot(diag([1, 1, 0]), P1.T))
    PZPT2 = dot(P2, dot(diag([1, 1, 0]), P2.T))
    Fp, ier = opt.leastsq(lambda params: fundamental_matrix_error(
        fmatrix_from_params(params), PZPT1, PZPT2, xy1_inliers, xy2_inliers),
                          Fp0,
                          xtol=1e-6,
                          ftol=1e-6,
                          maxfev=15000)
    F = dot(P2.T, dot(fmatrix_from_params(Fp), P1))

    del xy1_T1, xy2_T2
    return stabilize(normalize_norm(F)), inliers
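The normalization_matrix helper used above is not shown. A minimal sketch, assuming the standard Hartley normalization (translate the centroid to the origin, scale so the mean distance from the origin is sqrt(2)) and homogeneous points stored as rows, matching the dot(xy1, T1.T) calls:

import numpy as np

def normalization_matrix(xy):
    """Hartley normalization for an (N, 3) array of homogeneous row points.

    Sketch only: the original helper is not shown, so the exact convention
    (mean vs. RMS distance) may differ.
    """
    pts = xy[:, :2] / xy[:, 2:3]                  # inhomogeneous coordinates
    centroid = pts.mean(axis=0)
    mean_dist = np.mean(np.linalg.norm(pts - centroid, axis=1))
    s = np.sqrt(2) / mean_dist                    # mean distance becomes sqrt(2)
    return np.array([[s, 0, -s * centroid[0]],
                     [0, s, -s * centroid[1]],
                     [0, 0, 1]])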
Example #2
def matches(img1, img2):
    """ Finds matches, then returns a homography using RANSAC. """

    hessian_thresh = 500  # higher values are faster but yield fewer keypoints
    surf = cv2.SURF(hessian_thresh, upright=True)
    # SURF keypoints and descriptors
    kp1, des1 = surf.detectAndCompute(img1, None)
    kp2, des2 = surf.detectAndCompute(img2, None)
    # Find matches using k nearest neighbours
    FLANN_INDEX_KDTREE = 1
    index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
    search_params = dict(checks=10)
    flann = cv2.FlannBasedMatcher(index_params, search_params)
    matches = flann.knnMatch(des1, des2, k=2)

    # Keep only the best matches (Lowe's ratio test)
    best_m = []
    for a, b in matches:
        if a.distance < 0.7 * b.distance:
            best_m.append(a)

    img1_pts = np.float32([kp1[m.queryIdx].pt for m in best_m]).reshape(-1, 1, 2)
    img2_pts = np.float32([kp2[m.trainIdx].pt for m in best_m]).reshape(-1, 1, 2)

    w = len(best_m)
    img1p = np.zeros((2, w))
    img2p = np.zeros((2, w))

    # Repack the matched points as 2 x N arrays for the RANSAC helper
    i = 0
    for m in img1_pts:
        img1p[0][i] = m[0][0]
        img1p[1][i] = m[0][1]
        i += 1

    i = 0
    for m in img2_pts:
        img2p[0][i] = m[0][0]
        img2p[1][i] = m[0][1]
        i += 1

    H, inliers, inliers2 = ransac(img1p, img2p, 75, 3.0)
    return H
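The ransac helper called at the end of this example is project-specific and not shown here. For comparison, a minimal sketch of the same step done with OpenCV's built-in RANSAC homography estimator on the img1_pts / img2_pts arrays built above; the 3.0-pixel reprojection threshold mirrors the original call, and the mapping direction returned by the custom helper is an assumption:

# Sketch: equivalent estimation with OpenCV's RANSAC instead of the custom helper
H, mask = cv2.findHomography(img1_pts, img2_pts, cv2.RANSAC, 3.0)
inlier_matches = [m for m, keep in zip(best_m, mask.ravel()) if keep]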
Example #3
def fitLineWithRansac(points, distance_cutoff):
    """Fit a 2D line to points with RANSAC; returns [model, inliers]."""
    best_model, best_inliers = ransac(
        points, 2, LineModel,
        lambda model, pt: model.distanceToPointSquared(pt) < distance_cutoff ** 2)
    return [best_model, best_inliers]
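Neither the generic ransac driver nor LineModel is shown in this example. A minimal sketch of a driver with the same call signature (points, sample size, model class, inlier predicate); the convention that the model class fits itself from the sample, the assumption that points is a list, and the iteration count are all illustrative:

import random

def ransac(points, sample_size, model_cls, is_inlier, iterations=1000):
    """Generic RANSAC loop: keep the model supported by the most inliers.

    Sketch only -- assumes model_cls(sample) fits a model to sample_size
    points and is_inlier(model, pt) tests a single point.
    """
    best_model, best_inliers = None, []
    for _ in range(iterations):
        sample = random.sample(points, sample_size)
        model = model_cls(sample)
        inliers = [pt for pt in points if is_inlier(model, pt)]
        if len(inliers) > len(best_inliers):
            best_model, best_inliers = model, inliers
    return [best_model, best_inliers]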
Example #4
def main():
    im1 = cv2.imread('./dataset/a1.jpg', 0)
    im2 = cv2.imread('./dataset/a2.jpg', 0)
    im1_copy = im1.copy()
    im1 = cv2.copyMakeBorder(im1, 200, 200, 500, 500, cv2.BORDER_CONSTANT)

    sift = cv2.xfeatures2d.SIFT_create()
    kp1, des1 = sift.detectAndCompute(im1, None)
    kp2, des2 = sift.detectAndCompute(im2, None)

    matcher = cv2.BFMatcher(cv2.NORM_L2, True)
    matches = matcher.match(des1, des2)

    correspondenceList = []

    for m in matches:
        (x1, y1) = kp1[m.queryIdx].pt
        (x2, y2) = kp2[m.trainIdx].pt
        correspondenceList.append([x1, y1, x2, y2])
    src_pts = np.float32([kp1[m.queryIdx].pt
                          for m in matches]).reshape(-1, 1, 2)
    dst_pts = np.float32([kp2[m.trainIdx].pt
                          for m in matches]).reshape(-1, 1, 2)

    best = -1
    corrs = np.matrix(correspondenceList)
    out_arr, inliers = ransac(corrs, 5.0)
    print(len(out_arr))
    mi = np.inf
    final_image = None
    # For each candidate homography, warp im2, blend along the seam and
    # score the seam error; keep the candidate with the lowest error.
    for i in trange(len(out_arr)):
        out_ransac = out_arr[i]
        out = cv2.warpPerspective(im2, scipy.linalg.inv(out_ransac),
                                  (im1.shape[1], im1.shape[0]))
        output = np.zeros_like(im1)
        (rows, cols) = im1.shape
        seam = []
        for r in range(rows):
            for c in range(cols):
                if im1[r][c] == 0 and out[r][c] == 0:
                    output[r][c] = 0
                elif im1[r][c] == 0:
                    output[r][c] = out[r][c]
                elif out[r][c] == 0:
                    output[r][c] = im1[r][c]
                else:
                    output[r, c] = energy(im1, out, 128, 2, r, c)
                    seam.append([r, c])
        error = 0
        for [r, c] in seam:
            error += np.sum((output[r - 8:r + 9, c - 8:c + 9] -
                             im1[r - 8:r + 9, c - 8:c + 9])**2 +
                            (output[r - 8:r + 9, c - 8:c + 9] -
                             out[r - 8:r + 9, c - 8:c + 9])**2)
        if error / len(seam) < mi:
            mi = error / len(seam)
            best = i  # remember which homography gave the lowest seam error
            final_image = np.copy(output)

    # Recompose the panorama with the best homography, averaging the overlap
    out = cv2.warpPerspective(im2, scipy.linalg.inv(out_arr[best]),
                              (im1.shape[1], im1.shape[0]))
    output = np.zeros_like(im1)
    (rows, cols) = im1.shape
    for r in range(rows):
        for c in range(cols):
            if im1[r][c] == 0 and out[r][c] == 0:
                output[r][c] = 0
            elif im1[r][c] == 0:
                output[r][c] = out[r][c]
            elif out[r][c] == 0:
                output[r][c] = im1[r][c]
            else:
                output[r][c] = (int(im1[r][c]) + int(out[r][c])) // 2
    final_image = np.copy(output)

    plt.subplot(2, 2, 1)
    plt.axis('off')
    plt.imshow(im1_copy, cmap='gray')

    plt.subplot(2, 2, 2)
    plt.axis('off')
    plt.imshow(im2, cmap='gray')

    plt.subplot(2, 2, 3)
    plt.axis('off')
    plt.imshow(final_image, cmap='gray')

    plt.subplot(2, 2, 4)
    plt.axis('off')
    plt.imshow(out, cmap='gray')

    plt.show()

    cv2.imwrite('result2.jpg', final_image)
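Note that src_pts and dst_pts are built from the matches but never used: the custom ransac(corrs, 5.0) helper (not shown) returns a whole list of candidate homographies rather than a single estimate. For reference, a minimal sketch of the conventional single-estimate route with OpenCV's RANSAC on those arrays, using the same 5.0-pixel threshold:

# Sketch: single-homography alternative using the otherwise unused src_pts / dst_pts
M, mask = cv2.findHomography(dst_pts, src_pts, cv2.RANSAC, 5.0)  # maps im2 points onto im1
warped = cv2.warpPerspective(im2, M, (im1.shape[1], im1.shape[0]))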
Example #5
    for i in xrange(iterations):
        res = iterate(data, margin, res)
        # print(res)

    return res


def line(m, n):
    return lambda x: int(round(m * x + n))


# t = whitePoints[0] - whitePoints[30]

# plt.scatter(whiteX, whiteY)
resL = ransac(whitePointsLeft, 2, 200)
resR = ransac(whitePointsRight, 2, 200)
fnL = line(resL[0], resL[1])
fnR = line(resR[0], resR[1])

# print(res)

# plt.show()
# print(whitePoints)

lP1 = (0, fnL(0))
lP2 = (width, fnL(width))

rP1 = (0, fnR(0))
rP2 = (width, fnR(width))
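The ransac call in this fragment takes a point list, a sample size of 2 and an iteration count, and returns the slope and intercept consumed as resL[0] and resL[1]. A minimal sketch of such a helper, assuming points are (x, y) pairs and inliers are counted against a fixed pixel margin (the margin value below is illustrative, not taken from the original):

import random

def ransac(points, sample_size, iterations, margin=3.0):
    """Fit y = m*x + n with RANSAC over 2-point samples; returns [m, n].

    Sketch only -- the original helper's inlier margin and scoring may differ.
    """
    best_fit, best_count = [0.0, 0.0], -1
    for _ in range(iterations):
        (x1, y1), (x2, y2) = random.sample(points, sample_size)
        if x1 == x2:
            continue  # vertical pair cannot be written as y = m*x + n
        m = (y2 - y1) / float(x2 - x1)
        n = y1 - m * x1
        count = sum(1 for (x, y) in points if abs(m * x + n - y) <= margin)
        if count > best_count:
            best_fit, best_count = [m, n], count
    return best_fit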
Example #6
    def plotnoGui(self):

        pylab.figure(1)

        pylab.xlabel('Time(s)')
        pylab.ylabel('Offsets(s)')

        pylab.plot(self.seconds, self.offsets)
        pylab.title("Offsets Generated from a NTP trial run")
        pylab.grid(True)


        pylab.savefig('offsets.pdf')

        pylab.figure(2)

      #  pylab.hist(self.offsets, histtype='step')
       # pylab.title("Histogram of the Offsets")

        pylab.xlabel('Seconds(s)')
        pylab.ylabel('Residuals(s)')


        secArray = np.asarray(self.seconds)
        offArray = np.asarray(self.offsets)
        a,b = np.polyfit(secArray,offArray,1)



        residualsArray =  offArray-(a*secArray+b)

        pylab.plot(secArray,residualsArray , '--k')

        pylab.savefig('residuals.pdf')

        pylab.figure(3)

        pylab.xlabel(r'$\tau$ - sec')
        pylab.ylabel(r'$\sigma(\tau$)')
        pylab.title('Allan Standard Deviation')




        pylab.loglog(self.timeS, self.av, 'b^', self.timeS, self.av)
        pylab.errorbar(self.timeS, self.av,yerr=self.error,fmt='k.' )

        pylab.legend(('ADEV points', 'ADEV'))


        pylab.grid(True)

        pylab.savefig('adev.pdf')


        pylab.figure(4)

        pylab.xlabel('Seconds(s)')
        pylab.ylabel('Residuals(s)')


        offsetsMod = zeros(9)
        offs = np.asarray(self.offsets)
        offsetsMod = np.concatenate((offsetsMod,offs))

        offsetsAgain =[]
        secAgain = []

        #Checking the Allan Deviation for Abnormal Values

        for i in range(10,len(self.offsets)):
            [time1, av1, err1] = allantest.Allan.allanDevMills(offsetsMod[i-10:i])

            if (av1[0] > 0.00020):
                pass
            else:
                offsetsAgain.append(self.offsets[i-10])
                secAgain.append(self.seconds[i-10])

        # New tests on the middle portion of the run (Ubuntu estimator)

        offMidMod = zeros(9)
        offsMiddle =  self.offsets[250:301]
        offMidMod = np.concatenate((offMidMod,offsMiddle))

        offsetsMiddle = np.asarray(self.offsets)
        secMiddle = np.asarray(self.seconds)

        for i in range(10,len(offsMiddle)):
            [time2, av2, err2] = allantest.Allan.allanDevMills(offMidMod[i-10:i])



        secArray = np.asarray(secAgain)
        offArray = np.asarray(offsetsAgain)
        a,b = np.polyfit(secArray,offArray,1)



        residualsArray =  offArray-(a*secArray+b)

        pylab.plot(secArray,residualsArray , '--k')

        pylab.savefig('Residualscorrected.pdf')

        pylab.figure(5)

        [time1, av1, err1] = allantest.Allan.allanDevMills(offsetsAgain)

        pylab.xlabel(r'$\tau$ - sec')
        pylab.ylabel(r'$\sigma(\tau$)')
        pylab.title('Allan Standard Deviation')




        pylab.loglog(time1, av1, 'b^', time1, av1)
        pylab.errorbar(time1, av1,yerr=err1,fmt='k.' )

        pylab.legend(('ADEV points', 'ADEV'))


        pylab.grid(True)

        pylab.savefig('Allancorrected.pdf')

        pylab.figure(6)

        [time1, av1, err1] = allantest.Allan.allanDevMills(offsetsAgain)

        pylab.xlabel(r'$\tau$ - sec')
        pylab.ylabel(r'$\sigma(\tau$) - PPM')
        pylab.title('Allan Standard Deviation')




        pylab.loglog(time1, np.asarray(av1)*1e6, 'b^', time1, np.asarray(av1) *1e6)
        pylab.errorbar(time1, np.asarray(av1)*1e6,yerr=np.asarray(err1)*1e6,fmt='k.')

        pylab.legend(('ADEV points', 'ADEV'))


        pylab.grid(True)



        pylab.savefig('AllancorrectedPPM.pdf')

        pylab.figure(7)

        storageOFFs = []

        div = 3

        secAgainT = secAgain[0:(len(secAgain)/div)]

        offsetsAgainT = offsetsAgain[0:(len(offsetsAgain)/div)]

        secArray = np.asarray(secAgainT)
        offArray = np.asarray(offsetsAgainT)

        a,b = np.polyfit(secArray,offArray,1)
        # Using only 1/8 of the data
        offArray2 = offsetsAgain[(len(secAgain)/div)::8]
        secArray2 = secAgain[(len(secAgain)/div)::8]


        pylab.xlabel('Seconds(s)')
        pylab.ylabel('Corrected Offset(s)')



        for i in range(len(offArray2)):
            correction = a*secArray2[i] + b
            offNew = offArray2[i] - correction
            storageOFFs.append(offNew)

            secAgainT.pop(0)
            offsetsAgainT.pop(0)

            secAgainT.append(secArray2[i])
            offsetsAgainT.append(offArray2[i])


            secArray = np.asarray(secAgainT)
            offArray = np.asarray(offsetsAgainT)

            a,b = np.polyfit(secArray,offArray,1)


        pylab.plot(secArray2,storageOFFs)


        pylab.grid(True)

        pylab.savefig('corrected.pdf')

        pylab.figure(8)


        pylab.plot(secMiddle,offsetsMiddle)

        pylab.xlabel('Seconds(s)')
        pylab.ylabel('Corrected Offset(s)')


        pylab.figure(9)

        offsetsAgain =[]
        secAgain = []

        #Checking the Allan Deviation for Abnormal Values

        for i in range(10,len(self.offsets)):
            [time1, av1, err1] = allantest.Allan.allanDevMills(offsetsMod[i-10:i])

            if (av1[0] > 0.00035):
                pass
            else:
                offsetsAgain.append(self.offsets[i-10])
                secAgain.append(self.seconds[i-10])


        secArray = np.asarray(secAgain)
        offArray = np.asarray(offsetsAgain)
        a,b = np.polyfit(secArray,offArray,1)



        residualsArray =  offArray-(a*secArray+b)

        #diffs = diff(residualsArray)
        diffs = zeros(len(residualsArray)-1)
        # d = 0
        offinho = offArray
        for i in range(1,len(offinho)):

            diffs[i-1] = offinho[i] - offinho[i-1]



            if diffs[i-1] > 3*av2[0]:



                if(self.timeStep1):
                    offinho[i-1] =  av2[0]*1024
                    offinho[i] = offinho[i] - offinho[i-1]


                self.timeStep1=True

            else:

                self.timeStep1 = False




        pylab.plot(secArray,offinho , '--k')

        pylab.figure(13)

        [time1, av1, err1] = allantest.Allan.allanDevMills(diffs)

        pylab.xlabel(r'$\tau$ - sec')
        pylab.ylabel(r'$\sigma(\tau$)')
        pylab.title('Allan Standard Deviation')


        pylab.loglog(time1, av1, 'b^', time1, av1)
        pylab.errorbar(time1, av1,yerr=err1,fmt='k.' )

        pylab.legend(('ADEV points', 'ADEV'), loc=2)


        pylab.grid(True)

        pylab.figure(10)


        secArray = np.asarray(self.seconds)
        offArray = np.asarray(self.offsets)

        #offArray = offArray-offArray[0]

        a,b = np.polyfit(secArray,offArray,1)



        residualsArray =  offArray-(a*secArray+b)



        ###################
        # Testing Ransac Fit for the Data

        residualsArray.shape = (len(residualsArray),1)
        secArray.shape = (len(secArray),1)
        offArray.shape = (len(offArray),1)
        n_inputs = 1

        n_outputs = 1

        all_data = numpy.hstack( (secArray,offArray) )


        input_columns = range(n_inputs)

        output_columns = [n_inputs+i for i in range(n_outputs)]

        model = LinearLeastSquaresModel(input_columns,output_columns)

        ransac_fit, ransac_data = ransac(all_data,model,90, 1000, 10, 10, debug=1,return_all=True)

        linear_fit,resids,rank,s = scipy.linalg.lstsq(all_data[:,input_columns],
            all_data[:,output_columns])

        sort_idxs = numpy.argsort(secArray[:,0])
        ar1_col0_sorted = secArray[sort_idxs] # maintain as rank-2 array
# numpy.dot(ar1_col0_sorted, linear_fit) [:, 0]

        fitted_ransac = numpy.dot(ar1_col0_sorted, ransac_fit)[:, 0] + self.offsets[0]
        fitted_ransac = offArray[:,0] - fitted_ransac

        lin_fit = a*secArray+b


        pylab.plot(secArray, fitted_ransac, label='RANSAC fit')
        pylab.plot(secArray,a*secArray+b, label='linear fit')


        pylab.plot(secArray, offArray ,'k.', label='noisy data')


        pylab.legend()

        pylab.figure(11)



        offsetsAgain =[]
        secAgain = []

        #Checking the Allan Deviation for Abnormal Values

        for i in range(10,len(self.offsets)):
            [time1, av1, err1] = allantest.Allan.allanDevMills(offsetsMod[i-10:i])

            if (av1[0] > 0.00035):
                pass
            else:
                offsetsAgain.append(self.offsets[i-10])
                secAgain.append(self.seconds[i-10])

        # Re-reference the offsets to the first retained sample
        first = offsetsAgain[0]
        offsetsAgain = [item - first for item in offsetsAgain]

        diffs = diff(offsetsAgain)

        diffs = np.asarray(diffs)
        offArray = np.asarray(offsetsAgain)
        secArray = np.asarray(secAgain)
        s = secArray[1:,]
        offArray.shape = (len(offArray),1)
        print s.shape, diffs.shape

        diffs.shape = (len(diffs), 1)

        s.shape = (len(s),1)


        all_data = numpy.hstack( (s,diffs) )

        #n - the minimum number of data required to fit the model
        #k - the number of iterations performed by the algorithm
        #t - a threshold value for determining when a datum fits a model
        #d - the number of close data values required to assert that a model fits well to data

        ransac_fit, ransac_data = ransac(all_data,model,20, 222, 0.2, 10, debug=1,return_all=True)
        diffs.shape = len(diffs)
        s.shape = len(s)
        a,b = np.polyfit(s,diffs,1)

        lin_fit = a*s+b

        pylab.plot(s, diffs, 'g.', label='Real Differences')
        pylab.plot(s[ransac_data['inliers']], diffs[ransac_data['inliers']], 'r.', label='RANSAC Differences')
      #  pylab.plot(s, lin_fit, 'b.')

        #pylab.plot(secArray[ransac_data['inliers']], offArray[ransac_data['inliers']], 'r.')
       # pylab.plot(secArray[ransac_data['inliers']], offArray[ransac_data['inliers']])
        #pylab.plot(secArray[ransac_data['inliers']], lin_fit[ransac_data['inliers']], 'g.')

      #  pylab.plot(secArray[ransac_data['inliers']], lin_fit[ransac_data['inliers']])

        #pylab.plot( secArray[ransac_data['inliers'],0], offArray[ransac_data['inliers'],0], 'bx', label='RANSAC data' )
        pylab.legend(loc=2)
        # Allan variance with the workaround
        pylab.figure(12)

        [time1, av1, err1] = allantest.Allan.allanDevMills(diffs[ransac_data['inliers']])

        pylab.xlabel(r'$\tau$ - sec')
        pylab.ylabel(r'$\sigma(\tau$)')
        pylab.title('Allan Standard Deviation')


        pylab.loglog(time1, av1, 'b^', time1, av1)
        pylab.errorbar(time1, av1,yerr=err1,fmt='k.' )

        pylab.legend(('ADEV points', 'ADEV'), loc=2)


        pylab.grid(True)

        # without the workaround
        pylab.figure(14)

        # Re-reference the offsets to the first sample (the differences below
        # are unaffected by the constant shift)
        offsets_shifted = [item - self.offsets[0] for item in self.offsets]

        diffs = diff(offsets_shifted)

        diffs = np.asarray(diffs)

        offArray = np.asarray(self.offsets)
        secArray = np.asarray(self.seconds)

        s = secArray[1:,]
        offArray.shape = (len(offArray),1)

        diffs.shape = (len(diffs), 1)

        s.shape = (len(s),1)


        all_data = numpy.hstack( (s,diffs) )

        #n - the minimum number of data required to fit the model
        #k - the number of iterations performed by the algorithm
        #t - a threshold value for determining when a datum fits a model
        #d - the number of close data values required to assert that a model fits well to data

        ransac_fit, ransac_data = ransac(all_data,model,20, 222, 0.2, 10, debug=1,return_all=True)
        diffs.shape = len(diffs)
        s.shape = len(s)
        a,b = np.polyfit(s,diffs,1)

        lin_fit = a*s+b

        pylab.plot(s, diffs, 'g.', label='Real Differences')
        pylab.plot(s[ransac_data['inliers']], diffs[ransac_data['inliers']], 'r.', label='RANSAC Differences')
        #  pylab.plot(s, lin_fit, 'b.')

        #pylab.plot(secArray[ransac_data['inliers']], offArray[ransac_data['inliers']], 'r.')
        # pylab.plot(secArray[ransac_data['inliers']], offArray[ransac_data['inliers']])
        #pylab.plot(secArray[ransac_data['inliers']], lin_fit[ransac_data['inliers']], 'g.')

        #  pylab.plot(secArray[ransac_data['inliers']], lin_fit[ransac_data['inliers']])

        #pylab.plot( secArray[ransac_data['inliers'],0], offArray[ransac_data['inliers'],0], 'bx', label='RANSAC data' )
        pylab.legend(loc=2)

        # Allan variance without the workaround
        pylab.figure(15)

        [time1, av1, err1] = allantest.Allan.allanDevMills(diffs[ransac_data['inliers']])

        pylab.xlabel(r'$\tau$ - sec')
        pylab.ylabel(r'$\sigma(\tau$)')
        pylab.title('Allan Standard Deviation')


        pylab.loglog(time1, av1, 'b^', time1, av1)
        pylab.errorbar(time1, av1,yerr=err1,fmt='k.' )

        pylab.legend(('ADEV points', 'ADEV'), loc=2)


        pylab.grid(True)


        pylab.show()
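The ransac and LinearLeastSquaresModel used throughout this example follow the interface of the well-known SciPy Cookbook RANSAC recipe: ransac(data, model, n, k, t, d, return_all=True) returns the fit plus a dict with an 'inliers' index array, and the model fits the output columns from the input columns by least squares (the parameter comments in the code above describe n, k, t and d). A condensed sketch of that interface, under the assumption that the author used the cookbook recipe, which is not reproduced here:

import numpy as np

class LinearLeastSquaresModel:
    """Least-squares linear model for the cookbook-style ransac() below."""
    def __init__(self, input_columns, output_columns):
        self.input_columns = input_columns
        self.output_columns = output_columns

    def fit(self, data):
        A = data[:, self.input_columns]
        B = data[:, self.output_columns]
        x, _, _, _ = np.linalg.lstsq(A, B, rcond=None)
        return x

    def get_error(self, data, model):
        A = data[:, self.input_columns]
        B = data[:, self.output_columns]
        B_fit = np.dot(A, model)
        return np.sum((B - B_fit) ** 2, axis=1)  # per-row squared residual


def ransac(data, model, n, k, t, d, debug=False, return_all=False):
    """Cookbook-style RANSAC: n = points per trial fit, k = iterations,
    t = residual threshold, d = extra inliers required to accept a fit."""
    best_fit, best_err, best_inlier_idxs = None, np.inf, None
    for _ in range(k):
        idxs = np.random.permutation(data.shape[0])
        maybe_idxs, test_idxs = idxs[:n], idxs[n:]
        maybe_model = model.fit(data[maybe_idxs])
        test_err = model.get_error(data[test_idxs], maybe_model)
        also_idxs = test_idxs[test_err < t]
        if len(also_idxs) > d:
            better_data = np.concatenate((data[maybe_idxs], data[also_idxs]))
            better_model = model.fit(better_data)
            this_err = np.mean(model.get_error(better_data, better_model))
            if this_err < best_err:
                best_fit = better_model
                best_err = this_err
                best_inlier_idxs = np.concatenate((maybe_idxs, also_idxs))
    if best_fit is None:
        raise ValueError("did not meet fit acceptance criteria")
    if return_all:
        return best_fit, {'inliers': best_inlier_idxs}
    return best_fit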