Example #1
def extract_bill(image, screen, ratio):
    """"Extract the bill of the image"""
    warped = four_point_transform(image, screen.reshape(4, 2) * ratio)

    # convert the warped image to grayscale, then threshold it
    # to give it that 'black and white' paper effect
    warped = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY)
    # threshold_adaptive is the legacy scikit-image helper; newer releases
    # replace it with threshold_local (used in the later examples)
    warped = threshold_adaptive(warped, 250, offset=10)
    warped = warped.astype("uint8") * 255
    return warped
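A minimal usage sketch for extract_bill is shown below; the image path, the 500-pixel working height, and the contour search are illustrative assumptions, not part of the example above.

# assumed driver for extract_bill -- path, sizes, and thresholds are illustrative
import cv2
import imutils

image = cv2.imread("bill.jpg")                 # hypothetical input path
ratio = image.shape[0] / 500.0                 # scale factor back to full resolution
small = imutils.resize(image, height=500)

# find the largest 4-point contour in an edge map of the downsized image
gray = cv2.cvtColor(small, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(cv2.GaussianBlur(gray, (5, 5), 0), 75, 200)
cnts = imutils.grab_contours(
    cv2.findContours(edges, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE))

screen = None
for c in sorted(cnts, key=cv2.contourArea, reverse=True)[:5]:
    approx = cv2.approxPolyDP(c, 0.02 * cv2.arcLength(c, True), True)
    if len(approx) == 4:
        screen = approx
        break

if screen is not None:
    bill = extract_bill(image, screen, ratio)
    cv2.imshow("Bill", bill)
    cv2.waitKey(0)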
Example #2
def warpedfoo(image, pts):
	# apply the four point transform to obtain a "birds eye view" of
	# the image
	warped = four_point_transform(image, pts)
	 
	# show the original and warped images (display is disabled here, so the
	# waitKey call is disabled as well)
	#cv2.imshow("Original", image)
	#cv2.imshow("Warped", warped)
	#cv2.waitKey(0)
	return warped
Example #3
# loop over the contours (`cnts` is assumed to hold the contours found in the
# earlier edge-detection step, which is not shown in this snippet)
for c in cnts:
    # approximate the contour
    peri = cv2.arcLength(c, True)
    approx = cv2.approxPolyDP(c, 0.02 * peri, True)
    print(len(approx))

    # if our approximated contour has four points, then we
    # can assume that we have found our screen
    if len(approx) == 4:
        screenCnt = approx
        break

# show the contour (outline) of the piece of paper
print("STEP 2: Find contours of paper")
cv2.drawContours(image, [screenCnt], -1, (0, 255, 0), 2)
cv2.imshow("Outline", image)
cv2.waitKey(0)
cv2.destroyAllWindows()

# apply the four point transform to obtain a top-down
# view of the original image
warped = four_point_transform(orig, screenCnt.reshape(4, 2) * ratio)

# convert the warped image to grayscale, then threshold it
# to give it that 'black and white' paper effect
warped = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY)
T = threshold_local(warped, 11, offset=10, method="gaussian")
warped = (warped > T).astype("uint8") * 255

# show the original and scanned images
print("STEP 3: Apply perspective transform")
cv2.imshow("Original", imutils.resize(orig, height=650))
cv2.imshow("Scanned", imutils.resize(warped, height=650))
cv2.waitKey(0)
Example #4
def warpedfoo(image, pts):
	# apply the four point transform to obtain a "birds eye view" of
	# the image
	warped = four_point_transform(image, pts)
	return warped
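A short usage sketch for warpedfoo; the file name and the four corner coordinates are placeholders, but they illustrate the (4, 2) array of source points the helper expects.

# assumed usage of warpedfoo -- the path and the corner coordinates are placeholders
import cv2
import numpy as np

image = cv2.imread("card.jpg")
# four corners of the region of interest; the pyimagesearch helper typically
# re-orders them into top-left, top-right, bottom-right, bottom-left itself
pts = np.array([(63, 242), (291, 110), (361, 252), (78, 386)], dtype="float32")

warped = warpedfoo(image, pts)
cv2.imshow("Warped", warped)
cv2.waitKey(0)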
Example #5
from pyimagesearch import four_point_transform
from skimage.filters import threshold_local
import numpy as np
import argparse
import cv2
import imutils

# load the image and grab the source coordinates (i.e. the list of
# (x, y) points)
# NOTE: using the 'eval' function is bad form, but for this example
# let's just roll with it -- in future posts I'll show you how to
# automatically determine the coordinates without pre-supplying them
image = cv2.imread("/home/caratred/aadhar.jpeg")
# the string handed to eval must be the list of source points, not the image
# path; the coordinates below are placeholders for the document's four corners
pts = np.array(eval("[(0, 0), (300, 0), (300, 400), (0, 400)]"), dtype="float32")

# apply the four point tranform to obtain a "birds eye view" of
# the image
warped = four_point_transform(image, pts)

# show the original and warped images
cv2.imshow("Original", image)
cv2.imshow("Warped", warped)
cv2.waitKey(0)
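Every example here leans on the four_point_transform helper imported from pyimagesearch. A sketch of how such a helper is typically implemented: order the four corners, compute the output size from the corner distances, then let OpenCV build and apply the 3x3 perspective matrix.

# sketch of a typical four_point_transform implementation (pyimagesearch-style)
import numpy as np
import cv2

def order_points(pts):
    # order the points as top-left, top-right, bottom-right, bottom-left
    rect = np.zeros((4, 2), dtype="float32")
    s = pts.sum(axis=1)
    rect[0] = pts[np.argmin(s)]        # top-left has the smallest x + y sum
    rect[2] = pts[np.argmax(s)]        # bottom-right has the largest x + y sum
    diff = np.diff(pts, axis=1)
    rect[1] = pts[np.argmin(diff)]     # top-right has the smallest y - x difference
    rect[3] = pts[np.argmax(diff)]     # bottom-left has the largest y - x difference
    return rect

def four_point_transform(image, pts):
    rect = order_points(pts)
    (tl, tr, br, bl) = rect

    # the output width/height are the largest distances between opposite corners
    widthA = np.linalg.norm(br - bl)
    widthB = np.linalg.norm(tr - tl)
    maxWidth = max(int(widthA), int(widthB))
    heightA = np.linalg.norm(tr - br)
    heightB = np.linalg.norm(tl - bl)
    maxHeight = max(int(heightA), int(heightB))

    # destination rectangle for the "birds eye view"
    dst = np.array([
        [0, 0],
        [maxWidth - 1, 0],
        [maxWidth - 1, maxHeight - 1],
        [0, maxHeight - 1]], dtype="float32")

    # compute the 3x3 perspective matrix and warp the image
    M = cv2.getPerspectiveTransform(rect, dst)
    return cv2.warpPerspective(image, M, (maxWidth, maxHeight))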
Example #6
contours = imutils.grab_contours(contours)
contours = sorted(contours, key = cv2.contourArea, reverse = True)[:5]

# loop over the contours
for contour in contours:
	# approximate the contour
	peri = cv2.arcLength(contour, True)
	approx = cv2.approxPolyDP(contour, 0.02 * peri, True)

	# if our approximated contour has four points, then we can assume
	# that we have found the screen
	if len(approx) == 4:
		screenContour = approx
		break

# show the contour outline on the image
print("[INFO]: Step 2 - Finding contours in image")
cv2.drawContours(img, [screenContour], -1, (50, 50, 200), 3)
cv2.imshow("Outline Contours in Image", img)
cv2.waitKey(0)

# apply the four point transform to obtain a top-down view of the document
warped = four_point_transform(original_img, screenContour.reshape(4, 2) * ratio)

# convert the warped image to grayscale, then apply a local threshold to give
# it that 'black and white' paper effect
warped = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY)
T = threshold_local(warped, 11, offset=10, method="gaussian")
warped = (warped > T).astype("uint8") * 255

print ("INFO: STEP 3 - Apply perspective transform")
cv2.imshow("original", imutils.resize(original_img, height = 500))
cv2.imshow("scanned", imutils.resize(warped, height = 500))
cv2.waitKey(0)
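The snippet above starts from an already-populated contours variable; a minimal sketch of the setup that is assumed to have run before it (file name, working height, and Canny thresholds are illustrative choices):

# assumed setup for the snippet above: produce `contours` from an edge map
import cv2
import imutils

original_img = cv2.imread("document.jpg")      # hypothetical input path
ratio = original_img.shape[0] / 500.0          # scale factor used by the snippet
img = imutils.resize(original_img, height=500)

# grayscale, blur, and edge-detect the downsized image
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (5, 5), 0)
edged = cv2.Canny(gray, 75, 200)

# raw return value of findContours; imutils.grab_contours() in the snippet
# normalizes it across OpenCV versions
contours = cv2.findContours(edged.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)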