# Unwarp a photographed presentation/screen to a top-down view.
# Pipeline: largest contour -> convex hull -> 4-point polygon (preferred)
# or minAreaRect box (fallback) -> perspective transform -> save.
# NOTE(review): this chunk was flattened onto one physical line and mixed
# Python-2 `print "..."` with Python-3 `print(...)`; normalized to Python 3.
image = cv2.imread(args["image"])
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

# Largest contour is assumed to be the screen/presentation area.
cnts = utils.get_contours(gray)
cnt = max(cnts, key=cv2.contourArea)
hull = cv2.convexHull(cnt)
# cv2.drawContours(image, [hull], 0, (255, 0, 0), 2)
# cv2.imshow("Outline", image)
# cv2.waitKey()

# Fallback quadrilateral: minimum-area rotated rectangle around the hull.
# np.intp replaces np.int0, which was removed in NumPy 2.0.
rect = cv2.minAreaRect(hull)
box = np.intp(cv2.boxPoints(rect))
# cv2.drawContours(image, [box], 0, (0, 0, 255), 2)
# cv2.imshow("Outline", image)
# cv2.waitKey()

# Prefer a direct polygonal approximation of the hull when it yields
# exactly four corners (a cleaner fit than the bounding rectangle).
approx = cv2.approxPolyDP(hull, 0.02 * cv2.arcLength(hull, True), True)
print(len(approx))
if len(approx) == 4:
    box = approx.reshape(4, 2)
    print("Poly approximation")

output = transform.four_point_transform(image, box)
cv2.imshow("Output", output)
cv2.waitKey()
cv2.destroyAllWindows()
cv2.imwrite("unwarped_presentation.jpg", output)
# Duplicate of the unwarp pipeline above (kept as in SOURCE): locate the
# presentation region, pick a 4-corner quad, warp it to a top-down view.
# NOTE(review): flattened one-liner restored; Python-2 `print "..."`
# normalized to Python 3 to match the `print(...)` already used here.
image = cv2.imread(args["image"])
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

# Largest contour is assumed to be the screen/presentation area.
cnts = utils.get_contours(gray)
cnt = max(cnts, key=cv2.contourArea)
hull = cv2.convexHull(cnt)
# cv2.drawContours(image, [hull], 0, (255, 0, 0), 2)
# cv2.imshow("Outline", image)
# cv2.waitKey()

# Fallback quadrilateral: minimum-area rotated rectangle around the hull.
# np.intp replaces np.int0, which was removed in NumPy 2.0.
rect = cv2.minAreaRect(hull)
box = np.intp(cv2.boxPoints(rect))
# cv2.drawContours(image, [box], 0, (0, 0, 255), 2)
# cv2.imshow("Outline", image)
# cv2.waitKey()

# Prefer a direct polygonal approximation when it yields exactly 4 corners.
approx = cv2.approxPolyDP(hull, 0.02 * cv2.arcLength(hull, True), True)
print(len(approx))
if len(approx) == 4:
    box = approx.reshape(4, 2)
    print("Poly approximation")

output = transform.four_point_transform(image, box)
cv2.imshow("Output", output)
cv2.waitKey()
cv2.destroyAllWindows()
cv2.imwrite("unwarped_presentation.jpg", output)
# NOTE(review): this chunk was flattened onto ONE physical line, so the first
# '#' comments out everything after the approxPolyDP call — the screen-contour
# check, `break`, four_point_transform, adaptive threshold, and display steps
# are all currently DEAD text inside the trailing comment and never execute.
# The live statement below also belongs inside a contour `for` loop whose
# header is outside this chunk (`c` and `peri` are bound there), and the dead
# text contains a Python-2 `print "..."` statement — TODO: re-split this line
# and restore the loop before relying on it.
approx = cv2.approxPolyDP(c, 0.02 * peri, True) # if our approximated contour has four points, then we # can assume that we have found our screen if len(approx) == 4: screenCnt = approx break # show the contour (outline) of the piece of paper print "STEP 2: Find contours of paper" # cv2.drawContours(image, [screenCnt], -1, (0, 255, 0), 2) # cv2.imshow("Outline", image) # cv2.waitKey(0) # cv2.destroyAllWindows() # apply the four point transform to obtain a top-down # view of the original image warped = four_point_transform(orig, screenCnt.reshape(4, 2) * ratio) # convert the warped image to grayscale, then threshold it # to give it that 'black and white' paper effect warped = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY) warped = utils.adaptive_threshold(warped) warped = warped.astype("uint8") # show the original and scanned images print "STEP 3: Apply perspective transform" # cv2.imshow("Original", utils.image_resize(orig, height = 650)) cv2.imshow("Scanned", utils.image_resize(warped, height=650)) cv2.waitKey(0)
# NOTE(review): this entire flattened line begins with '#', so ALL of it is a
# comment — none of the scanner statements here (screenCnt selection, `break`,
# four_point_transform, adaptive threshold, imshow) ever execute. It appears
# to be a second copy of the fragment above, minus the approxPolyDP call, and
# likewise depends on a contour `for` loop not visible in this chunk; it also
# embeds Python-2 `print "..."` statements. TODO: either delete this dead
# duplicate or re-split it into real lines inside the loop.
# if our approximated contour has four points, then we # can assume that we have found our screen if len(approx) == 4: screenCnt = approx break # show the contour (outline) of the piece of paper print "STEP 2: Find contours of paper" # cv2.drawContours(image, [screenCnt], -1, (0, 255, 0), 2) # cv2.imshow("Outline", image) # cv2.waitKey(0) # cv2.destroyAllWindows() # apply the four point transform to obtain a top-down # view of the original image warped = four_point_transform(orig, screenCnt.reshape(4, 2) * ratio) # convert the warped image to grayscale, then threshold it # to give it that 'black and white' paper effect warped = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY) warped = utils.adaptive_threshold(warped) warped = warped.astype("uint8") # show the original and scanned images print "STEP 3: Apply perspective transform" # cv2.imshow("Original", utils.image_resize(orig, height = 650)) cv2.imshow("Scanned", utils.image_resize(warped, height=650)) cv2.waitKey(0)