Example no. 1
def transform_base_on_eye_pairs(image, region_info, region_skin_image, eye_pairs,
		m, n, tempX, tempY, file_output):
	faceBorder = find_longest_border(region_skin_image, m, n, tempX, tempY)
	downVector = [0, 1]
	# count = 0

	for eye1, eye2 in eye_pairs:
		centroid1 = [(eye1[1] + eye1[3]) / 2, (eye1[0] + eye1[2]) / 2]
		centroid2 = [(eye2[1] + eye2[3]) / 2, (eye2[0] + eye2[2]) / 2]
		pivot = [(centroid1[0] + centroid2[0]) / 2, (centroid1[1] + centroid2[1]) / 2]

		direction = get_face_direction(region_skin_image, m, n, tempX, tempY, faceBorder, centroid1, centroid2, pivot)		
		angleToRotate = ut.find_angle_between_two_vectors(downVector, direction)
		# print(count + 1, angleToRotate) 
		# print(direction)
		if (direction[0] > 0):
			angleToRotate = -angleToRotate

		#adjust pivot point to work with original image
		pivot[0] = pivot[0] + region_info[1]
		pivot[1] = pivot[1] + region_info[0]
		pivot = tuple(pivot)

		tempImage = image.copy()
		# cv2.rectangle(tempImage, (eye1[1], eye1[0]), (eye1[3], eye1[2]), (0, 255, 0), 1)
		# cv2.rectangle(tempImage, (eye2[1], eye2[0]), (eye2[3], eye2[2]), (0, 255, 0), 1)
		mat = cv2.getRotationMatrix2D(pivot, angleToRotate, 1.0)
		tempImage = cv2.warpAffine(tempImage, mat, tempImage.shape[1::-1])

		#print(tempImage[1,1,1])
		#count += 1
		value = split_to_get_face(tempImage, pivot, ut.distance_between_points(centroid1, centroid2), 
			file_output)
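The rotate-about-a-pivot step that each of these examples performs is the standard OpenCV pattern shown below; the image path, pivot, and angle here are placeholders, not values taken from the examples.

import cv2

image = cv2.imread("face.jpg")  # placeholder input image
pivot = (120.0, 80.0)           # (x, y) midpoint between the two eye centroids
angle = 15.0                    # degrees, counter-clockwise

mat = cv2.getRotationMatrix2D(pivot, angle, 1.0)          # 2x3 affine matrix
rotated = cv2.warpAffine(image, mat, image.shape[1::-1])  # dsize is (width, height)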
Example no. 2
def transform_base_on_eye_pairs(region_image, region_skin_image, eye_pairs,
		m, n, tempX, tempY, directory):
	faceBorder = find_longest_border(region_skin_image, m, n, tempX, tempY)
	downVector = [0, -1]
	count = 0

	for eye1, eye2 in eye_pairs:
		centroid1 = [(eye1[0] + eye1[2]) / 2, (eye1[1] + eye1[3]) / 2]
		centroid2 = [(eye2[0] + eye2[2]) / 2, (eye2[1] + eye2[3]) / 2]

		pivot = [(centroid1[0] + centroid2[0]) / 2, (centroid1[1] + centroid2[1]) / 2]

		direction = get_face_direction(region_skin_image, m, n, tempX, tempY, faceBorder, centroid1, centroid2, pivot)
		angleToRotate = ut.find_angle_between_two_vectors(downVector, direction)
		if (direction[0] < 0):
			angleToRotate = -angleToRotate

		tempImage = region_image.copy()
		tempSkinImage = region_skin_image.copy()

		# build the in-plane rotation about the eye midpoint; getRotationMatrix2D expects an (x, y) tuple
		mat = cv2.getRotationMatrix2D(tuple(pivot), angleToRotate, 1.0)
		tempImage = cv2.warpAffine(tempImage, mat, tempImage.shape[1::-1])
		tempSkinImage = cv2.warpAffine(tempSkinImage, mat, tempImage.shape[1::-1])

		count += 1
		cv2.imwrite((directory + "/hello%s.jpg") % count, tempImage)
		cv2.imwrite((directory + "/skin%s.jpg") % count, tempSkinImage)
Example no. 3
def get_face_direction(region_skin_image, m, n, tempX, tempY, border, eye1, eye2, pivot):
	'''Pick the eye-line perpendicular that points toward the nose and mouth.'''
	line = ut.find_line(eye1, eye2)
	
	farthestPoint = ut.find_the_farthest_point(line, border)
	# vector from the eye midpoint toward the border point farthest from the eye line
	specialVector = [farthestPoint[0] - pivot[0], farthestPoint[1] - pivot[1]]

	perpendicularVector = [eye1[1] - eye2[1], eye2[0] - eye1[0]]
	if (ut.find_angle_between_two_vectors(perpendicularVector, specialVector) < 90):
		return perpendicularVector
	return [eye2[1] - eye1[1], eye1[0] - eye2[0]]
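The examples rely on a project-local ut module that is not shown here. The sketch below is one plausible implementation of the geometry helpers they call; the signatures and the (a, b, c) line convention are assumptions, not the original code.

import math

def find_angle_between_two_vectors(v1, v2):
    # Unsigned angle in degrees between two 2-D vectors; the callers above
    # flip its sign themselves depending on the face direction.
    dot = v1[0] * v2[0] + v1[1] * v2[1]
    norm = math.hypot(v1[0], v1[1]) * math.hypot(v2[0], v2[1])
    return math.degrees(math.acos(max(-1.0, min(1.0, dot / norm))))

def distance_between_points(p1, p2):
    # Euclidean distance, used to scale the face crop to the eye spacing.
    return math.hypot(p1[0] - p2[0], p1[1] - p2[1])

def find_line(p1, p2):
    # Line through p1 and p2 as coefficients (a, b, c) of a*x + b*y + c = 0.
    a = p2[1] - p1[1]
    b = p1[0] - p2[0]
    c = -(a * p1[0] + b * p1[1])
    return a, b, c

def find_the_farthest_point(line, points):
    # Point in `points` with the largest perpendicular distance to `line`.
    a, b, c = line
    denom = math.hypot(a, b)
    return max(points, key=lambda p: abs(a * p[0] + b * p[1] + c) / denom)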
Example no. 4
def get_faces_base_on_eye_pairs(system_data, image, region_info,
                                region_skin_image, eye_pairs, m, n, tempX,
                                tempY):
    faceBorder = find_longest_border(region_skin_image, m, n, tempX, tempY)
    downVector = [0, 1]
    # count = 0
    # minDist = 100000000
    # minImage = None

    result = []

    for eye1, eye2 in eye_pairs:
        centroid1 = [(eye1[1] + eye1[3]) / 2, (eye1[0] + eye1[2]) / 2]
        centroid2 = [(eye2[1] + eye2[3]) / 2, (eye2[0] + eye2[2]) / 2]
        pivot = [(centroid1[0] + centroid2[0]) / 2,
                 (centroid1[1] + centroid2[1]) / 2]

        direction = get_face_direction(region_skin_image, m, n, tempX, tempY,
                                       faceBorder, centroid1, centroid2, pivot)
        angleToRotate = ut.find_angle_between_two_vectors(
            downVector, direction)
        # print(count + 1, angleToRotate)
        # print(direction)
        if (direction[0] > 0):
            angleToRotate = -angleToRotate

        #adjust pivot point to work with original image
        pivot[0] = pivot[0] + region_info[1]
        pivot[1] = pivot[1] + region_info[0]
        pivot = tuple(pivot)

        tempImage = image.copy()
        # cv2.rectangle(tempImage, (eye1[1], eye1[0]), (eye1[3], eye1[2]), (0, 255, 0), 1)
        # cv2.rectangle(tempImage, (eye2[1], eye2[0]), (eye2[3], eye2[2]), (0, 255, 0), 1)
        mat = cv2.getRotationMatrix2D(pivot, angleToRotate, 1.0)
        tempImage = cv2.warpAffine(tempImage, mat, tempImage.shape[1::-1])

        tempImage = split_to_get_face(
            tempImage, pivot, ut.distance_between_points(centroid1, centroid2))
        if (tempImage is None):
            continue
        check, dist = pca.detect_face(
            tempImage,
            system_data.mean,
            system_data.eigenfaces,
            dist_threshold=system_data.detectionThreshold)

        print("DETECTION DIST", dist)
        # cv2.imshow("value", tempImage)
        # cv2.waitKey(0)
        # cv2.destroyWindow("value")

        if (check):
            # if (dist < minDist):
            # 	minDist = dist
            # 	minImage = tempImage

            result.append([dist, tempImage])

    # return minDist, minImage
    return result
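Each entry in the returned list pairs a PCA reconstruction distance with the cropped face, so a caller might keep only the closest match, roughly as below; the variable names and output path are illustrative, not part of the original code.

faces = get_faces_base_on_eye_pairs(system_data, image, region_info,
                                    region_skin_image, eye_pairs, m, n,
                                    tempX, tempY)
if faces:
    best_dist, best_face = min(faces, key=lambda item: item[0])
    cv2.imwrite("best_face.jpg", best_face)  # placeholder output path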