def __init__(self):
    super(BioID, self).__init__('BioID', 'faces/BioID/')
    point_names = ['right_eye_pupil', 'left_eye_pupil',
                   'mouth_right_corner', 'mouth_left_corner',
                   'right_eyebrow_outer_end', 'right_eyebrow_inner_end',
                   'left_eyebrow_inner_end', 'left_eyebrow_outer_end',
                   'face_left', 'right_eye_outer_corner',
                   'right_eye_inner_corner', 'left_eye_inner_corner',
                   'left_eye_outer_corner', 'face_right', 'nose_tip',
                   'right_nostril', 'left_nostril', 'mouth_top_lip',
                   'mouth_bottom_lip', 'chin_center']
    self.keyPointsDict = []
    img_dir = 'BioID-FaceDatabase-V1.2/'
    self.lstImages = [os.path.join(img_dir, 'BioID_' + str(i).zfill(4) + '.pgm')
                      for i in range(1520)]
    points_dir = 'points_20/'
    points_path = os.path.join(self.absolute_base_directory, points_dir)
    # Map each image path to its annotation file, e.g.
    # '.../BioID_0000.pgm' -> 'bioid_0000.pts'.
    points = read_points(self.lstImages,
                         lambda x: x[-14:-3].lower() + 'pts',
                         directory=points_path)
    # Note: all points are present for all images in this dataset.
    # If this assumption is incorrect, the mapping from point names
    # to coordinates will be incorrect.
    for img_points in points:
        self.keyPointsDict.append({})
        prev_coord = None
        for idx, point in enumerate(img_points):
            if idx % 2 == 0:
                prev_coord = point  # x coordinate; wait for the matching y
            else:
                self.keyPointsDict[-1][point_names[idx // 2]] = (prev_coord, point)
                prev_coord = None
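# Illustration (not part of the loader): the loop above pairs the flat
# [x0, y0, x1, y1, ...] coordinate stream with point_names. A minimal
# standalone sketch of the same pairing, assuming a flat list of floats:
def pair_coordinates(flat_points, names):
    """Pair a flat [x0, y0, x1, y1, ...] list into {name: (x, y)}."""
    assert len(flat_points) == 2 * len(names)
    return {names[i]: (flat_points[2 * i], flat_points[2 * i + 1])
            for i in range(len(names))}

# Example:
#   pair_coordinates([10.0, 20.0, 30.0, 40.0],
#                    ['right_eye_pupil', 'left_eye_pupil'])
#   -> {'right_eye_pupil': (10.0, 20.0), 'left_eye_pupil': (30.0, 40.0)}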
def run_bayesian_learning(points_filename, som_centers_filename,
                          activity_centers_num, path_iterations_num):
    data_points = hp.read_points(points_filename)
    activity_vectors = np.array(find_activity_vectors(data_points))
    # prototypical_activities - centers of the clusters of activities
    # activity_indices - vector of data length where each element is the
    # index of the closest prototypical activity
    activity_indices, prototypical_activities = kmeans.kmeans(
        activity_centers_num, activity_vectors)
    prototypical_states = hp.read_points(som_centers_filename)
    # The e-step assigns each data point to the closest prototypical state:
    # state_indices - vector of data length where each element is the index
    # of the closest prototypical state
    state_indices = kmeans.e_step(prototypical_states, data_points)
    probability_matrix = find_probability_matrix(state_indices, activity_indices,
                                                 prototypical_states,
                                                 prototypical_activities)
    path = np.array(
        generate_path(probability_matrix, prototypical_states,
                      prototypical_activities, state_indices,
                      path_iterations_num))
    return path
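# find_activity_vectors is defined elsewhere in this module; a plausible
# minimal sketch, ASSUMING an "activity" is the displacement between two
# consecutive data points (this interpretation is not confirmed by the
# code above):
import numpy as np

def find_activity_vectors_sketch(data_points):
    """Return displacement vectors between consecutive points."""
    pts = np.asarray(data_points)
    return pts[1:] - pts[:-1]  # shape (n - 1, 3) for n 3D points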
def learn_som(input_file, centers_number, iterations_number):
    som1 = SOM(centers_number)
    points = hp.read_points(input_file)
    xs, ys, zs = points[:, 0], points[:, 1], points[:, 2]
    # Present every point to the SOM once per pass; the iteration counter
    # starts at 1 so the update rule can use it for decay.
    for iteration in range(1, iterations_number):
        for i in range(len(xs)):
            som1.update(xs[i], ys[i], zs[i], iteration, iterations_number)
    # som1.som_print()
    # Collect the learned center coordinates.
    somx, somy, somz = [], [], []
    for center in som1.list:
        somx.append(center.x)
        somy.append(center.y)
        somz.append(center.z)
    output_file = "som_centers_" + str(centers_number)
    hp.write_points(np.array(somx), np.array(somy), np.array(somz), output_file)
    return somx, somy, somz
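# Example usage (parameter values are illustrative; they sit inside the
# ranges suggested by the driver script below). Learning also writes the
# centers to "som_centers_60" as a side effect.
somx, somy, somz = learn_som("q3dm1-path1.csv", 60, 15)
hp.plot_points(np.array(somx), np.array(somy), np.array(somz), "som centers")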
args = parser.parse_args()
if args.random:
    # Avoid 'ValueError: sample larger than population' when calling
    # random.sample().
    r_end = 100
    if args.random >= r_end:
        r_end = args.random + 10
    x_axes = random.sample(xrange(1, r_end), args.random)
    y_axes = random.sample(xrange(1, r_end), args.random)
    points = zip(x_axes, y_axes)
else:
    from helper import read_points
    points = read_points(args.file)

convex_hull = graham_scan(points)
print 'origin points:\t', points
print 'convex hull:\t', convex_hull

if args.plot:
    import matplotlib.pyplot as plt
    for p in points:
        color = 'r' if p in convex_hull else 'b'
        plt.plot(p[0], p[1], color + 'o')
    h_x_axes = [e[0] for e in convex_hull]
    h_y_axes = [e[1] for e in convex_hull]
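# graham_scan is imported from elsewhere in this repo; a compact sketch of
# the classic algorithm, assuming 'points' is a list of (x, y) tuples with
# at least three distinct points (the repo's version may differ in detail):
import math

def graham_scan_sketch(points):
    def cross(o, a, b):
        # > 0 for a counter-clockwise turn o -> a -> b, < 0 for clockwise.
        return (a[0] - o[0]) * (b[1] - o[1]) - (a[1] - o[1]) * (b[0] - o[0])

    # Pivot: lowest y, then lowest x; sort the rest by polar angle around
    # the pivot, breaking ties by squared distance.
    pivot = min(points, key=lambda p: (p[1], p[0]))
    rest = sorted((p for p in points if p != pivot),
                  key=lambda p: (math.atan2(p[1] - pivot[1], p[0] - pivot[0]),
                                 (p[0] - pivot[0]) ** 2 + (p[1] - pivot[1]) ** 2))
    hull = [pivot]
    for p in rest:
        # Pop points that would make a clockwise (or collinear) turn.
        while len(hull) > 1 and cross(hull[-2], hull[-1], p) <= 0:
            hull.pop()
        hull.append(p)
    return hull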
import bayesian_learning as bl
import som
import helper as hp
import os.path

if __name__ == '__main__':
    # STRONGLY recommended to choose q3dm1-path1.csv
    point_maps = ["q3dm1-path1.csv", "q3dm1-path2.csv"]
    map_index = input("Enter:\n0 for q3dm1-path1.csv, \n1 for q3dm1-path2.csv")
    points_filename = point_maps[map_index]
    print points_filename
    state_centers_num = input("Enter number of som centers (reasonable: 40-100, max 1300): ")
    activity_centers_num = input("Enter number of activity centers (reasonable: 40-100, max 1300): ")
    som_iterations_num = input("Enter number of som learning iterations; 10-20 is usually enough: ")
    path_iterations_num = input("Enter number of steps to generate path (e.g. 300, 500, ...): ")
    som_centers_filename = "som_centers_" + str(state_centers_num)
    if not os.path.isfile(som_centers_filename):
        # No centers file exists yet for this number of som centers, so learn one.
        print "Learning started"
        som.learn_som(points_filename, state_centers_num, som_iterations_num)
        print "Learning finished"
    path = bl.run_bayesian_learning(points_filename, som_centers_filename,
                                    activity_centers_num, path_iterations_num)
    data_points = hp.read_points(points_filename)
    hp.plot_points(data_points[:, 0], data_points[:, 1], data_points[:, 2],
                   "original path")
    som_centers = hp.read_points(som_centers_filename)
    hp.plot_points(som_centers[:, 0], som_centers[:, 1], som_centers[:, 2],
                   "som centers")
    hp.plot_points(path[:, 0], path[:, 1], path[:, 2], "generated path")
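# helper.plot_points is defined elsewhere; judging from the calls above it
# takes three coordinate arrays and a title. A minimal sketch, ASSUMING a
# matplotlib 3D scatter (the real helper may differ):
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 (enables 3D projection)

def plot_points_sketch(xs, ys, zs, title):
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(xs, ys, zs)
    ax.set_title(title)
    plt.show()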