def find_median(paths):
    """Pick the path whose summed distance to every other path is smallest.

    :returns: tuple of (median path: str, error)
    """
    paths = list(paths)
    # Total distance from each candidate path to all paths in the set.
    totals = [
        sum(SOM.path_distance(candidate, other) for other in paths)
        for candidate in paths
    ]
    # XXX What to do if there are more minims?
    median = min_index(totals)
    return paths[median[0]][PATH_CATEGORY], median[1]
def gaussian_kernel_cross_validation(training_set, testing_set, gammas,
                                     sigmas, k_fold):
    """
    Runs Gaussian kernel ridge regression using k-fold cross validation

    Parameters
    ----------
    training_set: ndarray (m, n)
        Training set; last column is the target output
    testing_set: ndarray (m', n)
        Testing set; last column is the target output
    gammas: ndarray (g,)
        Array of regularization parameter gammas
    sigmas: ndarray (s,)
        Array of variance parameter sigmas in the Gaussian kernel
    k_fold: int
        K fold cross validation, i.e. number of disjoint sets in cross
        validation

    Returns
    -------
    astar: ndarray
        Dual weight vector fitted on the whole training set with the best
        gamma and sigma
    train_mse: float
        Training set MSE for the best gamma and sigma
    test_mse: float
        Testing set MSE for the best gamma and sigma
    """
    dimension = training_set.shape[1]
    # get training and testing data (without the target output)
    training_data = training_set[:, 0:dimension - 1]
    testing_data = testing_set[:, 0:dimension - 1]
    data = np.vstack((training_data, testing_data))
    data_size = data.shape[0]
    training_set_size = training_set.shape[0]
    # get observed output for all the data
    training_y = training_set[:, dimension - 1]
    testing_y = testing_set[:, dimension - 1]
    # disjoint index sets defining the k folds over the training rows
    k_training_idx = np.array_split(np.arange(training_data.shape[0]), k_fold)
    cross_validation_errors = np.zeros((len(sigmas), len(gammas)))
    for s_idx in range(sigmas.shape[0]):
        s = sigmas[s_idx]
        # Gram matrix for all training points (reused across gammas/folds)
        K_train_all = get_gaussian_kernel(training_data, s)
        for g_idx in range(gammas.shape[0]):
            g = gammas[g_idx]
            k_scores = np.zeros(k_fold)
            for k in range(k_fold):
                kth_indices = k_training_idx[k]
                # k small training set (our Ktrain = Ktrain, train):
                # drop the validation fold's rows and columns
                k_train = np.delete(K_train_all, kth_indices, axis=0)
                k_train = np.delete(k_train, kth_indices, axis=1)
                k_training_y = np.delete(training_y, kth_indices, axis=0)
                a = kridgereg(k_train, k_training_y, g)
                # k validation set (our Kvalidation = Kvalidation, train):
                # validation rows vs the reduced training columns
                k_validation = K_train_all[kth_indices, :]
                k_validation = np.delete(k_validation, kth_indices, axis=1)
                k_validation_y = training_y[kth_indices]
                k_scores[k] = dualcost(k_validation, k_validation_y, a)
            cross_validation_errors[s_idx, g_idx] = np.mean(k_scores)
    # pick the (sigma, gamma) pair with the lowest mean validation error
    sigma_idx, gamma_idx = utils.min_index(cross_validation_errors)
    best_sigma = sigmas[sigma_idx]
    best_gamma = gammas[gamma_idx]
    # refit on the full training set with the winning hyperparameters
    K_all = get_gaussian_kernel(data, best_sigma)
    K_train_train = K_all[0:training_set_size, :][:, 0:training_set_size]
    astar = kridgereg(K_train_train, training_y, best_gamma)
    train_mse = dualcost(K_train_train, training_y, astar)
    K_test_train = K_all[training_set_size:data_size, :][:, 0:training_set_size]
    test_mse = dualcost(K_test_train, testing_y, astar)
    plot_cross_validation_error(cross_validation_errors, gammas, sigmas)
    # NOTE(review): the printed exponents assume sigmas = 2^{7 + idx/2} and
    # gamma offset by 40 — confirm against how gammas/sigmas are built
    print("\t kernel ridge - best sigma: 2^{%.1f}" % (sigma_idx / 2 + 7))
    print("\t kernel ridge - best gamma: 2^{%d}" % (best_gamma - 40))
    print("\t kernel ridge - MSE train", train_mse)
    print("\t kernel ridge - MSE test", test_mse)
    return astar, train_mse, test_mse
def medoid_of_points(points: List[Point]) -> Point:
    """Return the point in *points* with the smallest medoid cost."""
    costs = [medoid_cost(points, candidate) for candidate in points]
    return points[min_index(costs)]
def stream(tracker, camera=0, server=0): """ @brief Captures video and runs tracking and moves robot accordingly @param tracker The BallTracker object to be used @param camera The camera number (0 is default) for getting frame data camera=1 is generally the first webcam plugged in """ ######## GENERAL PARAMETER SETUP ######## MOVE_DIST_THRESH = 20 # distance at which robot will stop moving SOL_DIST_THRESH = 150 # distance at which solenoid fires PACKET_DELAY = 1 # number of frames between sending data packets to pi OBJECT_RADIUS = 13 # opencv radius for circle detection AXIS_SAFETY_PERCENT = 0.05 # robot stops if within this % dist of axis edges packet_cnt = 0 tracker.radius = OBJECT_RADIUS ######## SERVER SETUP ######## motorcontroller_setup = False if server: # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Obtain server address by going to network settings and getting eth ip server_address = ('169.254.171.10',10000) # CHANGE THIS #server_address = ('localhost', 10000) # for local testing print 'starting up on %s port %s' % server_address sock.bind(server_address) sock.listen(1) connection, client_address = None, None while True: # Wait for a connection print 'waiting for a connection' connection, client_address = sock.accept() break ######## CV SETUP ######## # create video capture object for #cap = cv2.VideoCapture(camera) cap = WebcamVideoStream(camera).start() # WEBCAM #cap = cv2.VideoCapture('../media/goalie-test.mov') #cap = cv2.VideoCapture('../media/bounce.mp4') cv2.namedWindow(tracker.window_name) # create trajectory planner object # value of bounce determines # of bounces. 
0 is default (no bounces) # bounce not currently working correctly planner = TrajectoryPlanner(frames=4, bounce=0) # create FPS object for frame rate tracking fps_timer = FPS(num_frames=20) while(True): # start fps timer fps_timer.start_iteration() ######## CAPTURE AND PROCESS FRAME ######## ret, frame = True, cap.read() # WEBCAM #ret, frame = cap.read() # for non-webcam testing if ret is False: print 'Frame not read' exit() # resize to 640x480, flip and blur frame,img_hsv = tracker.setup_frame(frame=frame, w=640,h=480, scale=1, blur_window=15) ######## TRACK OBJECTS ######## # use the HSV image to get Circle objects for robot and objects. # object_list is list of Circle objects found. # robot is single Circle for the robot position # robot_markers is 2-elem list of Circle objects for robot markers object_list = tracker.find_circles(img_hsv.copy(), tracker.track_colors, tracker.num_objects) robot, robot_markers = tracker.find_robot_system(img_hsv) walls = tracker.get_rails(img_hsv, robot_markers, colors.Yellow) planner.walls = walls # Get the line/distances between the robot markers # robot_axis is Line object between the robot axis markers # points is list of Point objects of closest intersection w/ robot axis # distanes is a list of distances of each point to the robot axis robot_axis = utils.line_between_circles(robot_markers) points, distances = utils.distance_from_line(object_list, robot_axis) planner.robot_axis = robot_axis ######## TRAJECTORY PLANNING ######## # get closest object and associated point, generate trajectory closest_obj_index = utils.min_index(distances) # index of min value closest_line = None closest_pt = None if closest_obj_index is not None: closest_obj = object_list[closest_obj_index] closest_pt = points[closest_obj_index] closest_line = utils.get_line(closest_obj, closest_pt) # only for viewing planner.add_point(closest_obj) # Get trajectory - list of elements for bounces, and final line traj # Last line intersects with robot axis 
traj_list = planner.get_trajectory_list(colors.Cyan) traj = planner.traj ######## SEND DATA TO CLIENT ######## if packet_cnt != PACKET_DELAY: packet_cnt = packet_cnt + 1 else: packet_cnt = 0 # reset packet counter # error checking to ensure will run properly if len(robot_markers) is not 2 or robot is None or closest_pt is None: pass elif server: try: if motorcontroller_setup is False: # send S packet for motorcontroller setup motorcontroller_setup = True ######## SETUP MOTORCONTROLLER ######## axis_pt1 = robot_markers[0].to_pt_string() axis_pt2 = robot_markers[1].to_pt_string() data = 'SM '+axis_pt1+' '+axis_pt2+' '+robot.to_pt_string() print data connection.sendall(data) # setup is done, send packet with movement data else: obj_robot_dist = utils.get_pt2pt_dist(robot, closest_obj) print 'dist: ' + str(obj_robot_dist) # USE FOR CALIBRATION ######## SOLENOID ACTIVATION CODE ######## # check if solenoid should fire if obj_robot_dist <= SOL_DIST_THRESH: # fire solenoid, dont move print 'activate solenoid!' 
# TODO SEND SOLENOID ACTIVATE ######## MOTOR CONTROL ######## # get safety parameters rob_ax1_dist = utils.get_pt2pt_dist(robot_markers[0],robot) rob_ax2_dist = utils.get_pt2pt_dist(robot_markers[1],robot) axis_length = utils.get_pt2pt_dist(robot_markers[0], robot_markers[1]) # ensure within safe bounds of motion relative to axis # if rob_ax1_dist/axis_length <= AXIS_SAFETY_PERCENT or \ # rob_ax2_dist/axis_length <= AXIS_SAFETY_PERCENT: # # in danger zone, kill motor movement # print 'INVALID ROBOT LOCATION: stopping motor' # data = 'KM' # connection.sendall(data) # if in danger zone near axis edge, move towards other edge if rob_ax1_dist/axis_length <= AXIS_SAFETY_PERCENT: print 'INVALID ROBOT LOCATION' data = 'MM '+robot.to_pt_string()+' '+robot_markers[1].to_pt_string() elif rob_ax2_dist/axis_length <= AXIS_SAFETY_PERCENT: print 'INVALID ROBOT LOCATION' data = 'MM '+robot.to_pt_string()+' '+robot_markers[0].to_pt_string() # check if robot should stop moving elif obj_robot_dist <= MOVE_DIST_THRESH: # obj close to robot # Send stop command, obj is close enough to motor to hit data = 'KM' print data connection.sendall(data) pass # Movement code else: # far enough so robot should move #### FOR TRAJECTORY ESTIMATION # if planner.traj is not None: # axis_intersect=shapes.Point(planner.traj.x2,planner.traj.y2) # # Clamp the point to send to the robot axis # traj_axis_pt = utils.clamp_point_to_line( # axis_intersect, robot_axis) # data = 'D '+robot.to_pt_string()+' '+traj_axis_pt.to_string() # connection.sendall(data) #### FOR CLOSEST POINT ON AXIS #### if closest_pt is not None and robot is not None: # if try to move more than length of axis, stop instead if utils.get_pt2pt_dist(robot,closest_pt) > axis_length: print 'TRYING TO MOVE OUT OF RANGE' data = 'KM' print data connection.sendall(data) else: data = 'MM ' + robot.to_pt_string() + ' ' + \ closest_pt.to_string() print data connection.sendall(data) except IOError: pass # don't send anything ######## ANNOTATE FRAME 
FOR VISUALIZATION ######## frame = gfx.draw_lines(img=frame, line_list=walls) frame = gfx.draw_robot_axis(img=frame, line=robot_axis) # draw axis line frame = gfx.draw_robot(frame, robot) # draw robot frame = gfx.draw_robot_markers(frame, robot_markers) # draw markers frame = gfx.draw_circles(frame, object_list) # draw objects # eventually won't need to print this one frame = gfx.draw_line(img=frame, line=closest_line) # closest obj>axis # draw full set of trajectories, including bounces #frame = gfx.draw_lines(img=frame, line_list=traj_list) #frame = gfx.draw_line(img=frame, line=traj) # for no bounces frame = gfx.draw_point(img=frame, pt=closest_pt) #frame=gfx.draw_line(frame,planner.debug_line) # for debug ######## FPS COUNTER ######## fps_timer.get_fps() fps_timer.display(frame) ######## DISPLAY FRAME ON SCREEN ######## cv2.imshow(tracker.window_name,frame) # quit by pressing q if cv2.waitKey(1) & 0xFF == ord('q'): break # release capture cap.stop() # WEBCAM #cap.release() # for testing w/o webcam cv2.destroyAllWindows()
def find_nearest_centroid(centroids, metric, point: Point) -> Point:
    """Return the centroid closest to *point* under *metric*."""
    distances = strictMap(lambda centroid: metric(point, centroid), centroids)
    return centroids[min_index(distances)]