def filter_trajectories():
    """Fetch all raw location updates, filter them, and persist the result.

    Side effects only: reads from and writes back to ``model_datastore``;
    returns None.
    """
    # Parenthesized single-arg print is identical on Python 2 and 3,
    # unlike the original print statement.
    print("Entered function to filter trajectories: BEEF")
    unfiltered = model_datastore.get_all_location_updates()
    filtered_trajectories = model_datastore.filter_trajectories(
        trajectories=unfiltered)
    model_datastore.store_filtered_trajectories(
        filtered_trajectories=filtered_trajectories)
def get_normalized_datastore_trajectories():
    """Load raw trajectories from the datastore and normalize them.

    Pipeline: convert lat/lng dicts to Points, drop trajectories shorter
    than 2 points, collapse successive duplicate positions, scale all
    coordinates by COORDINATE_SCALER, remove points that are too close
    together, and drop any trajectory that ends up shorter than 2 points.

    Returns:
        A list of point lists (each a normalized trajectory).

    Raises:
        ValueError: if fewer than 2 usable trajectories survive cleaning.
    """
    raw_trajectories = model_datastore.get_all_location_updates()
    print("HERE ARE THE RAW TRAJECTORIES: \n" + str(raw_trajectories))

    def dict_list_to_point_list(dict_list):
        # Each dict carries 'lat'/'lng' keys (see the key accesses below).
        # List comprehension keeps the Py2 list-returning behavior of the
        # original map() while also being Py3-portable.
        return [Point(d['lat'], d['lng']) for d in dict_list]

    normal_traj_lists = model_datastore.get_raw_trajectories()
    print("\n\nHERE ARE THE NORMALIZED FORMAT TRAJECTORIES: \n"
          + str(normal_traj_lists))
    print("\n LENGTH OF NORMAL TRAJ LIST IS " + str(len(normal_traj_lists)))

    all_raw_point_lists = []
    for dict_list in normal_traj_lists:
        print("here is what we're trying to go from dict list to point list on: "
              + str(dict_list))
        point_list = dict_list_to_point_list(dict_list)
        if len(point_list) < 2:
            continue  # a trajectory needs at least two points
        traj_list = remove_successive_points_at_same_spots(point_list)
        if len(traj_list) >= 2:
            all_raw_point_lists.append(traj_list)

    if len(all_raw_point_lists) <= 1:
        raise ValueError("length of all raw point lists is "
                         + str(len(all_raw_point_lists)))

    def get_min_dist():
        # Minimum distance between SUCCESSIVE points across all
        # trajectories (debug metric). float('inf') replaces the original
        # arbitrary 1000 sentinel, which under-reported when every gap
        # exceeded 1000.
        min_dist = float('inf')
        for traj in all_raw_point_lists:
            p_iter = iter(traj)
            prev = next(p_iter)  # next() builtin works on Py2.6+ and Py3
            for p in p_iter:
                min_dist = min(min_dist, prev.distance_to(p))
                # BUGFIX: the original never advanced `prev`, so it
                # measured first-point-to-every-point instead of
                # successive-pair distances.
                prev = p
        return min_dist

    print("min dist is " + str(get_min_dist()))

    def get_scaled_trajs(traj_list, scale):
        # Scale every coordinate of every trajectory by `scale`.
        def scale_coordinates(point_list):
            return [Point(p.x * scale, p.y * scale) for p in point_list]
        return [scale_coordinates(traj) for traj in traj_list]

    all_raw_point_lists = get_scaled_trajs(all_raw_point_lists,
                                           COORDINATE_SCALER)
    all_raw_point_lists = [remove_points_too_close(traj)
                           for traj in all_raw_point_lists]
    # Thinning may have shrunk a trajectory below the usable minimum.
    all_raw_point_lists = [traj for traj in all_raw_point_lists
                           if len(traj) >= 2]
    return all_raw_point_lists