    def __get_nearby_agent_ids(self, radius=None):
        """Get IDs of other vehicles within `radius` of the ego vehicle,
        sorted nearest to the player first. Vehicles more than 4 m above or
        below the ego vehicle (e.g. on roads passing over or under it) are
        excluded."""
        if radius is None:
            radius = self.radius
        player_location = carlautil.transform_to_location_ndarray(
                self.player_transform)
        # When there are roads above or below the ego vehicle,
        # we don't want to include vehicles driving on them.
        upperbound_z = 4
        lowerbound_z = -4
        other_ids = util.map_to_ndarray(lambda v: v.id, self.other_vehicles)
        other_locations = util.map_to_ndarray(
                lambda v: carlautil.transform_to_location_ndarray(v.get_transform()),
                self.other_vehicles)
        distances = np.linalg.norm(other_locations - player_location, axis=1)
        z_displacements = other_locations[:, -1] - player_location[-1]
        df = pd.DataFrame({
                'ids': other_ids,
                'distances': distances,
                'z_displacements': z_displacements})
        df = df[df['distances'] < radius]
        # exclusive bounds; newer pandas spells this inclusive="neither"
        df = df[df['z_displacements'].between(
                lowerbound_z, upperbound_z, inclusive=False)]
        df.sort_values('distances', inplace=True)
        return df['ids'].to_numpy()
    def extract_junction_points(self, tlight_find_radius=25.):
        """Extract junction locations from the map topology, split into
        junctions controlled by traffic lights and uncontrolled junctions.
        A junction counts as controlled if any traffic light lies within
        `tlight_find_radius` meters of its bounding box center."""
        carla_topology = self.carla_map.get_topology()
        junctions = carlautil.get_junctions_from_topology_graph(carla_topology)
        tlights = util.filter_to_list(lambda a: 'traffic_light' in a.type_id,
                self.carla_world.get_actors())
        tlight_distances = np.zeros((len(tlights), len(junctions),))
        f = lambda j: carlautil.location_to_ndarray(j.bounding_box.location)
        junction_locations = util.map_to_ndarray(f, junctions)
        g = lambda tl: carlautil.transform_to_location_ndarray(tl.get_transform())
        tlight_locations = util.map_to_ndarray(g, tlights)
        # tlight_distances[i, j] = distance from traffic light i to junction j
        for idx, junction in enumerate(junctions):
            tlight_distances[:, idx] = np.linalg.norm(
                    tlight_locations - junction_locations[idx], axis=1)
        is_controlled_junction = (tlight_distances < tlight_find_radius).any(axis=0)
        is_uncontrolled_junction = np.logical_not(is_controlled_junction)
        controlled_junction_locations \
                = junction_locations[is_controlled_junction]
        uncontrolled_junction_locations \
                = junction_locations[is_uncontrolled_junction]
        return util.AttrDict(controlled=controlled_junction_locations,
                uncontrolled=uncontrolled_junction_locations)
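# The controlled/uncontrolled split above boils down to a traffic-light-to-junction
# distance matrix followed by a column-wise `any`. A minimal standalone sketch of that
# step (not part of this class; the 25 m radius and the coordinates are made up):
import numpy as np

# Toy stand-ins: 2 traffic lights and 3 junction centers as (x, y, z) points.
tlight_locations = np.array([[10., 0., 0.], [100., 5., 0.]])
junction_locations = np.array([[12., 1., 0.], [60., 0., 0.], [102., 4., 0.]])

# distances[i, j] = distance from traffic light i to junction j.
distances = np.linalg.norm(
        tlight_locations[:, None, :] - junction_locations[None, :, :], axis=-1)

# A junction is "controlled" if any traffic light lies within the search radius.
is_controlled = (distances < 25.).any(axis=0)
print(is_controlled)  # [ True False  True]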
    def __init__(self, carla_world, carla_map, debug=False):
        """
        Parameters
        ----------
        carla_world : carla.World
        carla_map : carla.Map
        debug : bool
        """
        super().__init__(carla_world, carla_map, debug=debug)

        """Generate slope topology of the map"""
        # get waypoints
        wps = self.carla_map.generate_waypoints(4.0)

        def h(wp):
            l = wp.transform.location
            pitch = wp.transform.rotation.pitch
            return np.array([l.x, l.y, l.z, pitch])

        loc_and_pitch_of_wps = util.map_to_ndarray(h, wps)
        self.wp_locations = loc_and_pitch_of_wps[:, :-1]
        self.wp_pitches = loc_and_pitch_of_wps[:, -1]
        self.wp_pitches = self.wp_pitches % 360.
        self.wp_is_sloped = np.logical_and(
                self.SLOPE_DEGREES < self.wp_pitches,
                self.wp_pitches < self.SLOPE_UDEGREES)

        """Generate intersection topology of the map"""
        # get nodes in graph
        topology = self.carla_map.get_topology()
        G = nx.Graph()
        G.add_edges_from(topology)
        tlights = util.filter_to_list(lambda a: 'traffic_light' in a.type_id,
                self.carla_world.get_actors())
        junctions = carlautil.get_junctions_from_topology_graph(G)
        tlight_distances = np.zeros((len(tlights), len(junctions),))
        f = lambda j: carlautil.location_to_ndarray(j.bounding_box.location)
        junction_locations = util.map_to_ndarray(f, junctions)
        g = lambda tl: carlautil.transform_to_location_ndarray(tl.get_transform())
        tlight_locations = util.map_to_ndarray(g, tlights)
        for idx, junction in enumerate(junctions):
            tlight_distances[:, idx] = np.linalg.norm(
                    tlight_locations - junction_locations[idx], axis=1)
        is_controlled_junction = (tlight_distances < self.TLIGHT_FIND_RADIUS).any(axis=0)
        is_uncontrolled_junction = np.logical_not(is_controlled_junction)
        self.controlled_junction_locations \
                = junction_locations[is_controlled_junction]
        self.uncontrolled_junction_locations \
                = junction_locations[is_uncontrolled_junction]
    def __process_lidar_snapshots(self, frames):
        """Add semantic LIDAR points from measurements to the collection of
        points with __sensor_transform_at_t0 as the origin.

        Parameters
        ==========
        frames : list of int
            The frames of LIDAR measurements to merge into the collection,
            sorted in ascending order.
        """
        if not frames:
            return
        collection_of_points = []
        collection_of_labels = []
        collection_of_object_ids = []
        for frame in frames:
            lidar_measurement = self.__lidar_feeds[frame]
            raw_data = lidar_measurement.raw_data
            # sensor-to-world transform at the time of this measurement
            mtx = np.array(lidar_measurement.transform.get_matrix())
            data = np.frombuffer(
                raw_data,
                dtype=np.dtype([
                    ("x", np.float32), ("y", np.float32), ("z", np.float32),
                    ("CosAngle", np.float32), ("ObjIdx", np.uint32),
                    ("ObjTag", np.uint32),
                ]),
            )
            points = np.array([data["x"], data["y"], data["z"]]).T
            labels = data["ObjTag"]
            object_ids = data["ObjIdx"]
            self.__lidar_snapshot_to_populate_vehicle_visibility(
                    lidar_measurement, points, labels, object_ids)
            # convert to homogeneous coordinates and map into the world frame
            points = self.__add_1_to_points(points)
            points = points @ mtx.T
            points = points[:, :3]
            collection_of_points.append(points)
            collection_of_labels.append(labels)
            collection_of_object_ids.append(object_ids)
        if self.__sensor_loc_at_t0 is None:
            lidar_measurement = self.__lidar_feeds[frames[0]]
            loc = carlautil.transform_to_location_ndarray(
                    lidar_measurement.transform)
            self.__sensor_loc_at_t0 = loc
        else:
            collection_of_points = [self.__overhead_points] + collection_of_points
            collection_of_labels = [self.__overhead_labels] + collection_of_labels
            collection_of_object_ids = [self.__overhead_ids] + collection_of_object_ids
        self.__overhead_points = np.concatenate(collection_of_points)
        self.__overhead_labels = np.concatenate(collection_of_labels)
        self.__overhead_ids = np.concatenate(collection_of_object_ids)
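# The per-frame step above maps sensor-frame points into the world frame using
# homogeneous coordinates: append a trailing 1 to each (x, y, z) point, multiply by the
# transpose of the 4x4 sensor pose matrix, and drop the fourth column. A standalone
# sketch with a hand-built pose (the helper below stands in for __add_1_to_points;
# the matrix and points are made up, not taken from CARLA):
import numpy as np

def add_1_to_points(points):
    """Convert an (N, 3) array of points to (N, 4) homogeneous coordinates."""
    return np.concatenate([points, np.ones((points.shape[0], 1))], axis=1)

# Example pose: rotate 90 degrees about z, then translate by (5, 0, 2).
mtx = np.array([
    [0., -1., 0., 5.],
    [1.,  0., 0., 0.],
    [0.,  0., 1., 2.],
    [0.,  0., 0., 1.],
])

points_sensor = np.array([[1., 0., 0.], [0., 2., 0.]])
points_world = (add_1_to_points(points_sensor) @ mtx.T)[:, :3]
print(points_world)  # [[5. 1. 2.]
                     #  [3. 0. 2.]]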
    def old_get_nearby_agent_ids(self, radius=None):
        """Get IDs of all other vehicles within `radius` of the ego vehicle,
        sorted nearest to the player first.

        TODO: don't include other vehicles below / above the ego vehicle
        TODO: delete
        """
        if radius is None:
            radius = self.radius
        player_location = carlautil.transform_to_location_ndarray(
                self.player_transform)
        other_ids = util.map_to_ndarray(lambda v: v.id, self.other_vehicles)
        other_locations = util.map_to_ndarray(
                lambda v: carlautil.transform_to_location_ndarray(v.get_transform()),
                self.other_vehicles)
        distances = np.linalg.norm(other_locations - player_location, axis=1)
        df = pd.DataFrame({'ids': other_ids, 'distances': distances})
        df = df[df['distances'] < radius]
        df.sort_values('distances', inplace=True)
        return df['ids'].to_numpy()
    def __init__(self, frame, phi, world, other_vehicles, player_transforms,
            others_transforms, player_bbox, other_id_ordering=None, radius=200):
        """Generates a history of player and other-vehicle position
        coordinates of length len(player_transforms).

        Parameters
        ----------
        frame : int
            Frame ID of observation
        phi : ESP feed
            Placeholder feed providing S_past_world_frame, S_future_world_frame
            and overhead_features, used to read the B, A, T, D, H, W, C dimensions.
        world : carla.World
        other_vehicles : list of carla.Vehicle
        player_transforms : collections.deque of carla.Transform
            Collection of transforms of player.
            Ordered by timestep where last in deque is current timestep.
        others_transforms : collections.deque of (dict of int : carla.Transform)
            Collection of transforms of other vehicles by vehicle ID.
            Ordered by timestep where last in deque is current timestep.
        player_bbox : carla.BoundingBox
            Player's bounding box.
        A : int
            Number of vehicles in observation, including ego vehicle; read from phi.
            Must be A > 1.
        other_id_ordering : list of int
            IDs of other (not player) vehicles (not walkers).
            All IDs in other_id_ordering belong to some vehicle ID.
        radius : int
        """
        # store inputs on the instance; the rest of the constructor and the
        # __get_nearby_agent_ids() helper reference these attributes
        self.frame = frame
        self.phi = phi
        self.world = world
        self.other_vehicles = other_vehicles
        self.player_transforms = player_transforms
        self.others_transforms = others_transforms
        self.player_bbox = player_bbox
        self.other_id_ordering = other_id_ordering
        self.radius = radius

        _, _, self.T_past, _ = tensoru.shape(self.phi.S_past_world_frame)
        self.B, self.A, self.T, self.D = tensoru.shape(
                self.phi.S_future_world_frame)
        self.B, self.H, self.W, self.C = tensoru.shape(
                self.phi.overhead_features)
        assert (len(player_transforms) == len(others_transforms))
        assert (self.A > 0)

        # player_transform : carla.Transform
        #     transform of current player
        self.player_transform = player_transforms[-1]
        # player_positions_world : ndarray of shape (len(player_transforms), 3)
        self.player_positions_world = carlautil.transforms_to_location_ndarray(
                self.player_transforms)
        # player_positions_local : ndarray of shape (len(player_transforms), 3)
        self.player_positions_local = self.player_positions_world \
                - carlautil.transform_to_location_ndarray(self.player_transform)
        # player_yaw : float
        self.player_yaw = carlautil.transform_to_yaw(self.player_transform)

        if self.other_id_ordering is None:
            # get the IDs of up to A-1 agents within radius of the player;
            # note that other_id_ordering may have size smaller than A-1
            ids = self.__get_nearby_agent_ids()
            self.other_id_ordering = ids[:self.A - 1]

        if self.other_id_ordering.shape != (0,):
            others_positions_list = [None] * len(self.others_transforms)
            for idx, others_transform in enumerate(self.others_transforms):
                others_positions_list[idx] = util.map_to_list(
                        lambda aid: carlautil.transform_to_location_ndarray(
                                others_transform[aid]),
                        self.other_id_ordering)
            # agent_positions_world : ndarray of shape (A-1, len(self.others_transforms), 3)
            self.agent_positions_world = np.array(
                    others_positions_list).transpose(1, 0, 2)
            others_transform = self.others_transforms[-1]
            # agent_yaws : ndarray of shape (A-1,)
            self.agent_yaws = util.map_to_ndarray(
                    lambda aid: carlautil.transform_to_yaw(others_transform[aid]),
                    self.other_id_ordering)
            self.n_missing = max(
                    self.A - 1 - self.agent_positions_world.shape[0], 0)
            self.has_other_agents = True
        else:
            self.agent_positions_world = np.array([])
            self.agent_yaws = np.array([])
            self.n_missing = self.A - 1
            self.has_other_agents = False

        if self.n_missing > 0:
            # pad missing agent slots with positions far away from the player
            faraway_position = carlautil.transform_to_location_ndarray(self.player_transform) \
                    + np.array([0, 300, 0])
            faraway_tile = np.tile(
                    faraway_position, (self.n_missing, len(self.others_transforms), 1))
            if self.n_missing == self.A - 1:
                self.agent_positions_world = faraway_tile
                self.agent_yaws = np.zeros(self.A - 1)
            else:
                self.agent_positions_world = np.concatenate(
                        (self.agent_positions_world, faraway_tile), axis=0)
                self.agent_yaws = np.concatenate(
                        (self.agent_yaws, np.zeros(self.n_missing)), axis=0)

        # agent_positions_local : ndarray of shape (A-1, len(self.others_transforms), 3)
        self.agent_positions_local = self.agent_positions_world \
                - carlautil.transform_to_location_ndarray(self.player_transform)
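# When fewer than A-1 other agents are found, the constructor above pads the missing
# slots with a position 300 m away from the player so downstream arrays keep a fixed
# (A-1, T_hist, 3) shape. A standalone sketch of that padding (A, T_hist, and the
# positions are made-up values):
import numpy as np

A, T_hist = 5, 10                                  # room for A-1 = 4 other agents
player_position = np.array([12., -3., 0.])

agent_positions_world = np.zeros((2, T_hist, 3))   # only 2 of 4 slots were filled
n_missing = (A - 1) - agent_positions_world.shape[0]

faraway_position = player_position + np.array([0., 300., 0.])
faraway_tile = np.tile(faraway_position, (n_missing, T_hist, 1))

agent_positions_world = np.concatenate((agent_positions_world, faraway_tile), axis=0)
agent_yaws = np.concatenate((np.zeros(2), np.zeros(n_missing)), axis=0)
print(agent_positions_world.shape)                 # (4, 10, 3)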
    def save_dataset_sample(self, frame, episode, observation, trajectory_feeds,
            lidar_feeds, player_bbox, lidar_sensor, lidar_params,
            save_directory, make_sample_name, sample_labels):
        """Save one dataset sample.

        The original PRECOG dataset has these relevant keys:

        - episode : int
        - frame : int
        - lidar_params : dict with keys
          ['hist_max_per_pixel', 'val_obstacle', 'meters_max', 'pixels_per_meter']
        - player_future : matrix of shape (20, 3)
        - agent_futures : matrix of shape (4, 20, 3)
        - player_past : matrix of shape (10, 3)
        - agent_pasts : matrix of shape (4, 10, 3)
        - overhead_features : matrix of shape (200, 200, 4)

        Parameters
        ----------
        player_bbox : carla.BoundingBox
            Player's bounding box, used to get vehicle dimensions.
        lidar_sensor : carla.Sensor
        lidar_params : LidarParams
            LIDAR parameters.
        save_directory : str
            Directory to save to.
        make_sample_name : lambda frame
            Function to generate a name for the sample.
        sample_labels : SampleLabelMap
        """
        earlier_frame = frame - self.T
        player_transform, other_id_ordering, feed_dict \
                = trajectory_feeds[earlier_frame]
        ## TODO: Maybe add some check to not save samples without other agents.
        # if not observation.has_other_agents:
        #     logging.debug("Could not obtain other agents for this sample. Skipping.")
        #     return
        observation = observation.copy_with_new_ordering(other_id_ordering)
        lidar_measurement = lidar_feeds[earlier_frame]
        player_past = feed_dict.player_past
        agent_pasts = feed_dict.agent_pasts
        player_yaw = feed_dict.player_yaw
        agent_yaws = feed_dict.agent_yaws
        # futures are expressed relative to the player's transform at the earlier frame
        player_future = observation.player_positions_world[1:self.T + 1, :3] \
                - carlautil.transform_to_location_ndarray(player_transform)
        agent_futures = observation.agent_positions_world[:, 1:self.T + 1, :3] \
                - carlautil.transform_to_location_ndarray(player_transform)
        sample_labels.n_present = observation.n_present
        sample = DrivingSample(
                episode=episode,
                frame=frame,
                lidar_params=lidar_params,
                sample_labels=sample_labels,
                save_directory=save_directory,
                sample_name=make_sample_name(earlier_frame),
                lidar_measurement=lidar_measurement,
                player_bbox=player_bbox,
                player_past=player_past,
                agent_pasts=agent_pasts,
                player_future=player_future,
                agent_futures=agent_futures,
                player_yaw=player_yaw,
                agent_yaws=agent_yaws,
                should_augment=self.should_augment,
                n_augments=self.n_augments)
        sample.save()
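# DrivingSample's internals are not shown here. As a rough sketch only, a PRECOG-style
# sample with the shapes listed in the docstring could be serialized as a compressed
# .npz archive (the file name, the lidar_params values, and the use of
# np.savez_compressed are illustrative assumptions, not necessarily what
# DrivingSample.save does):
import numpy as np

sample = {
    "episode": 3,
    "frame": 1200,
    "player_past": np.zeros((10, 3)),
    "agent_pasts": np.zeros((4, 10, 3)),
    "player_future": np.zeros((20, 3)),
    "agent_futures": np.zeros((4, 20, 3)),
    "overhead_features": np.zeros((200, 200, 4)),
    # dict values are stored as 0-d object arrays; loading them back
    # requires np.load(..., allow_pickle=True)
    "lidar_params": {"hist_max_per_pixel": 25, "val_obstacle": 1.0,
                     "meters_max": 50, "pixels_per_meter": 2},
}
np.savez_compressed("sample_ep003_frame01200.npz", **sample)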