Example #1
    def __init__(
        self,
        theta1: torch.Tensor,
        theta2: torch.Tensor,
        radius: torch.Tensor,
        center: torch.Tensor,
        distance1: torch.Tensor,
        dt: float = 0.10,
        v_lim: torch.Tensor = torch.ones(1) * 8.0,
    ):
        super().__init__()
        self.theta1 = theta1.unsqueeze(1)
        self.sign = torch.sign(
            angle_normalize(theta2.unsqueeze(1) - self.theta1))
        # A radius of 0 stands in for an infinite radius, i.e. a straight road
        self.radius = radius.unsqueeze(1)
        self.distance1 = distance1.unsqueeze(1)
        self.distances = torch.zeros_like(self.distance1)
        self.center = center
        self.circ_arc = self.radius * math.pi / 2

        self.turns = self.radius != 0

        self.dt = dt
        self.v_lim = v_lim.unsqueeze(1)
        self.v_lim_neg = -self.v_lim

        self.device = torch.device("cpu")
        self.to(self.v_lim.device)
        self.nbatch = self.v_lim.size(0)
Example #2
    def forward(self, state: torch.Tensor, action: torch.Tensor):
        """
        Args:
            state: N x 4 Dimensional Tensor, where N is the batch size, and
                   the second dimension represents
                   {x coordinate, y coordinate, velocity, orientation}
            action: N x 2 Dimensional Tensor, where N is the batch size, and
                    the second dimension represents
                    {steering angle, acceleration}
        """
        dt = self.dt
        x, y, v, theta = [state[:, i:(i + 1)] for i in range(4)]
        steering, acceleration = [action[:, i:(i + 1)] for i in range(2)]

        beta = torch.atan(torch.tan(steering) / 2)

        tb = theta + beta
        vdt = v * dt

        x = x + vdt * torch.cos(tb)
        y = y + vdt * torch.sin(tb)

        v = torch.min(torch.max(v + acceleration * dt, self.v_lim_neg),
                      self.v_lim)

        theta = theta + v * torch.sin(beta) * 2 / self.dim
        theta = angle_normalize(theta)

        return torch.cat([x, y, v, theta], dim=1)
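
The forward pass above is one Euler step of a kinematic bicycle model: the slip angle beta is derived from the steering command, the position is advanced along theta + beta, the speed is integrated and saturated at ±v_lim, and the heading is updated from the slip angle. A minimal standalone sketch of the same update, where angle_normalize is re-implemented as an assumption and wheel_base stands in for the self.dim attribute defined elsewhere in the class:

import math
import torch

def angle_normalize(theta: torch.Tensor) -> torch.Tensor:
    # Assumed behaviour of the helper used above: wrap angles into [-pi, pi).
    return ((theta + math.pi) % (2 * math.pi)) - math.pi

def bicycle_step(state, action, dt=0.1, v_lim=8.0, wheel_base=4.48):
    # state: N x 4 -> {x, y, v, theta}; action: N x 2 -> {steering, acceleration}
    x, y, v, theta = state.unbind(dim=1)
    steering, acceleration = action.unbind(dim=1)
    beta = torch.atan(torch.tan(steering) / 2)            # slip angle
    x = x + v * dt * torch.cos(theta + beta)
    y = y + v * dt * torch.sin(theta + beta)
    v = (v + acceleration * dt).clamp(-v_lim, v_lim)      # saturate the speed
    theta = angle_normalize(theta + v * torch.sin(beta) * 2 / wheel_base)
    return torch.stack([x, y, v, theta], dim=1)

state = torch.tensor([[0.0, 0.0, 2.0, 0.0]])   # at the origin, 2 m/s, facing +x
action = torch.tensor([[0.1, 0.5]])            # slight left steer, accelerate
print(bicycle_step(state, action))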
Example #3
    def __init__(
        self,
        cps: torch.Tensor,
        p_num: int = 5,
        alpha: float = 0.5,
        dt: float = 0.10,
        v_lim: torch.Tensor = torch.ones(1) * 8.0,
    ):
        super().__init__()
        self.dt = dt
        self.v_lim = v_lim.unsqueeze(1)
        self.v_lim_neg = -self.v_lim

        self.device = cps.device
        self.to(self.v_lim.device)
        self.nbatch = v_lim.size(0)

        self.motion = CatmullRomSpline(cps, p_num, alpha)
        self.distances = torch.zeros(self.nbatch, 1, device=self.device)
        diff = self.motion.diff
        ratio = diff[:, :, 1] / (diff[:, :, 0] + EPS)
        self.arc_lengths = self.motion.arc_lengths
        self.curve_lengths = self.motion.curve_length.unsqueeze(1) - 1e-3
        # Assume that last 2 points are not part of the spline.
        self.distance_proxy = ((cps[:, :-3, :] -
                                cps[:, 1:-2, :]).pow(2).sum(-1).sqrt().sum(
                                    -1, keepdim=True))
        self.theta = angle_normalize(
            torch.where(
                diff[:, :, 0] > 0,
                torch.atan(ratio),
                math.pi + torch.atan(ratio),
            )).unsqueeze(-1)
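
The heading computation at the end of the constructor recovers each segment's orientation from the ratio of y- and x-differences, with a branch on the sign of the x-difference. A small sketch showing that, after normalization, this matches torch.atan2 applied to the segment differences (the diff values below are made up for illustration, and angle_normalize is re-implemented as an assumption):

import math
import torch

EPS = 1e-8  # assumed to match the EPS constant used above

def angle_normalize(t):
    return ((t + math.pi) % (2 * math.pi)) - math.pi

# Hypothetical segment differences: diff[..., 0] is dx, diff[..., 1] is dy.
diff = torch.tensor([[[1.0, 0.0], [1.0, 1.0], [-1.0, 1.0], [-1.0, -1.0]]])

ratio = diff[:, :, 1] / (diff[:, :, 0] + EPS)
theta_branch = torch.where(diff[:, :, 0] > 0,
                           torch.atan(ratio),
                           math.pi + torch.atan(ratio))

# atan2 handles the dx <= 0 cases directly and returns angles in (-pi, pi].
theta_atan2 = torch.atan2(diff[:, :, 1], diff[:, :, 0])
print(torch.allclose(angle_normalize(theta_branch), theta_atan2, atol=1e-5))  # True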
Example #4
    def __init__(
        self,
        position: torch.Tensor,  # N x 2
        orientation: torch.Tensor,  # N x 1
        destination: torch.Tensor,  # N x 2
        dest_orientation: torch.Tensor,  # N x 1
        dimensions: torch.Tensor = torch.as_tensor([[4.48, 2.2]]),  # N x 2
        initial_speed: torch.Tensor = torch.zeros(1, 1),  # N x 1
        name: str = "car",
        min_lidar_range: float = 5.0,
        max_lidar_range: float = 50.0,
        vision_range: float = 50.0,
    ):
        super().__init__()
        self.name = name

        self.position = position
        self.orientation = angle_normalize(orientation)
        self.destination = destination
        self.dest_orientation = angle_normalize(dest_orientation)
        self.dimensions = dimensions

        self.nbatch = self.position.size(0)
        self.bool_buffer = torch.zeros(1).bool()

        self.speed = initial_speed
        self.safety_circle = (1.3 * torch.sqrt(
            ((self.dimensions / 2)**2).sum(1, keepdim=True)).detach())
        self.area = math.pi * self.safety_circle**2

        mul_factor = (torch.as_tensor([[1, 1], [1, -1], [-1, -1],
                                       [-1, 1]]).unsqueeze(0).type_as(
                                           self.dimensions))

        self.base_coordinates = mul_factor * self.dimensions.unsqueeze(1) / 2
        self.mul_factor = mul_factor
        self.device = torch.device("cpu")

        self.to(self.position.device)

        self.cached_coordinates = False
        self.coordinates = self._get_coordinates()

        self.max_lidar_range = max_lidar_range
        self.min_lidar_range = min_lidar_range
        self.vision_range = vision_range
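
The safety circle above is 1.3 times the half-diagonal of the vehicle's bounding box, and its area follows directly. A quick sketch of that computation for the default 4.48 m x 2.2 m car:

import math
import torch

dimensions = torch.tensor([[4.48, 2.2]])                      # length, width in metres
half_diagonal = ((dimensions / 2) ** 2).sum(1, keepdim=True).sqrt()
safety_radius = 1.3 * half_diagonal                           # roughly 3.24 m
safety_area = math.pi * safety_radius ** 2                    # roughly 33 m^2
print(safety_radius, safety_area)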
Example #5
    def optimal_heading_to_point(self, point: torch.Tensor):
        vec = point - self.position
        vec = vec / (torch.norm(vec, dim=1, keepdim=True) + 1e-7)  # N x 2
        phi = torch.atan2(vec[:, 1:], vec[:, :1])
        theta = torch.where(
            self.orientation >= 0,
            self.orientation,
            self.orientation + 2 * math.pi,
        )
        return angle_normalize(phi - theta)
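
Because angle_normalize is applied to the difference, shifting a negative orientation by 2π cannot change the result (assuming angle_normalize wraps modulo 2π), so the method effectively returns the normalized difference between the bearing to the target and the current orientation. A standalone sketch with angle_normalize re-implemented as an assumption:

import math
import torch

def angle_normalize(t):
    return ((t + math.pi) % (2 * math.pi)) - math.pi

position = torch.tensor([[0.0, 0.0]])
orientation = torch.tensor([[math.pi / 2]])       # facing +y
point = torch.tensor([[1.0, 1.0]])                # target ahead and to the right

vec = point - position
phi = torch.atan2(vec[:, 1:], vec[:, :1])         # bearing to the target
print(angle_normalize(phi - orientation))         # about -pi/4: steer to the right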
Example #6
    def optimal_heading_to_points(self, points: torch.Tensor):  # N x B x 2
        vec = points - self.position.unsqueeze(1)
        vec = vec / (torch.norm(vec, dim=2, keepdim=True) + 1e-7)  # N x B x 2
        phi = torch.atan2(vec[..., 1:], vec[..., :1])  # N x B x 1
        theta = torch.where(
            self.orientation >= 0,
            self.orientation,
            self.orientation + 2 * math.pi,
        ).unsqueeze(1)  # N x 1 x 1
        diff = phi - theta
        return angle_normalize(diff.view(-1, 1)).view(points.shape[:2])
Example #7
    def add_vehicle(
            self,
            position: torch.Tensor,  # 1 x 2
            orientation: torch.Tensor,  # 1 x 1
            destination: torch.Tensor,  # 1 x 2
            dest_orientation: torch.Tensor,  # 1 x 1
            dimensions: torch.Tensor = torch.as_tensor([[4.48, 2.2]]),  # 1 x 2
            initial_speed: torch.Tensor = torch.zeros(1, 1),  # 1 x 1
    ) -> bool:
        position = position.to(self.device)
        orientation = angle_normalize(orientation.to(self.device))
        dimensions = dimensions.to(self.device)
        base_coordinates = self.mul_factor * dimensions.unsqueeze(1) / 2
        rot_mat = get_2d_rotation_matrix(orientation[:, 0])
        coordinates = torch.matmul(base_coordinates[0], rot_mat) + position
        check = self.collision_check_with_rectangle(
            coordinates, torch.cat([coordinates[1:, :], coordinates[0:1, :]]))

        if check.any():
            return False

        self.position = torch.cat([self.position, position])
        self.orientation = torch.cat([self.orientation, orientation])
        self.destination = torch.cat(
            [self.destination, destination.to(self.device)])
        self.dest_orientation = torch.cat([
            self.dest_orientation,
            angle_normalize(dest_orientation.to(self.device)),
        ])
        self.dimensions = torch.cat([self.dimensions, dimensions])
        self.speed = torch.cat([self.speed, initial_speed.to(self.device)])
        self.base_coordinates = torch.cat(
            [self.base_coordinates, base_coordinates])
        self.coordinates = torch.cat(
            [self.coordinates, coordinates[None, :, :]])
        self.nbatch += 1

        return True
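
add_vehicle places the candidate vehicle by rotating its axis-aligned base corners and translating them to the spawn position before running the collision check. A minimal sketch of that corner computation; rotation_matrix_2d is an assumption about the convention of get_2d_rotation_matrix (here it rotates row vectors by +theta when applied as points @ R):

import torch

def rotation_matrix_2d(theta: torch.Tensor) -> torch.Tensor:
    # Rotates row vectors by +theta when used as `points @ R` (assumed convention).
    c, s = torch.cos(theta), torch.sin(theta)
    return torch.stack([torch.stack([c, s]), torch.stack([-s, c])])

dimensions = torch.tensor([[4.48, 2.2]])
mul_factor = torch.tensor([[1.0, 1.0], [1.0, -1.0], [-1.0, -1.0], [-1.0, 1.0]])
base_coordinates = mul_factor * dimensions / 2     # 4 x 2 axis-aligned corners

position = torch.tensor([[10.0, 5.0]])
orientation = torch.tensor(0.5)                    # radians

corners = base_coordinates @ rotation_matrix_2d(orientation) + position  # 4 x 2
print(corners)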
Example #8
    def nearest_graph_node(
            self,
            pt: torch.Tensor,  # N x 2
            orientation: torch.Tensor,  # N x 1
    ):
        pt = pt.unsqueeze(1)  # N x 1 x 2

        vec = self.vertices.unsqueeze(0) - pt
        distances = vec.pow(2).sum(-1)
        vec = vec / (torch.norm(vec, dim=-1, keepdim=True) + 1e-7)  # N x B x 2

        cur_vec = torch.cat([torch.cos(orientation),
                             torch.sin(orientation)],
                            dim=-1).unsqueeze(1)  # N x 1 x 2
        theta = angle_normalize(
            torch.acos((vec * cur_vec).sum(-1).clamp(-1.0 + 1e-5, 1.0 - 1e-5)))

        return (distances + (theta.abs() > math.pi / 2) * 1e12).argmin(-1)  # N
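
nearest_graph_node selects, for each query point, the closest graph vertex that lies no more than 90° away from the current heading; vertices behind the agent receive a large additive penalty so that argmin skips them. A compact standalone sketch of the same rule on a made-up vertex layout:

import math
import torch

vertices = torch.tensor([[3.0, 0.0], [-4.0, 0.0], [0.0, 6.0]])   # B x 2 graph nodes

pt = torch.tensor([[1.0, 0.0]])               # N x 2 query position
orientation = torch.tensor([[math.pi]])       # N x 1, agent facing -x

vec = vertices.unsqueeze(0) - pt.unsqueeze(1)                    # N x B x 2
distances = vec.pow(2).sum(-1)                                   # squared distances
vec = vec / (torch.norm(vec, dim=-1, keepdim=True) + 1e-7)

heading = torch.cat([torch.cos(orientation),
                     torch.sin(orientation)], dim=-1).unsqueeze(1)  # N x 1 x 2
cos_angle = (vec * heading).sum(-1).clamp(-1.0 + 1e-5, 1.0 - 1e-5)
behind = torch.acos(cos_angle) > math.pi / 2      # more than 90 degrees off-heading

# The nearest vertex (index 0) is behind the agent, so index 1 is returned.
print((distances + behind * 1e12).argmin(-1))     # tensor([1])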
Example #9
    def __init__(
        self,
        position: torch.Tensor,  # N x 2
        dims: torch.Tensor,  # N x 2
        orientation: torch.Tensor,  # N x 1
        velocity: torch.Tensor,  # N x 1
        dt: float = 0.1,
        name: str = "pedestrian",
    ):
        super().__init__()

        self.name = name

        self.position = position
        self.orientation = angle_normalize(orientation)
        self.dimensions = dims
        self.dt = dt

        self.nbatch = self.position.size(0)

        self.speed = velocity
        self.velocity = velocity * torch.cat(
            [
                torch.cos(orientation),
                torch.sin(orientation),
            ],
            dim=-1,
        )

        mul_factor = (torch.as_tensor([[1, 1], [1, -1], [-1, -1],
                                       [-1, 1]]).unsqueeze(0).type_as(
                                           self.dimensions))

        self.base_coordinates = mul_factor * self.dimensions.unsqueeze(1) / 2
        self.mul_factor = mul_factor
        self.device = torch.device("cpu")

        self.to(self.position.device)

        self.cached_coordinates = False
        self.coordinates = self._get_coordinates()