Example #1
def OrbitAnimal(cx, cy, radius, speed, altitude, camera_angle, animal):
    """
    @param cx: The x position of our orbit starting location
    @param cy: The y position of our orbit starting location
    @param radius: The radius of the orbit circle
    @param speed: The speed the drone should move; it's hard to take photos when flying fast
    @param altitude: The altitude we want to fly at, don't fly too high!
    @param camera_angle: The angle of the camera
    @param animal: The name of the animal, used to prefix the photos
    """

    x = cx - radius
    y = cy

    # set camera angle
    client.simSetCameraOrientation(0, airsim.to_quaternion(
        camera_angle * math.pi / 180, 0, 0))  # radians

    # move the drone to the requested location
    print("moving to position...")
    client.moveToPositionAsync(
        x, y, z, 1, 60, drivetrain=airsim.DrivetrainType.MaxDegreeOfFreedom, yaw_mode=airsim.YawMode(False, 0)).join()
    pos = client.getMultirotorState().kinematics_estimated.position

    dx = x - pos.x_val
    dy = y - pos.y_val
    yaw = airsim.to_eularian_angles(
        client.getMultirotorState().kinematics_estimated.orientation)[2]

    # keep the drone on target, it's windy out there!
    print("correcting position and yaw...")
    while abs(dx) > 1 or abs(dy) > 1 or abs(yaw) > 0.1:
        client.moveToPositionAsync(
            x, y, z, 0.25, 60, drivetrain=airsim.DrivetrainType.MaxDegreeOfFreedom, yaw_mode=airsim.YawMode(False, 0)).join()
        pos = client.getMultirotorState().kinematics_estimated.position
        dx = x - pos.x_val
        dy = y - pos.y_val
        yaw = airsim.to_eularian_angles(
            client.getMultirotorState().kinematics_estimated.orientation)[2]
        print("yaw is {}".format(yaw))

    print("location is off by {},{}".format(dx, dy))

    o = airsim.to_eularian_angles(
        client.getMultirotorState().kinematics_estimated.orientation)
    print("yaw is {}".format(o[2]))

    # let's orbit around the animal and take some photos
    nav = drone_orbit.OrbitNavigator(photo_prefix=animal, radius=radius, altitude=altitude, speed=speed, iterations=1, center=[
                                     cx - pos.x_val, cy - pos.y_val], snapshots=30, image_dir="./drone_images/")
    nav.start()
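
A minimal call sketch for the function above (my own illustration, not part of the source): the coordinates and animal name are made-up values, and client is assumed to be an already-connected, airborne airsim.MultirotorClient with the module-level z altitude set.

# Hypothetical usage: slow 2 m orbit at 3 m altitude, camera pitched down 30 degrees.
OrbitAnimal(cx=19.8, cy=9.7, radius=2, speed=0.4, altitude=3, camera_angle=-30, animal="AfricanElephant")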
Example #2
def get_relloc_camera(camera_id='1', mode='rot_mat'):
    """
    Function to get position of human relative to the camera

    Parameters
    ----------
        camera_id: ID of the camera from which we want to observe the environment
        mode:   if 'angle', the difference in angles (degrees) is returned
                if 'rot_mat', the rotation matrix is returned. C.dot(T) = O

    Returns
    -------
        rel_pos: Relative position (x, y, z). Numpy array
        rel_orient: if mode == 'angle': Relative orientation (roll, pitch, yaw). Numpy array
                    if mode == 'rot_mat': Rotation matrix is returned
    """
    # Get human's pose
    human_pose = client.simGetObjectPose(HUMAN_ID)
    # Get camera's pose
    camera_pose = client.simGetCameraInfo(camera_id).pose
    drone_pose = client.simGetVehiclePose()

    # Get relative position
    rel_pos = (human_pose.position - camera_pose.position).to_numpy_array()

    if mode == 'angle':
        # Get relative orientation
        human_rpy = np.array(airsim.to_eularian_angles(human_pose.orientation))  # quaternion to Eularian
        camera_rpy = np.array(airsim.to_eularian_angles(camera_pose.orientation))  # quaternion to Eularian
        rel_orient = human_rpy - camera_rpy
    if mode == 'rot_mat':
        # Get rotation matrix
        human_rot = R.from_quat(human_pose.orientation.to_numpy_array()).as_matrix()
        camera_rot = R.from_quat(camera_pose.orientation.to_numpy_array()).as_matrix()

        # Calculate transformation/rotation matrix
        camera_rot_inv = np.linalg.inv(camera_rot)
        rel_orient = camera_rot_inv.dot(human_rot)
        rel_pos = camera_rot_inv.dot(rel_pos)

    com_rot = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]])
    rel_orient = com_rot.dot(rel_orient.dot(com_rot.T))
    rel_pos = com_rot.dot(rel_pos)

    rot = R.from_matrix(rel_orient)
    rel_orient = rot.as_euler('zyx', degrees=True)

    return rel_pos, rel_orient
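
A quick self-check (my own addition, not from the source) of the com_rot permutation used above: it reorders an (x, y, z) vector into (y, z, x).

import numpy as np

com_rot = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]])
# (x, y, z) -> (y, z, x), the same axis swap applied to rel_pos and rel_orient above
assert np.array_equal(com_rot.dot(np.array([1.0, 2.0, 3.0])), np.array([2.0, 3.0, 1.0]))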
Example #3
    def calculate_camera(self, v_identifier):
        """
        Calculate camera heading
        """
        current_pos_state = self.client.getMultirotorState(
        ).kinematics_estimated
        current_pos_local = np.asarray([
            current_pos_state.position.x_val, current_pos_state.position.y_val,
            current_pos_state.position.z_val
        ])
        # print("now:"+str(current_pos_local))

        camera_angle = self.calculate_camera_to_center(current_pos_local)
        global g_current_orientation_pitch, g_current_orientation_roll, g_current_orientation_yaw
        g_current_orientation_pitch = camera_angle[0]
        g_current_orientation_roll = 0
        g_current_orientation_yaw = camera_angle[
            2] - airsim.to_eularian_angles(current_pos_state.orientation)[2]
        #g_current_orientation_yaw = camera_angle[2]

        # log_file("camera",
        #          v_identifier+":desired orientation:" + str(g_current_orientation_pitch) + ","
        #          + str(g_current_orientation_yaw))

        self.client.simSetCameraOrientation(
            "front_center",
            airsim.to_quaternion(g_current_orientation_pitch,
                                 g_current_orientation_roll,
                                 g_current_orientation_yaw))
Example #4
    def compute_observation(self):
        # retrieve visual observation
        image_type = ImageInfo[self.image_type]
        responses = self.client.simGetImages([airsim.ImageRequest(0, image_type.index, image_type.as_float, False)])
        response = responses[0]

        if image_type.as_float:
            img1d = np.array(response.image_data_float, dtype=np.float64)  # np.float is removed in recent NumPy
            img1d = 255 / np.maximum(np.ones(img1d.size), img1d)
            img2d = np.reshape(img1d, (response.height, response.width))
            image = np.expand_dims(Image.fromarray(img2d).convert('L'), axis=2)
        else:
            # get numpy array
            img1d = np.frombuffer(response.image_data_uint8, dtype=np.uint8)  # fromstring is deprecated for binary data
            image = img1d.reshape(response.height, response.width, image_type.channel_size)
            image = np.ascontiguousarray(image, dtype=np.uint8)

        # retrieve poses for both human and robot
        pose = self.client.simGetVehiclePose()
        r = self._distance_to_goal(pose.position)
        yaw = airsim.to_eularian_angles(pose.orientation)[2]
        phi = np.arctan2(self.goal_position[1] - pose.position.y_val, self.goal_position[0] - pose.position.x_val) - yaw
        goal = Goal(r, phi)
        logging.debug('Goal distance: {:.2f}, relative angle: {:.2f}'.format(r, np.rad2deg(phi)))

        observation = Observation(image, goal)

        return observation
Example #5
def animate(i, xs, ys, client):
    # Acquire the current vehicle pose from AirSim
    vehiclePose = client.simGetVehiclePose()
    position = vehiclePose.position
    orientation = airsim.to_eularian_angles(vehiclePose.orientation)
    print(orientation[0], orientation[1], orientation[2])
    # Add x and y to lists
    xs.append(position.x_val)
    ys.append(position.y_val)
    # rs.append(0.5)

    # Limit x and y lists to 20 items
    # xs = xs[-20:]
    # ys = ys[-20:]

    # Draw x and y lists
    ax.clear()
    ax.plot(xs, ys, label="Experimental Probability")
    # ax.quiver
    yaw = orientation[2]
    new_x = np.cos(yaw)
    new_y = np.sin(yaw)
    ax.quiver(position.x_val, position.y_val, new_x, new_y)
    # ax.plot(xs, rs, label="Theoretical Probability")
    ax.set_ylim([-200, 200])
    ax.set_xlim([-200, 200])
    # ax.set_ylim(np.min(ys) - np.std(ys),np.max(ys) + np.std(ys))
    # ax.set_xlim(np.min(xs) - np.std(xs), np.max(xs) + np.std(xs))
    # Format plot
    plt.xticks(rotation=45, ha='right')
    plt.subplots_adjust(bottom=0.30)
    plt.title('This is how I roll...')
    plt.ylabel('Relative frequency')
    plt.legend()
Example #6
def getParamExtrinsicFollow():

    # http://ksimek.github.io/2012/08/22/extrinsic/
    global state, posIndex

    uavX = state[posIndex][0].kinematics_estimated.position.x_val
    uavY = state[posIndex][0].kinematics_estimated.position.y_val
    uavZ = state[posIndex][0].kinematics_estimated.position.z_val

    uavPitch, uavRoll, uavYaw = airsim.to_eularian_angles(
        state[posIndex][0].kinematics_estimated.orientation)

    # rotation = R.from_euler('xyz', [90, 0, 180], degrees=True)
    rotationX = 90 + np.degrees(uavPitch)
    rotationY = np.degrees(uavRoll)
    rotationZ = 90 + np.degrees(uavYaw)

    rotation = R.from_euler('xyz', [rotationX, rotationY, rotationZ],
                            degrees=True)
    Rc = rotation.as_matrix()  # as_dcm() is deprecated in newer SciPy
    C = np.array([uavX, uavY, uavZ])

    t = np.dot(-Rc.T, C)

    # Note that Rc here should really be the transpose (Rc.T); a possible fix is
    # sketched after this example.
    params = np.vstack((Rc, [t])).T

    # to have a square matrix
    params = np.vstack((params, [[0, 0, 0, 1]]))

    return params
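
If the transpose flagged in the comment above is needed, one common way to assemble the world-to-camera extrinsic from the referenced article is sketched below. This is my own sketch reusing Rc and C as computed in the function, not the author's verified fix.

import numpy as np

def extrinsic_from_rotation_and_center(Rc, C):
    # Standard extrinsic form [R^T | -R^T C] with a homogeneous bottom row.
    params = np.eye(4)
    params[:3, :3] = Rc.T
    params[:3, 3] = -Rc.T.dot(C)
    return params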
Example #7
def animate(i, xs, ys, client):
    throttle, steering = SimpleDrive(client)
    # Acquire the current vehicle pose from AirSim
    vehiclePose = client.simGetVehiclePose()
    position = vehiclePose.position
    orientation = airsim.to_eularian_angles(vehiclePose.orientation)
    # Add x and y to lists
    xs.append(position.x_val)
    ys.append(position.y_val)
    steerings.append(steering)
    throttles.append(throttle)
    # rs.append(0.5)
    # Limit x and y lists to 20 items
    # xs = xs[-20:]
    # ys = ys[-20:]
    # Draw x and y lists
    ax[0].clear()
    ax[1].clear()
    # ax.plot(xs, ys, label="Experimental Probability")
    # ax.quiver
    yaw = orientation[2]
    new_x = np.cos(yaw)
    new_y = np.sin(yaw)
    # ax.quiver(position.x_val, position.y_val, new_x, new_y)
    ff = simpleDriver.get_objective_func()
    x = np.linspace(0, 1000)
    ax[0].plot(x, list(map(ff, x)))
    # Steering
    ax[1].plot(steerings)
Example #8
def reset(client, scene=None):
    """
    Called from the reset method in AirsimEnv. This method resets the AirSim simulation, respawns the UAV etc.
    :param client: AirsimEnv object
    :param scene: The AirSim scene
    :return: None
    """
    client.reset()
    if scene is not None:
        time.sleep(0.2)
        pose = client.simGetVehiclePose()
        start_pos = valid_spawn(scene)
        pose.position.x_val = start_pos[0]
        pose.position.y_val = start_pos[1]
        pose.position.z_val = 0
        pitch, roll, yaw = airsim.to_eularian_angles(pose.orientation)
        yaw = np.random.rand() * 2 * np.pi
        pose.orientation = airsim.to_quaternion(pitch, roll, yaw)
        client.simSetVehiclePose(pose, True)

    time.sleep(0.2)
    client.enableApiControl(True)
    client.armDisarm(True)
    hover(client)
    custom_takeoff(client)
    hover(client)
Example #9
def vectorTo3D(pixelX, pixelY, camInfo, depthImage, maxDistView = None, vectorDistances = None):
    """From image pixels (2D) to relative(!) 3D coordinates"""

    if isinstance(maxDistView, float) or isinstance(maxDistView, int):
        depthImage[ depthImage > maxDistView] = maxDistView

    height, width = depthImage.shape
    # print(f"Image size: width:{width} -- height:{height}")
    halfWidth = width/2
    halfHeight= height/2

    camPitch, camRoll,camYaw = airsim.to_eularian_angles(camInfo.pose.orientation)

    # to radians (the FoV is in degrees)
    hFoV = np.radians(camInfo.fov)
    vFoV = (height/width)*hFoV

    pointsH = np.array(pixelY, dtype=int)
    pointsW = np.array(pixelX, dtype=int)

    pixelPitch = ((pointsH-halfHeight)/halfHeight) * (vFoV/2)
    pixelYaw = ((pointsW-halfWidth)/halfWidth) * (hFoV/2)

    theta = (np.pi/2) - pixelPitch
    # turn
    phi = pixelYaw

    r = vectorDistances

    x = r*np.sin(theta)*np.cos(phi)
    y = r*np.sin(theta)*np.sin(phi)
    z = r*np.cos(theta)

    return x, y, z
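
A hypothetical call sketch for vectorTo3D (my own, not from the source), assuming camera "0" and a connected client; airsim.list_to_2d_float_array reshapes the flat depth response into a height-by-width array.

import airsim

client = airsim.MultirotorClient()
client.confirmConnection()
cam_info = client.simGetCameraInfo("0")
response = client.simGetImages([airsim.ImageRequest("0", airsim.ImageType.DepthPerspective, True)])[0]
depth = airsim.list_to_2d_float_array(response.image_data_float, response.width, response.height)
# Project the centre pixel; its depth value doubles as the ray distance here.
u, v = response.width // 2, response.height // 2
x, y, z = vectorTo3D([u], [v], cam_info, depth, maxDistView=100.0, vectorDistances=depth[v, u])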
Example #10
    def angluarVelocityThrottleController(self,
                                          cnet,
                                          q,
                                          vcy,
                                          R_cb=np.array([[0, 0, 1], [1, 0, 0],
                                                         [0, 1, 0]])):
        """
        - function: angular-rate control method from 2526
        - params:
            - cnet: target coordinates in the image
            - q: aircraft attitude quaternion in the world frame
            - vcy: aircraft vertical velocity
            - R_cb: rotation matrix from the camera frame to the body frame. By default the optical
              axis is aligned with the nose, the horizontal image axis is xc, the optical axis is zc,
              and the body frame is FRD
        - return:
            - cmd: [roll_rate, pitch_rate, yaw_rate, throttle], three-axis angular rates and throttle, FRD
        """
        ex, ey = cnet[0] - self.x0, cnet[1] - self.y0
        print("ex: {}, ey: {}".format(ex, ey))
        pitch, roll, yaw = airsim.to_eularian_angles(q)
        print("pitch: {}, roll: {}, yaw: {}, vcy: {}".format(
            pitch, roll, yaw, vcy))

        wcx = -self.k2 * ey - self.k3 * (pitch - self.theta_d)
        print(
            "-self.k2*ey: {}, self.theta_d: {}, -self.k3*(pitch-self.theta_d): {}"
            .format(-self.k2 * ey, self.theta_d,
                    -self.k3 * (pitch - self.theta_d)))
        wcy = saturation(self.k5 * ex, 1)
        wcz = self.k6 * (roll - self.phi_d)
        print("wcx: {}, wcy: {}, wcz: {}".format(wcx, wcy, wcz))
        wbx, wby, wbz = R_cb.dot(np.array([wcx, wcy, wcz]))
        throttle = self.hover * (self.k4 / self.g *
                                 (vcy - self.k1 * ey) + 1) / np.cos(pitch)
        return wbx, wby, wbz, throttle
Example #11
    def takeAction(self, action):
        position = self.client.simGetVehiclePose().position
        yaw = airsim.to_eularian_angles(
            self.client.simGetVehiclePose().orientation)[2]
        if action == 0:
            yaw_change = 0.0
        elif action == 1:
            yaw_change = 30.0
        elif action == 2:
            yaw_change = -30.0

        action_yaw = yaw + math.radians(yaw_change)  # yaw is in radians, yaw_change in degrees

        vx = math.cos(action_yaw)
        vy = math.sin(action_yaw)
        v_norm = np.linalg.norm([vx, vy])

        vx = vx / v_norm * self.MOVE_RATE
        vy = vy / v_norm * self.MOVE_RATE
        new_x = position.x_val + vx
        new_y = position.y_val + vy
        new_pose = airsim.Pose(airsim.Vector3r(new_x, new_y, self.HEIGHT),
                               airsim.utils.to_quaternion(0, 0, action_yaw))
        self.client.simSetVehiclePose(new_pose, False)
        # time.sleep(self.CV_SLEEP_TIME)

        collided = self.checkCollision()

        return collided
Example #12
    def AE(self, goal, vehicle_name=''):

        pos = self.simGetGroundTruthKinematics(vehicle_name="").position

        xdistance = (goal[0] - (pos.x_val))
        ydistance = (goal[1] - (pos.y_val))
        zdistance = (0 - (pos.z_val))

        #elevation=math.atan((zdistance)/np.sqrt(np.power(xdistance,2)+np.power(ydistance,2)))
        #elevation=((math.degrees(elevation) - 90) % 360) - 90

        elevation = math.atan2(
            (zdistance),
            np.sqrt(np.power(xdistance, 2) + np.power(ydistance, 2)))
        elevation = (math.degrees(elevation))

        pitch, roll, yaw = airsim.to_eularian_angles(
            client.simGetVehiclePose(vehicle_name="").orientation)
        yaw = math.degrees(yaw)

        pos_angle = math.atan2(goal[1] - pos.y_val, goal[0] - pos.x_val)
        pos_angle = math.degrees(pos_angle) % 360

        track = math.radians(pos_angle - yaw)

        #track=((track - math.pi) % 2*math.pi) - math.pi
        track = ((math.degrees(track) - 180) % 360) - 180

        #track=reMap(track,180,-180,1,-1)

        AE = np.array([track, elevation])

        return AE
Example #13
def collisionCorrection(timeOutCollision=300):

    global controllers

    wayPointDict = {}
    for ctrl in controllers:
        wayPointDict[ctrl.getName()] = 0
    time.sleep(timeOutCollision)

    running = True

    while running:

        time.sleep(timeOutCollision)

        for ctrl in controllers:

            if ctrl.getWayPoint() == wayPointDict[ctrl.getName()]:

                print(
                    f"{ctrl.getName()} is at the same spot after {timeOutCollision}[sec]. Check collision"
                )

                try:
                    state = ctrl.getState()
                    print(f"{ctrl.getName()} got state")
                except:
                    print(f"{ctrl.getName()} could not get state and failed")

                try:
                    if state.collision.has_collided:

                        print(f"{ctrl.getName()} has collided ...")

                        stateList = ctrl.getStateList()
                        x = stateList[-2].kinematics_estimated.position.x_val
                        y = stateList[-2].kinematics_estimated.position.y_val
                        z = stateList[-2].kinematics_estimated.position.z_val
                        _, _, yaw = airsim.to_eularian_angles(
                            stateList[-3].kinematics_estimated.orientation)

                        task = ctrl.moveToPositionYawModeAsync(x, y, z, 1, yaw)
                        task.join()

                        print(
                            f"{ctrl.getName()} moved successfully to previous position"
                        )

                except:
                    print(
                        f"{ctrl.getName()} failed at moving drone. Debug ...")
                    # import pdb; pdb.set_trace()

            else:
                print(
                    f"{ctrl.getName()} has changed spot. Update wayPointDict")
                wayPointDict[ctrl.getName()] = ctrl.getWayPoint()

            if ctrl.getWayPoint() >= (ctrl.wayPointSize - 2):
                running = False
Example #14
def get_orientation(client):
    """
    Makes call to AirSim and returns orientation (yaw angle)
    :param client: AirsimEnv object
    :return: Orientation (yaw) of agent in radians.
    """
    q = client.simGetGroundTruthKinematics().orientation
    return airsim.to_eularian_angles(q)[2]
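
A trivial usage sketch (my own): print the heading in degrees, assuming client is a connected airsim.MultirotorClient.

import math

heading_deg = math.degrees(get_orientation(client))
print("heading: {:.1f} deg".format(heading_deg))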
Example #15
def getCarOrientation(car_state):
    """将弧度转换成0-360度,注意原来的弧度是-3.14到3.14,
       且y坐标的正向朝下,角度正方向为顺时针方向
       return: 0-360
    """
    _, _, arc = airsim.to_eularian_angles(
        car_state.kinematics_estimated.orientation)

    # the docstring promises degrees in [0, 360); convert before returning
    return math.degrees(arc) % 360
Example #16
    def straight(self, duration, speed):
        pitch, roll, yaw = airsim.to_eularian_angles(
            self.client.simGetVehiclePose().orientation)
        vx = math.cos(yaw) * speed
        vy = math.sin(yaw) * speed
        self.client.moveByVelocityZAsync(
            vx, vy, self.z, duration,
            airsim.DrivetrainType.ForwardOnly).join()

        start = time.time()
        return start, duration
Example #17
    def moveByVolocity(self, velocity, angle):
        pitch, roll, self.yaw = airsim.to_eularian_angles(
            self.client.simGetVehiclePose().orientation)
        self.yaw = (self.yaw + angle)
        self.vx = velocity * math.cos(self.yaw)
        self.vy = velocity * math.sin(self.yaw)
        if (self.vx == 0 and self.vy == 0):
            self.vx = velocity * math.cos(self.yaw)
            self.vy = velocity * math.sin(self.yaw)
        self.client.moveByVelocityZAsync(self.vx, self.vy, -2, 0.5,
                                         airsim.DrivetrainType.ForwardOnly,
                                         airsim.YawMode(False, 0)).join()
Example #18
    def straight(self, speed):
        pitch, roll, yaw = airsim.to_eularian_angles(
            self.client.simGetVehiclePose().orientation)
        dx = math.cos(yaw) * speed
        dy = math.sin(yaw) * speed
        x = self.client.simGetVehiclePose().position.x_val
        y = self.client.simGetVehiclePose().position.y_val
        # client.moveByVelocityAsync() causes z position to dip too much due to lack of PID control
        self.client.moveToPositionAsync(x + dx, y + dy, self.z, speed,
                                        airsim.DrivetrainType.ForwardOnly)
        init_time = time.time()
        return init_time
Example #19
    def goalDirection(self, goal, pos):

        yaw = airsim.to_eularian_angles(
            self.client.simGetVehiclePose().orientation)[2]
        yaw = math.degrees(yaw)

        pos_angle = math.atan2(goal[1] - pos.y_val, goal[0] - pos.x_val)
        pos_angle = math.degrees(pos_angle) % 360

        track = math.radians(pos_angle - yaw)

        return ((math.degrees(track) - 180) % 360) - 180
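
A quick arithmetic check of the wrap-to-[-180, 180) expression used above (my own verification, not from the source):

# 350 deg wraps to -10 deg, and -190 deg wraps to 170 deg
assert ((350 - 180) % 360) - 180 == -10
assert ((-190 - 180) % 360) - 180 == 170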
Example #20
def load_airsim(airsim_client):
    rpcinfo = airsim_client.getMultirotorState()
    gps_location = rpcinfo.gps_location
    kinematics_estimated = rpcinfo.kinematics_estimated
    pitch, roll, yaw = airsim.to_eularian_angles(
        rpcinfo.kinematics_estimated.orientation)
    homepoint = airsim_client.getHomeGeoPoint()
    global way_point_status
    global initialize_height
    if initialize_height != 0:
        height_above_takeoff = -kinematics_estimated.position.z_val + initialize_height
    else:
        height_above_takeoff = initialize_height
    telemetry = {
        "battery_state": {
            "percentage": 70.04
        },
        "distance_from_home": 1561.4,
        "gimbal": {
            "roll": roll,
            "pitch": pitch,
            "yaw": yaw
        },
        "height_above_takeoff": height_above_takeoff,
        "gps_health": 5,
        "heading": math.degrees(yaw),
        "velocity": {
            "x": kinematics_estimated.linear_velocity.x_val,
            "y": kinematics_estimated.linear_velocity.y_val,
            "z": kinematics_estimated.linear_velocity.z_val
        },
        "gps_position": {
            "latitude": gps_location.latitude,
            "altitude": gps_location.altitude,
            "longitude": gps_location.longitude
        },
        "last_change_time": rpcinfo.timestamp,
        "lastHome": {
            "latitude": homepoint.latitude,
            "operationalAlt": homepoint.altitude,
            "longitude": homepoint.longitude
        },
        "owner": "droneService",
        "state": {
            "armed": True
        },
        "wayPoints": {
            "status": way_point_status
        },
        "keepAlive": rpcinfo.timestamp
    }
    return telemetry
Example #21
def get_relloc_vehicle(mode='rot_mat'):
    """
    Function to get position of human relative to the drone

    Parameters
    ----------
        mode:   if 'angle', the difference in angles (degrees) is returned
                if 'rot_mat', the rotation matrix is returned. C.dot(T) = O

    Returns
    -------
        rel_pos: Relative position (x, y, z). Numpy array
        rel_orient: if mode == 'angle': Relative orientation (roll, pitch, yaw). Numpy array
                    if mode == 'rot_mat': Rotation matrix is returned
    """

    # Get human's pose
    human_pose = client.simGetObjectPose(HUMAN_ID)
    # Get drone's pose
    drone_pose = client.simGetVehiclePose()

    # Get relative position
    rel_pos = (human_pose.position - drone_pose.position).to_numpy_array()

    if mode == 'angle':
        # Get relative orientation
        human_rpy = np.array(airsim.to_eularian_angles(human_pose.orientation))  # quaternion to Eularian
        drone_rpy = np.array(airsim.to_eularian_angles(drone_pose.orientation))  # quaternion to Eularian
        rel_orient = human_rpy - drone_rpy
    if mode == 'rot_mat':
        # Get rotation matrix
        human_rot = R.from_quat(human_pose.orientation.to_numpy_array()).as_matrix()
        drone_rot = R.from_quat(drone_pose.orientation.to_numpy_array()).as_matrix()

        # Calculate transformation/rotation matrix
        drone_rot_inv = np.linalg.inv(drone_rot)
        rel_orient = drone_rot_inv.dot(human_rot)
        rel_pos = drone_rot_inv.dot(rel_pos)

    return rel_pos, rel_orient
Example #22
    def step(self, action_id):

        self.client.enableApiControl(True)
        self.client.armDisarm(True)

        self.steps += 1

        action = self.id2action(action_id)

        if self.stepped_simulation:
            self.client.simPause(False)

        if self.drone:
            # transform velocity according to the drone's orientation (there is probably a more elegant way to do this)
            yaw = airsim.to_eularian_angles(
                self.client.simGetVehiclePose().orientation)[2]

            v_x = (action[DroneActIdx.VX.value] * np.cos(yaw) -
                   action[DroneActIdx.VY.value] * np.sin(yaw))
            v_y = (action[DroneActIdx.VX.value] * np.sin(yaw) +
                   action[DroneActIdx.VY.value] * np.cos(yaw))

            self.client.moveByVelocityAsync(
                v_x,
                v_y,
                action[DroneActIdx.VZ.value],
                self.step_duration,
                yaw_mode=airsim.YawMode(
                    is_rate=True,
                    yaw_or_rate=action[DroneActIdx.Yaw.value])).join()
#            self.client.moveByAngleThrottleAsync(action[DroneActIdx.Pitch.value], action[DroneActIdx.Roll.value],
#                                                 action[DroneActIdx.Throttle.value], action[DroneActIdx.Yaw.value],
#                                                 self.step_duration).join()

        else:
            self.client.moveByVelocity(action[CarActIdx.VX.value],
                                       action[CarActIdx.VY.value],
                                       action[CarActIdx.VZ.value],
                                       self.step_duration).join()

        if self.stepped_simulation:
            self.client.simContinueForTime(self.step_duration)

        state = self._get_state()
        step_reward = self._reward_function(state)
        terminal = self._terminal_state(state)

        self.acc_reward += step_reward

        return state, step_reward, terminal
Example #23
    def Control_cloth(self):
        pos_a = self.client.simGetVehiclePose().position.to_numpy_array()[0:2]
        orientation = self.client.getCarState(
        ).kinematics_estimated.orientation
        pitch, roll, yaw = airsim.to_eularian_angles(orientation)
        cones_right_in, cones_left_in = self.get_cone(self.lidar_R)
        cones_left_in = cones_left_in - pos_a
        cones_right_in = cones_right_in - pos_a
        angle = -yaw
        if not self.is_drive:
            cones_left_in_rot = np.array([
                self.rotate_point(0, 0, angle, point)
                for point in cones_left_in
            ])
            cones_right_in_rot = np.array([
                self.rotate_point(0, 0, angle, point)
                for point in cones_right_in
            ])
            c_l = cones_left_in_rot.copy()
            c_r = cones_right_in_rot.copy()
            ind_l = np.where(c_l[:, 0] > 0)
            ind_r = np.where(c_r[:, 0] > 0)
            c_l = c_l[ind_l]
            c_r = c_r[ind_r]
            c_ = np.concatenate([c_l, c_r], axis=0)
            self.c_kB, self.c_kA = self.optimization_cloth(c_l, c_r)
            print(f"Оптимальная клотоида найдена!")
            # Плавные радиусы поворотов
            ss = np.linspace(0, self.c_lenght, 30)
            self.opt_radius = self.c_lenght / ((self.c_kB - self.c_kA) * ss)
            self.is_drive = True

        if self.i > 28:
            self.is_drive = False
            self.i = 0
        r = self.opt_radius[int(self.i)]
        self.i = self.i + 1
        if self.c_kB > 0:

            s = self.steering_func_p(r)
            print("+")
        else:
            s = self.steering_func_n(r)
            print("-")
        # if r>80:
        #    s  = 0
        print(f"Оптимальный радиус поворота {r}")
        print(f"Оптимальный угол поворота в AirSim {s}")
        return s, self.c_kB, self.c_kA, self.c_lenght
Example #24
    def move(self, distance):
        """This function will move the platform a given distance in m.

        :param distance: distance in meters.
        """
        yaw = (
            airsim.to_eularian_angles(
                self.client.getMultirotorState(vehicle_name=self.name).kinematics_estimated.orientation)[2])
        position = self.get_position()
        offset = list(self.calculate_offset(distance, yaw))
        self.move_a_to_b(
            airsim.Vector3r(position.x_val + offset[0],
            position.y_val + offset[1],
            position.z_val))
        time.sleep(self.delay)
Example #25
def _move_by_positions(client: airsim.MultirotorClient,
                       args: argparse.Namespace) -> None:
    for position, orientation in args.viewpoints:
        _pitch, _roll, yaw = airsim.to_eularian_angles(
            airsim.Quaternionr(*orientation))

        client.moveToPositionAsync(
            *position,
            args.flight_velocity,
            drivetrain=airsim.DrivetrainType.MaxDegreeOfFreedom,
            yaw_mode=airsim.YawMode(is_rate=False, yaw_or_rate=yaw),
        ).join()

        client.hoverAsync().join()
        time.sleep(2)
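
The loop above expects args.viewpoints to yield (position, orientation) pairs. A hypothetical value (my own illustration, with positions in NED metres and orientations as (x, y, z, w) quaternions) could look like this:

viewpoints = [
    ([0.0, 0.0, -10.0], [0.0, 0.0, 0.0, 1.0]),        # 10 m up, facing north
    ([5.0, 0.0, -10.0], [0.0, 0.0, 0.7071, 0.7071]),  # same altitude, yawed 90 degrees
]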
Example #26
    def yawrateVzController(self, cent, q):
        """
        - function: commonly used control method: linear velocity plus yaw rate
        - params:
            - cent: target coordinates in the image
            - q: aircraft attitude quaternion in the world frame
        - return:
            - cmd: [vx, vy, vz, yawrate], linear velocity and yaw rate in the world frame
        """
        ex, ey = cent[0] - self.width / 2, cent[1] - self.height / 2
        pitch, roll, yaw = airsim.to_eularian_angles(q)
        print(pitch, roll, yaw)
        return self.velocity * np.cos(yaw), \
               self.velocity * np.sin(yaw), \
               self.kz * ey, \
               self.kx * ex
Example #27
    def get_cone_arc(self):
        R = 16
        FOV = 180
        pos_a = self.client.simGetVehiclePose().position.to_numpy_array()[0:2]
        orientation = airsim.to_eularian_angles(
            self.client.simGetVehiclePose().orientation)
        yaw = orientation[2]
        arc_pnts = self.get_arc_pnts(R, np.radians(FOV), yaw, pos_a[0],
                                     pos_a[1])

        cones_left_in_circle, cones_right_in_circle = self.get_cone(R)
        cones_left_in_arc = self.find_in_contour(cones_left_in_circle,
                                                 arc_pnts, R, np.radians(FOV),
                                                 pos_a[0], pos_a[1], yaw)
        cones_right_in_arc = self.find_in_contour(cones_right_in_circle,
                                                  arc_pnts, R, np.radians(FOV),
                                                  pos_a[0], pos_a[1], yaw)
        return cones_left_in_arc, cones_right_in_arc
Example #28
    def build_camera_to_world(self, position, orientation):
        # Camera coordinate frame (Z forward, X right, Y down)
        # doesn't correspond to the world frame (X forward, Y right, Z down)
        # Thus we should swap axes: XYZ -> YZX
        M = np.eye(4, dtype=np.float32)
        M[0:3, 3] = np.array(
            [position['y_val'], position['z_val'], position['x_val']])

        (pitch, roll, yaw) = airsim.to_eularian_angles(
            namedtuple('Struct', orientation.keys())(*orientation.values()))
        R = self.build_rot_mat(roll=roll, pitch=pitch, yaw=yaw)

        # quat = np.array([orientation['x_val'], orientation['y_val'], orientation['z_val'], orientation['w_val']])
        # R = self.build_rot_mat_from_quat(quat)

        M[0:3, 0:3] = R

        return M
Example #29
def to3D(pixelX, pixelY, camInfo, depthImage, color=[], maxDistView = None):
    """From image pixels (2D) to relative(!) 3D coordinates"""

    if type(depthImage) == str:
        depth,s = airsim.read_pfm(depthImage)
    else:
        depthData = depthImage.image_data_float
        depthArray = np.array(depthData)
        depth = np.reshape(depthArray, (depthImage.height, depthImage.width))
        if isinstance(maxDistView, float) or isinstance(maxDistView, int):
            depth[depth>maxDistView] = maxDistView

    height, width = depth.shape
    # print(f"Image size: width:{width} -- height:{height}")
    halfWidth = width/2
    halfHeight= height/2

    camPitch, camRoll,camYaw = airsim.to_eularian_angles(camInfo.pose.orientation)

    # to radians (the FoV is in degrees)
    hFoV = np.radians(camInfo.fov)
    vFoV = (height/width)*hFoV

    pointsH = np.array(pixelY, dtype=int)
    pointsW = np.array(pixelX, dtype=int)

    pixelPitch = ((pointsH-halfHeight)/halfHeight) * (vFoV/2)
    pixelYaw = ((pointsW-halfWidth)/halfWidth) * (hFoV/2)

    theta = (np.pi/2) - pixelPitch
    # turn
    phi = pixelYaw

    r = depth[ pointsH, pointsW]
    idx = np.where(r<100)

    x = r*np.sin(theta)*np.cos(phi)
    y = r*np.sin(theta)*np.sin(phi)
    z = r*np.cos(theta)

    if len(color) != 0:
        return x[idx],y[idx],z[idx],color[idx]
    else:
        return x[idx],y[idx],z[idx]
Example #30
    def record(self):

        assert self.is_start_recording, "The recording didn't start"
        time_stamp_s, time_stamp_ns = str(time.time()).split('.')
        time_stamp = time_stamp_s + time_stamp_ns + "00"
        lidarData = self.client.getLidarData()
        if (len(lidarData.point_cloud) < 3):
            print("\tNo points received from Lidar data")
        else:
            # print("\tReading %d: time_stamp: %d number_of_points: %d" % (i, lidarData.time_stamp, len(points)))
            # print("\t\tlidar position: %s" % (pprint.pformat(lidarData.pose.position)))
            # print("\t\tlidar orientation: %s" % (pprint.pformat(lidarData.pose.orientation)))
            points = self.parse_lidarData(lidarData)
            lidar_pos = lidarData.pose.position
            x, y, z = lidar_pos.x_val, lidar_pos.y_val, lidar_pos.z_val
            pitch, roll, yaw = airsim.to_eularian_angles(
                self.client.getImuData().orientation)
            self.write_to_file_sync(
                f"{x} {y} {z} {pitch} {yaw} {roll} {time_stamp}\n")
            self.write_lidarData_to_disk(
                points, os.path.join(self.path_data, str(time_stamp)))
Example #31
# In settings.json first activate computer vision mode: 
# https://github.com/Microsoft/AirSim/blob/master/docs/image_apis.md#computer-vision-mode

import setup_path 
import airsim

client = airsim.VehicleClient()
client.confirmConnection()

pose = client.simGetVehiclePose()
print("x={}, y={}, z={}".format(pose.position.x_val, pose.position.y_val, pose.position.z_val))

angles = airsim.to_eularian_angles(client.simGetVehiclePose().orientation)
print("pitch={}, roll={}, yaw={}".format(angles[0], angles[1], angles[2]))

pose.position.x_val = pose.position.x_val + 1
client.simSetVehiclePose(pose, True)
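
A short follow-on sketch (my own, not part of the original sample): rotate the pose 90 degrees in yaw using the angles read back above.

import math

pitch, roll, yaw = airsim.to_eularian_angles(client.simGetVehiclePose().orientation)
pose.orientation = airsim.to_quaternion(pitch, roll, yaw + math.pi / 2)
client.simSetVehiclePose(pose, True)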