Example #1
    def move_camera(self, t):
        """
        This function moves the camera in a circle around the table.
        After 1000 steps the camera moves lower (to get a different angle and maybe more
        nice data)

        TODO: Maybe other camera movement?

        :param t: step
        :param camera: camera frame
        :param angle: angle in which camera need to move
        :param radius: around which camera is moving
        :return: updated angle
        """
        cam_px, cam_py, cam_pz = self.camera.getPosition()
        cam_q1, cam_q2, cam_q3, cam_q4 = self.camera.getQuaternion()
        q = Quaternion(cam_q1, cam_q2, cam_q3, cam_q4)
        e1, e2, e3 = q.to_euler(degrees=True)

        cam_px_new = cam_px + np.cos(self.angle) * self.radius
        cam_py_new = cam_py + np.sin(self.angle) * self.radius
        if t % 1000 == 0:
            cam_pz = cam_pz - 0.05
            e1 = e1 + 2.5

        quat_new = Quaternion.from_euler(e1, e2, e3 + 5, degrees=True)
        self.camera.setPosition([cam_px_new, cam_py_new, cam_pz])
        self.camera.setQuaternion([quat_new[0], quat_new[1], quat_new[2], quat_new[3]])
        self.angle += 0.0872665

        if t == 12000:
            self.camera.setPosition([0.6, -.75, 1.85])
            self.camera.setQuaternion(self.cam_quat)
            self.angle = 0
            self.radius = 0.075
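A minimal standalone sketch of the same orbit update, assuming the Quaternion class used throughout these examples (e.g. the squaternion package) with its from_euler/to_euler helpers; the step sizes here are illustrative.

import numpy as np
from squaternion import Quaternion  # assumed source of the Quaternion class used above

def orbit_step(position, quat, angle, radius=0.075, yaw_step_deg=5.0):
    """Advance a camera one step along a horizontal circle and yaw it by a fixed amount."""
    px, py, pz = position
    # Offset the current position along the circle direction for this angle.
    new_position = [px + np.cos(angle) * radius, py + np.sin(angle) * radius, pz]
    # Convert to Euler angles, add the yaw increment, and convert back to a quaternion.
    roll, pitch, yaw = quat.to_euler(degrees=True)
    new_quat = Quaternion.from_euler(roll, pitch, yaw + yaw_step_deg, degrees=True)
    return new_position, new_quat, angle + np.deg2rad(5.0)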
Example #2
 def callback_odom(self, msg):
     if self.rosb:
         self.last_odom = msg
         self.T = np.array((msg.pose.pose.position.x,
                            msg.pose.pose.position.y)).reshape(1, 2)
         q = Quaternion(msg.pose.pose.orientation.x,
                        msg.pose.pose.orientation.y,
                        msg.pose.pose.orientation.z,
                        msg.pose.pose.orientation.w)
         (r, p, y) = q.to_euler()
         self.theta = r
         self.R_plus = np.array([[np.cos(self.theta), -np.sin(self.theta)],
                                 [np.sin(self.theta),
                                  np.cos(self.theta)]]).reshape(2, 2)
         self.R_minus = np.array(
             [[np.cos(-self.theta), -np.sin(-self.theta)],
              [np.sin(-self.theta),
               np.cos(-self.theta)]]).reshape(2, 2)
         self.pose = np.array(
             [msg.pose.pose.position.x, msg.pose.pose.position.y, y],
             dtype=np.float32)
         self.pose_history_x.append(self.pose[0])
         self.pose_history_y.append(self.pose[1])
         self.thetta_history.append(self.theta)
         self.rosb = False
Example #3
def show_image_pose(frame, pose):
    cv2.imshow('frame', frame)

    if (cv2.waitKey(25) & 0xFF == ord('q')):
        return False

    posx = pose.pose.position.x
    posy = pose.pose.position.y
    posz = pose.pose.position.z

    w = pose.pose.orientation.w
    x = pose.pose.orientation.x
    y = pose.pose.orientation.y
    z = pose.pose.orientation.z

    q = Quaternion(w, x, y, z)
    e = q.to_euler(degrees=True)
    roll = float(e[0])
    pitch = float(e[1])
    yaw = float(e[2])

    print("orientation:", roll, pitch, yaw)
    print("position:", posx, posy, posz)
    print("------------------------------")

    return True
Example #4
def save_imu_pose(client, filename, id):
    filename = filename + '.jpg'
    vehicle_name = "Drone" + str(id)
    imu_data = client.getImuData(vehicle_name=vehicle_name)
    state = client.getMultirotorState(vehicle_name=vehicle_name)

    lon = state.kinematics_estimated.position.y_val + real_init_pos[id][
        0]  # x val
    lat = state.kinematics_estimated.position.x_val + real_init_pos[id][
        1]  # y val
    alt = state.kinematics_estimated.position.z_val

    w = imu_data.orientation.w_val
    x = imu_data.orientation.x_val
    y = imu_data.orientation.y_val
    z = imu_data.orientation.z_val

    q = Quaternion(w, x, y, z)
    e = q.to_euler(degrees=True)
    roll = float(e[0])
    pitch = float(e[1])
    yaw = float(e[2])

    st = (os.path.basename(filename)) + "," + '%.3f'%lon + "," + \
        '%.3f'%lat + "," + '%.3f'%alt + "," + \
        '%.3f'%yaw + "," + '%.3f'%pitch + "," + \
        '%.3f'%roll + "\n"
    print(st)
    global file_handle
    file_handle.write(st)
Example #5
def cb(data):
    global f, count, bridge
    try:
        curr_img = bridge.imgmsg_to_cv2(data.image, "bgr8")
    except CvBridgeError as e:
        print(e)
        return  # skip this message if the image conversion failed

    w = data.pose.pose.orientation.w
    x = data.pose.pose.orientation.x
    y = data.pose.pose.orientation.y
    z = data.pose.pose.orientation.z

    lon = data.pose.pose.position.x
    lat = data.pose.pose.position.y
    alt = data.pose.pose.position.z

    q = Quaternion(w, x, y, z)
    e = q.to_euler(degrees=True)
    roll = float(e[0])
    pitch = float(e[1])
    yaw = float(e[2])

    st = str(count) + ".jpg" + "," + '%.3f'%lon + "," + \
        '%.3f'%lat + "," + '%.3f'%alt + "," + \
        '%.3f'%yaw + "," + '%.3f'%pitch + "," + \
        '%.3f'%roll + "\n"
    print(st)
    f.write(st)
    cv2.imwrite(dirname + '/' + str(count) + '.jpg', curr_img)
    count += 1
Example #6
    def cb(self, data):
        try:
            curr_img = self.bridge.imgmsg_to_cv2(data.image, "bgr8")
        except CvBridgeError as e:
            print(e)
            return  # skip this message if the image conversion failed

        w = data.pose.pose.orientation.w
        x = data.pose.pose.orientation.x
        y = data.pose.pose.orientation.y
        z = data.pose.pose.orientation.z

        pos_x = data.pose.pose.position.x
        pos_y = data.pose.pose.position.y
        pos_z = data.pose.pose.position.z

        q = Quaternion(w, x, y, z)
        e = q.to_euler(degrees=True)
        roll = float(e[0])
        pitch = float(e[1])
        yaw = float(e[2])

        self.pose = [pos_x, pos_y, pos_z, yaw, roll, pitch]

        print("pose:", self.pose)
        self.combine(curr_img, self.pose)
Example #7
 def robot_pose_cb(self, data):
     self.robot_x = data.pose.pose.position.x
     self.robot_y = data.pose.pose.position.y
     q = Quaternion(data.pose.pose.orientation.w,
                    data.pose.pose.orientation.x,
                    data.pose.pose.orientation.y,
                    data.pose.pose.orientation.z)
     self.robot_th = q.to_euler(degrees=False)[2]
Example #8
 def update_pose(self, data):
     try:
         self.pose.x = data.pose[self.robot_id].position.x
         self.pose.y = data.pose[self.robot_id].position.y
         quaternion = Quaternion(data.pose[self.robot_id].orientation.x,
                                 data.pose[self.robot_id].orientation.y,
                                 data.pose[self.robot_id].orientation.z,
                                 data.pose[self.robot_id].orientation.w)
         self.pose.theta = (quaternion.to_euler()[0] / pi) + 0.25
     except IndexError:
         pass
Example #9
    def callback_odom(self, msg):

        self.T = np.array((msg.pose.pose.position.x,
                           msg.pose.pose.position.y)).reshape(1, 2)
        q = Quaternion(msg.pose.pose.orientation.x,
                       msg.pose.pose.orientation.y,
                       msg.pose.pose.orientation.z,
                       msg.pose.pose.orientation.w)
        (r, p, y) = q.to_euler()
        theta = r
        self.R_plus = np.array([[np.cos(theta), -np.sin(theta)],
                                [np.sin(theta), np.cos(theta)]]).reshape(2, 2)
        self.R_minus = np.array([[np.cos(-theta), -np.sin(-theta)],
                                 [np.sin(-theta),
                                  np.cos(-theta)]]).reshape(2, 2)
Example #10
    def pose_cb(self, data):
        w = data.pose.orientation.w
        x = data.pose.orientation.x
        y = data.pose.orientation.y
        z = data.pose.orientation.z

        pos_x = data.pose.position.x
        pos_y = data.pose.position.y
        pos_z = data.pose.position.z

        q = Quaternion(w, x, y, z)
        e = q.to_euler(degrees=True)
        roll = float(e[0])
        pitch = float(e[1])
        yaw = float(e[2])

        self.pose = [pos_x, pos_y, pos_z, yaw, roll, pitch]
Example #11
def pose_callback(data):
    global yaw, pitch, roll
    global lon, lat, alt

    w = data.pose.orientation.w
    x = data.pose.orientation.x
    y = data.pose.orientation.y
    z = data.pose.orientation.z

    lon = float(data.pose.position.x) + float(offset[0])
    lat = float(data.pose.position.y) + float(offset[1])
    alt = float(data.pose.position.z) + float(offset[2])

    q = Quaternion(w, x, y, z)
    e = q.to_euler(degrees=True)
    roll = float(e[0])
    pitch = float(e[1])
    yaw = float(e[2])
Example #12
    def callback_modelstates(self, msg):

        for i in range(len(msg.name)):
            if (msg.name[i][0:5] == "actor"):
                actor_id = int(msg.name[i][5])

                posx = msg.pose[i].position.x
                posy = msg.pose[i].position.y
                posz = msg.pose[i].position.z

                x = msg.pose[i].orientation.x
                y = msg.pose[i].orientation.y
                z = msg.pose[i].orientation.z
                w = msg.pose[i].orientation.w

                q = Quaternion(w, x, y, z)
                e = q.to_euler(degrees=True)

                self.pedestrian_pose[actor_id] = [posx, posy, e[2]]
Example #13
def dwa_wrapper(final_list):

    odom_dec = {}
    q = Quaternion(final_list[0].pose.pose.orientation.w,
                   final_list[0].pose.pose.orientation.x,
                   final_list[0].pose.pose.orientation.y,
                   final_list[0].pose.pose.orientation.z)
    e = q.to_euler(degrees=False)
    odom_dec["x"] = final_list[0].pose.pose.position.x
    odom_dec["y"] = final_list[0].pose.pose.position.y
    odom_dec["theta"] = e[2]
    odom_dec["u"] = final_list[0].twist.twist.linear.x
    odom_dec["omega"] = final_list[0].twist.twist.angular.z

    cnfg = Config(odom_dec, final_list[1])
    obs = Obstacles(final_list[2].ranges, cnfg)
    v_list, w_list, cost_list = DWA(cnfg, obs)

    return v_list, w_list, cost_list, final_list[3]
Example #14
def Angles():
    streamMotion = resolve_stream('type', 'Motion') # Resolve stream
    inlet2 = StreamInlet(streamMotion[0])   # Create a new inlet to read Motion data from the stream
    sample, timestamp = inlet2.pull_sample() # Get a new sample with timestamp

    # Motion data is received as a quaternion.
    # Convert it to Euler angles in degrees.
    # Q1, Q2, Q3, Q4 are at indices 3, 4, 5, 6 of the sample array.
    q = Quaternion(sample[3], sample[4], sample[5], sample[6])
    e = q.to_euler(degrees=True)

    # Round the angles and store them in a flat array
    angles = np.array([e])
    angles = angles.flatten()
    angles = np.rint(angles)
    print(angles)

    # The angles are stored in a flat array; return them separately
    return angles[0], angles[1], angles[2]
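For reference, a minimal check of the quaternion-to-Euler conversion these examples rely on, assuming a (w, x, y, z) quaternion class like the one above (e.g. squaternion); the sample value is a 90-degree rotation about the z-axis.

import numpy as np
from squaternion import Quaternion  # assumed source of the Quaternion class used above

# w = cos(theta/2), z = sin(theta/2) for a rotation of theta about the z-axis.
q = Quaternion(np.cos(np.pi / 4), 0.0, 0.0, np.sin(np.pi / 4))
roll, pitch, yaw = q.to_euler(degrees=True)
print(roll, pitch, yaw)  # expected: approximately 0.0 0.0 90.0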
Example #15
    def _get_obs(self):
        """
        Here we define what sensor data defines our robots observations
        To know which Variables we have acces to, we need to read the
        TurtleBot2Env API DOCS
        :return:
        """
        start_tt = time.time()
        rospy.logdebug("Start Get Observation ==>")
        # We get the laser scan data
        laser_scan = self.get_laser_scan()
        rospy.logdebug("BEFORE DISCRET _episode_done==>" +
                       str(self._episode_done))

        discretized_observations = self.discretize_observation(
            laser_scan, self.new_ranges)

        rospy.logdebug("Observations==>" + str(discretized_observations))
        rospy.logdebug("AFTER DISCRET_episode_done==>" +
                       str(self._episode_done))
        rospy.logdebug("END Get Observation ==>")
        discretized_observations = numpy.asarray(discretized_observations)

        # New code for getting observations based on DWA
        odom_data = self.get_odom()
        self.odom_dict = {}
        self._reached_goal = False
        q = Quaternion(odom_data.pose.pose.orientation.w,
                       odom_data.pose.pose.orientation.x,
                       odom_data.pose.pose.orientation.y,
                       odom_data.pose.pose.orientation.z)
        e = q.to_euler(degrees=False)
        self.odom_dict["x"] = odom_data.pose.pose.position.x
        self.odom_dict["y"] = odom_data.pose.pose.position.y
        self.odom_dict["theta"] = e[2]
        self.odom_dict["u"] = odom_data.twist.twist.linear.x
        self.odom_dict["omega"] = odom_data.twist.twist.angular.z
        cnfg = Config(self.odom_dict, self.goal_pose)
        self.obs = Obstacles(laser_scan.ranges, cnfg)

        if (self.n_skipped_count == 0):
            self.obs_list_stacked = numpy.delete(self.obs_list_stacked, (0, 1),
                                                 1)
            self.obs_list_stacked = numpy.append(self.obs_list_stacked,
                                                 self.obs.obst, 1)
            self.n_skipped_count += 1

        elif (self.n_skipped_count < self.n_skipped_frames):
            self.obs_list_stacked[:, ((2 * self.n_stacked_frames) - 2):((
                2 * self.n_stacked_frames))] = self.obs.obst
            self.n_skipped_count += 1

        elif (self.n_skipped_count == self.n_skipped_frames):
            self.obs_list_stacked[:, ((2 * self.n_stacked_frames) - 2):((
                2 * self.n_stacked_frames))] = self.obs.obst
            self.n_skipped_count = 0

        # print("The stacked obs list {}".format(self.obs_list_stacked))
        # print("The stacked obs list part {}".format(self.obs_list_stacked[:5,:]))

        self.v_matrix, self.w_matrix, self.cost_matrix, self.obst_cost_matrix, self.to_goal_cost_matrix = DWA(
            cnfg, self.obs_list_stacked, self.n_stacked_frames)

        # print("The w_matrix after {}".format(self.w_matrix[:5,:]))
        # print("The w_matrix {}".format(self.w_matrix[:,self.n_stacked_frames - 1]))
        # print("The cost_matrix {}".format(self.cost_matrix[:,self.n_stacked_frames - 1]))

        if (self.visualize_obs == True) and (self.robot_number == 0):
            self.viz_obs()

        # self.stacked_obs = numpy.stack((self.v_matrix, self.w_matrix, self.cost_matrix), axis=2)

        self.stacked_obs = numpy.stack(
            (self.v_matrix, self.w_matrix, self.obst_cost_matrix,
             self.to_goal_cost_matrix),
            axis=2)
        # self.obst_cost_matrix = self.obst_cost_matrix.flatten('F')
        # self.to_goal_cost_matrix = self.to_goal_cost_matrix.flatten('F')

        # self.stacked_obs = numpy.concatenate((self.obst_cost_matrix, self.to_goal_cost_matrix), axis=0)
        # self.stacked_obs = numpy.expand_dims(self.stacked_obs, axis=1)

        self.current_distance2goal = self._get_distance2goal()
        if (self.current_distance2goal < 0.5):
            self._episode_done = True
            self._reached_goal = True

        return self.stacked_obs
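A self-contained sketch of the frame-stacking bookkeeping used above, assuming each observation contributes a two-column (x, y) obstacle array; the shapes and names here are illustrative.

import numpy as np

def push_frame(stacked, new_obst):
    """Drop the oldest (x, y) column pair and append the newest one on the right."""
    stacked = np.delete(stacked, (0, 1), axis=1)  # remove the oldest frame's two columns
    return np.append(stacked, new_obst, axis=1)   # append the newest frame's two columns

# Illustrative shapes: 5 obstacle points, 3 stacked frames of (x, y) columns.
stacked = np.zeros((5, 6))
newest = np.ones((5, 2))
stacked = push_frame(stacked, newest)
print(stacked.shape)  # (5, 6): width unchanged, contents rolled forward by one frame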
Example #16
    def _init_env_variables(self):
        """
        Inits variables needed to be initialised each time we reset at the start
        of an episode.
        :return:
        """
        # For Info Purposes
        self.cumulated_reward = 0.0
        # Set _episode_done to False, because it is calculated asynchronously
        self._episode_done = False

        # We wait a small amount of time before starting everything because, on very fast
        # resets, laser scan values are sluggish and sometimes still carry values from the
        # prior position that triggered the done flag.
        time.sleep(1.0)

        # TODO: Add reset of published filtered laser readings
        laser_scan = self.get_laser_scan()
        discretized_ranges = laser_scan.ranges

        odom_data_init = self.get_odom()
        odom_dict_init = {}
        q = Quaternion(odom_data_init.pose.pose.orientation.w,
                       odom_data_init.pose.pose.orientation.x,
                       odom_data_init.pose.pose.orientation.y,
                       odom_data_init.pose.pose.orientation.z)
        e = q.to_euler(degrees=False)
        odom_dict_init["x"] = odom_data_init.pose.pose.position.x
        odom_dict_init["y"] = odom_data_init.pose.pose.position.y
        odom_dict_init["theta"] = e[2]
        odom_dict_init["u"] = odom_data_init.twist.twist.linear.x
        odom_dict_init["omega"] = odom_data_init.twist.twist.angular.z
        cnfg = Config(odom_dict_init, self.goal_pose)
        obs_init = Obstacles(laser_scan.ranges, cnfg)
        self.obs_list_stacked = numpy.column_stack(
            [obs_init.obst for _ in range(self.n_stacked_frames)])

        self.counter = 1
        self.episode_num = self.episode_num + 1

        init_obs = self._get_obs()
        self.previous_distance2goal = self._get_distance2goal()
        self.publish_filtered_laser_scan(
            laser_original_data=laser_scan,
            new_filtered_laser_range=discretized_ranges)
        self.total_collisions += self.episode_collisions
        print("Total number of collsions ------------ {}".format(
            self.total_collisions))
        self.episode_collisions = 0
        self.n_skipped_count = 0

        file = open('VelList.csv', 'w')
        file_ang = open('angularVelList.csv', 'w')
        with file:
            write = csv.writer(file)
            write.writerows(self.list_vel)
            write_ang = csv.writer(file_ang)
            write_ang.writerows(self.list_angular_vel)

        self.list_vel.append([
            "lower_limit", "upper_limit", "current_velocity",
            "Violation_Status"
        ])
        self.list_angular_vel.append([
            "lower_limit", "upper_limit", "current_velocity",
            "Violation_Status"
        ])
Example #17
    def update_point(self, current_state, request_velocity):
        self.state = np.array([
            current_state.pose.position.x, current_state.pose.position.y,
            current_state.pose.position.z, current_state.pose.orientation.z
        ])
        self.x_controller.maxOut = request_velocity
        self.y_controller.maxOut = request_velocity
        self.altitude_controller.maxOut = request_velocity

        # Drawing a vector from our current position to a target position
        output_vector = np.empty(3)
        output_vector[0] = self.target[0] - self.state[0]
        output_vector[1] = self.target[1] - self.state[1]
        output_vector[2] = self.target[2] - self.state[2]

        # Normalizing vector
        output_vector = output_vector / (output_vector[0]**2 + output_vector[1]
                                         **2 + output_vector[2]**2)**.5

        time = rospy.get_time()

        x_factor = self.x_controller.update(self.target[0], self.state[0],
                                            time)
        y_factor = self.y_controller.update(self.target[1], self.state[1],
                                            time)
        altitude_factor = self.altitude_controller.update(
            self.target[2], self.state[2], time)

        # Multiply each controller factor into the vector component it corresponds to
        output_vector[0] *= abs(x_factor)
        output_vector[1] *= abs(y_factor)
        output_vector[2] *= abs(altitude_factor)

        #string = "x: " + str(output_vector[0]) + " y: " + str(output_vector[1]) + " z: " + str(output_vector[2])
        #rospy.loginfo_throttle(0.5, string)

        # Creating the output Twist message
        output = Twist()
        output.linear.x = output_vector[0]
        output.linear.y = output_vector[1]
        output.linear.z = output_vector[2]

        # Testing just raw PID instead
        #output.linear.x = x_factor
        #output.linear.y = y_factor
        #output.linear.z = altitude_factor

        # TODO: Yaw controller

        # current_state.pose.orientation is a quaternion, so we have to convert it to
        # Euler form before we can compare yaw angles correctly in the PID controller :)
        current_quat = Quaternion(current_state.pose.orientation.w,
                                  current_state.pose.orientation.x,
                                  current_state.pose.orientation.y,
                                  current_state.pose.orientation.z)
        current_euler = current_quat.to_euler()

        # The PID controller calculates the error itself; rather than customizing it, we
        # construct the yaw error ourselves, applying the wrap-around rules for angles
        error = (current_euler[2] + math.pi) - self.target[3]
        #error = (error + 180) % 360 - 180
        if error > math.pi:
            error -= math.pi * 2
        elif error < -math.pi:
            error += math.pi * 2

        yaw_input = self.Yaw.update(0, error, time)

        #rospy.loginfo(self.target[3])
        #rospy.loginfo(current_euler[2])

        output.angular.z = yaw_input

        return output
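The yaw-error handling above is the standard wrap of an angle difference into [-pi, pi); a minimal standalone sketch with illustrative values:

import math

def wrap_to_pi(angle):
    """Normalize an angle (in radians) into the [-pi, pi) range."""
    return (angle + math.pi) % (2.0 * math.pi) - math.pi

print(wrap_to_pi(3.5 * math.pi))   # -> about -1.5708 (i.e. -pi/2)
print(wrap_to_pi(-3.0 * math.pi))  # -> about -3.1416 (i.e. -pi)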
Example #18
 def __odom_callback(self, msg: Odometry):
     orientation_q = msg.pose.pose.orientation
     q = Quaternion(orientation_q.w, orientation_q.x, orientation_q.y,
                    orientation_q.z)
     (self.__roll, self.__pitch, self.__yaw) = q.to_euler()