Exemplo n.º 1
0
    def _wait_for_tf_init(self):
        """
        Block until every frame pair needed for calibration is present in tf.

        Raises the usual tf2 lookup exceptions when a transform does not
        appear within its timeout.

        :rtype: None
        """
        # Robot chain: base -> end effector (wait up to 20 s).
        self.tfBuffer.lookup_transform(
            self.handeye_parameters.robot_base_frame,
            self.handeye_parameters.robot_effector_frame,
            Time(0),
            Duration(20))
        # Tracking chain: tracking base -> marker (wait up to 60 s).
        self.tfBuffer.lookup_transform(
            self.handeye_parameters.tracking_base_frame,
            self.handeye_parameters.tracking_marker_frame,
            Time(0),
            Duration(60))
Exemplo n.º 2
0
def get_begin_end(time_rules, bag_file):
    """Return the (begin, end) pair as Time objects for playback bounds.

    Inclusive time rules carry offsets relative to the start of the bag
    (0 == bag start); when no inclusive time rule exists, the bag's own
    start/end times are used as the respective bound.
    """
    t_min = t_max = None
    for rule in time_rules:
        if not (rule.is_time() and rule.is_include()):
            continue
        if t_min is None or rule.token_from < t_min:
            t_min = rule.token_from
        if t_max is None or rule.token_to > t_max:
            t_max = rule.token_to

    bag_start = bag_file.get_start_time()
    bag_end = bag_file.get_end_time()
    # Offsets are relative, so both bounds are anchored at the bag start.
    begin = bag_start if t_min is None else bag_start + t_min
    end = bag_end if t_max is None else bag_start + t_max
    return Time(begin), Time(end)
Exemplo n.º 3
0
    def get_closest_qr(self, qrs):
        """Return the QR code in *qrs* that is closest (2-D) to the robot.

        Falls back to the first entry when only one QR exists or when the
        transform lookup fails mid-scan.

        :param qrs: non-empty sequence of QR objects with .frame_id/.qrPose
        :returns: the closest QR object
        """
        print(qrs)  # was a Python-2 print statement; function form for py3
        closest_qr = qrs[0]  # renamed from misspelled 'closet_qr'
        min_distance = 1000

        if len(qrs) == 1:
            return closest_qr

        # self.current_pose = self.explorer.pose_stamped.pose
        for qr in qrs:
            try:
                # NOTE(review): Time(3) is an absolute stamp, not "latest";
                # sibling snippets use Time(0) -- confirm this is intended.
                (trans, rot) = self.transform_listener.lookupTransform(
                    qr.frame_id, '/base_footprint', Time(3))
            except Exception:  # narrowed from bare except: keep SystemExit
                log.error(
                    "Transform listener failed to acquire transformation")
                return closest_qr
            current_pose = Pose()
            current_pose.position.x = trans[0]
            current_pose.position.y = trans[1]
            current_pose.position.z = trans[2]
            target_pose = qr.qrPose.pose
            target_distance = distance_2d(target_pose, current_pose)
            if target_distance < min_distance:
                min_distance = target_distance
                closest_qr = qr

        return closest_qr
Exemplo n.º 4
0
    def make_marker_stub(self, stamped_pose, size=None, color=None):
        """Build a visualization Marker pre-filled with pose, color and scale.

        :param stamped_pose: PoseStamped providing both header and pose
        :param size: (x, y, z) scale; defaults to (1, 1, 1)
        :param color: (r, g, b[, a]); defaults to opaque mid-grey
        :returns: a Marker with a fresh sequential id
        """
        fill = (0.5, 0.5, 0.5, 1.0) if color is None else color
        dims = (1, 1, 1) if size is None else size

        marker = Marker()
        marker.header = stamped_pose.header
        marker.id = self.next_id
        self.next_id += 1
        # NOTE(review): lifetime is set with Time(0) here ("forever");
        # confirm the message field accepts a Time rather than a Duration.
        marker.lifetime = Time(0)  # forever
        marker.action = Marker.ADD
        marker.pose = stamped_pose.pose
        marker.color.r = fill[0]
        marker.color.g = fill[1]
        marker.color.b = fill[2]
        # Alpha is optional; a 3-tuple color means fully opaque.
        marker.color.a = fill[3] if len(fill) == 4 else 1.0
        marker.scale.x = dims[0]
        marker.scale.y = dims[1]
        marker.scale.z = dims[2]
        return marker
Exemplo n.º 5
0
    def choose_target(self, targets):
        """Choose the nearest possible target (2-D distance to base).

        :param targets: sequence of victim targets with .victimPose
        :returns: the closest target, the first one on lookup failure, or
            None when called with an empty sequence (logged as an error)
        """
        # Should never be called with empty targets.
        if not targets:
            log.error('choose_target was called with no targets.')
            return None

        closest_target = targets[0]
        min_distance = 1000

        if len(targets) == 1:
            return closest_target

        # self.current_pose = self.explorer.pose_stamped.pose
        for target in targets:
            try:
                (trans, rot) = self.transform_listener.lookupTransform(target.victimPose.header.frame_id,
                                                                       '/base_footprint',
                                                                       Time(0))
            except Exception:
                # Narrowed from a bare except so SystemExit and
                # KeyboardInterrupt are not silently swallowed.
                log.error("Transform listener failed to acquire transformation")
                return closest_target
            self.current_pose = Pose()
            self.current_pose.position.x = trans[0]
            self.current_pose.position.y = trans[1]
            self.current_pose.position.z = trans[2]
            target_pose = target.victimPose.pose
            target_distance = distance_2d(target_pose, self.current_pose)
            if target_distance < min_distance:
                min_distance = target_distance
                closest_target = target

        return closest_target
Exemplo n.º 6
0
def dicttopath(dic):
    """Convert a {"frame_id": ..., "points": [...]} dict into a Path message.

    Each point dict must provide "pose" (list form) and "time" (stamp value).
    """
    frame = dic["frame_id"]
    path = Path()
    path.header.frame_id = frame
    for point in dic["points"]:
        stamped = list_to_pose(point["pose"], frame_id=frame)
        stamped.header.stamp = Time(point["time"])
        path.poses.append(stamped)
    return path
Exemplo n.º 7
0
 def get_relative_state(self):
     """Return {neighbour_id: transform} of this agent relative to each
     neighbour's base_link frame.

     The source and target frames are deliberately swapped: we want this
     agent expressed in each neighbour's frame, not the neighbour wrt
     'this' (self's) frame. Maybe implement twist transform later.
     """
     # The source frame never changes inside the loop, so build it once.
     own_frame = "{}_{}/base_link".format(self.model, self._id)
     state = {}
     for nhbr in self.neighbours:
         nhbr_frame = "{}_{}/base_link".format(self.model, nhbr)
         state[nhbr] = self.tf_listener.lookupTransform(
             own_frame, nhbr_frame, Time())
     return state
Exemplo n.º 8
0
    def getImg(self, index=0):
        """Decode the image pair recorded at bag timestamp *index*.

        Re-reads the bag over a narrow window ending at the indexed
        timestamp and stores the decoded frames in self.img_0 / self.img_1.

        :param index: position into self._bag_timestamp
        :returns: (self.img_0, self.img_1)
        """
        i = index
        # Window start: 1 ms before the indexed stamp ...
        st = Time(self._bag_timestamp[i].secs,
                  self._bag_timestamp[i].nsecs - 1e6)
        if i > 0:
            # ... or, past the first frame, 1 ms after the previous stamp.
            # NOTE(review): 1e6 is a float and the subtraction above can go
            # negative -- confirm rospy Time normalizes both cases.
            st = Time(self._bag_timestamp[i - 1].secs,
                      self._bag_timestamp[i - 1].nsecs + 1e6)
        et = Time(self._bag_timestamp[i].secs, self._bag_timestamp[i].nsecs)
        # Only self.msg topics are read; at most one message per topic is
        # expected to fall inside the [st, et] window.
        for topic, msg, t in self._bag.read_messages(topics=self.msg,
                                                     start_time=st,
                                                     end_time=et):
            if topic == self.msg[0]:
                self.img_0 = self.bridge.imgmsg_to_cv2(msg, self.img_type)
                # self.img_prv[:, :self.img_w] = self.bridge.imgmsg_to_cv2(msg, self.img_type)
            if topic == self.msg[1]:
                self.img_1 = self.bridge.imgmsg_to_cv2(msg, self.img_type)
                # self.img_prv[:, self.img_w:(self.img_w * 2)] = self.bridge.imgmsg_to_cv2(msg, self.img_type)
            # if topic == '/odometry':
            #     odom = msg

        return self.img_0, self.img_1
    def join_tree_srv_function(self, command):
        """Service callback: flag the two frame trees for joining, then block
        until the drone-odom -> map transform becomes available.

        :param command: service request; .status enables/disables joining
        :returns: True once the transform exists (service response)
        """
        if self.debug:
            print('\x1b[33;1m' + str('Joining the Two Separate Frame Tree!') +
                  '\x1b[0m')
        # Set Flag to True
        self.join_trees = command.status
        # Busy-wait (no sleep between polls) until the TF between the
        # drone's odom frame and the map frame can be computed; each poll
        # itself waits up to 1 s inside can_transform.
        while not self.tf_listen.can_transform(
                self.drone_odom_frame, self.map_frame, Time(0), Duration(1.0)):
            pass
        # Return From Service Once Tree is Joint
        return True
    def calculate_error_from_tag(self):
        """Compute the drone's cartesian error relative to the detected AR tag.

        :returns: [x_err, y_err, z_err, yaw, height] when a transform and a
            marker are available, otherwise the sentinel [-1, -1, -1, -1, -1]
        """
        # Calculate_TF and get Pose Error
        if self.can_transform() and self.marker_detected:

            # Get the most recent TF's
            tf_ = self.tf.lookup_transform(self.ar_pose_frame,
                                           self.drone_camera_frame, Time(0),
                                           Duration(1.0))
            h_tf = self.tf.lookup_transform(self.drone_base_footprint,
                                            self.drone_base_link, Time(0),
                                            Duration(1.0))
            # The Height that Drone is currently fly
            height = h_tf.transform.translation.z
            # Calculate Yaw from TF
            (_, _, yaw) = euler_from_quaternion([
                tf_.transform.rotation.x, tf_.transform.rotation.y,
                tf_.transform.rotation.z, tf_.transform.rotation.w
            ])
            # Shift by pi, then wrap back into (-180, 180] degrees below.
            yaw += pi
            # Manipulate Yaw to Rotate Drone in the Right Direction
            if np.rad2deg(yaw) > 180:
                yaw -= 2 * pi
            # Get the Rotation Matrix (first three rows of the 4x4
            # homogeneous matrix; the translation row is discarded)
            (r1, r2, r3, _) = quaternion_matrix([
                tf_.transform.rotation.x, tf_.transform.rotation.y,
                tf_.transform.rotation.z, tf_.transform.rotation.w
            ])
            rotation_mat = np.asmatrix([r1[0:3], r2[0:3], r3[0:3]])
            # Get Position in Matrix Form (3x1 column vector)
            pose_mat = np.asmatrix([[tf_.transform.translation.x],
                                    [tf_.transform.translation.y],
                                    [tf_.transform.translation.z]])
            # Calculate the Cartesian Error: rotate the camera-frame
            # translation by the tag's rotation.
            error = rotation_mat * pose_mat

            return [error.item(0), error.item(1), error.item(2), yaw, height]
        else:
            return [-1, -1, -1, -1, -1]
Exemplo n.º 11
0
    def str_to_ros_time(string):
        """Parse a "UTC_%Y/%m/%d_%H:%M:%S.<fraction>" string into a ROS Time.

        time.strptime only supports up to microsecond resolution (the ROS
        standard is nanosecond), so the fractional digits are converted by
        hand and scaled to nanoseconds.
        """
        # parse date: the first 23 characters hold the whole-second part
        no_nsec = string[:23] if len(string) > 23 else string
        utc_time = time.strptime(no_nsec, "UTC_%Y/%m/%d_%H:%M:%S")
        epoch_time = timegm(utc_time)

        # compute nanoseconds from the digits after position 24
        # NOTE(review): when len(string) <= 24 the WHOLE string is used as
        # the fraction (int() would raise), and more than 9 fractional
        # digits makes the scale factor fractional -- confirm inputs always
        # have 24+ chars with at most 9 fractional digits.
        nsecs_str = string[24:] if len(string) > 24 else string
        nsecs = int(nsecs_str) * 10**(9 - len(nsecs_str))

        return Time(epoch_time, nsecs)
    def pose_error(self, x, y, z):
        """Express a map-frame target point in the drone's base_link frame.

        :param x, y, z: target position in the map frame
        :returns: (x, y, z) of the target relative to the drone base link
        """
        target = PoseStamped()
        target.pose.position.x = x
        target.pose.position.y = y
        target.pose.position.z = z

        # Latest map -> base_link transform, waiting at most 1 s.
        map_to_base = self.tf.lookup_transform(self.drone_base_link,
                                               self.map_frame, Time(0),
                                               Duration(1.0))
        relative = do_transform_pose(target, map_to_base)
        position = relative.pose.position
        return position.x, position.y, position.z
Exemplo n.º 13
0
 def get_obs(self, apply_preprocessor=True):
     """Collect the base observation plus the tf transform to every
     neighbour in self.tf_obs.

     Each neighbour lookup is retried until it succeeds, so this can spin
     for a long time if a transform never becomes available.

     :param apply_preprocessor: run self.preprocessor on the merged obs
     :returns: the (optionally preprocessed) observation dict
     """
     t = time.time()
     print(self, 'GET_OBS')
     obs = super().get_obs(apply_preprocessor=False)
     print('GOT OTHER OBS, WAITING FOR TF ...')
     src = '/{}/base_link'.format(self.ns)
     nbhrs = list(self.tf_obs.keys())
     while nbhrs:
         nbhr = nbhrs[-1]
         try:
             tgt = '/{}/base_link'.format(nbhr)
             self.tf_obs[nbhr] = self.tfl.lookupTransform(src, tgt, Time())
             self._is_obs_ready[nbhr] = True
             nbhrs.pop()
         except Exception:
             # Narrowed from a bare except: a bare clause also swallowed
             # KeyboardInterrupt/SystemExit, making this unbounded retry
             # loop impossible to interrupt.
             print('unable2tf b/w {} & {}'.format(nbhr, self.ns))
             continue
     obs.update(self.tf_obs)
     print('{}: Time taken for obs: {}'.format(self.ns, time.time() - t))
     if apply_preprocessor and self.preprocessor is not None:
         return self.preprocessor(obs)
     return obs
 def can_transform(self):
     """Return True when the ar-tag -> drone-camera transform is currently
     available (waiting at most 1 s)."""
     # The buffer already answers with a boolean; the previous
     # `if ...: return True / else: return False` was redundant.
     return bool(self.tf.can_transform(self.ar_pose_frame,
                                       self.drone_camera_frame,
                                       Time(0), Duration(1.0)))
Exemplo n.º 15
0
    def test_Time(self):
        """Exercise rospy.Time: construction limits, from_sec/to_sec round
        trips, and arithmetic against Duration and other Time values."""
        # This is a copy of test_roslib_rostime
        from rospy import Time, Duration
        # #1600 Duration > Time should fail
        failed = False
        try:
            v = Duration.from_sec(0.1) > Time.from_sec(0.5)
            failed = True
        except:
            pass
        self.failIf(failed, "should have failed to compare")
        try:
            v = Time.from_sec(0.4) > Duration.from_sec(0.1)
            failed = True
        except:
            pass
        self.failIf(failed, "should have failed to compare")

        try:  # neg time fails
            Time(-1)
            failed = True
        except:
            pass
        self.failIf(failed, "negative time not allowed")
        try:
            # nsecs below -1e9 would normalize to a negative total time
            Time(1, -1000000001)
            failed = True
        except:
            pass
        self.failIf(failed, "negative time not allowed")

        # from_sec()/to_sec() must round-trip the current wall-clock time
        import time
        t = time.time()
        v = Time.from_sec(t)
        self.assertEquals(v.to_sec(), t)
        # test from_sec()
        self.assertEquals(Time.from_sec(0), Time())
        self.assertEquals(Time.from_sec(1.), Time(1))
        self.assertEquals(Time.from_sec(v.to_sec()), v)
        self.assertEquals(v.from_sec(v.to_sec()), v)

        # test to_time()
        self.assertEquals(v.to_sec(), v.to_time())

        # test addition
        # - time + time fails
        try:
            v = Time(1, 0) + Time(1, 0)
            failed = True
        except:
            pass
        self.failIf(failed, "Time + Time must fail")

        # - time + duration is commutative
        v = Time(1, 0) + Duration(1, 0)
        self.assertEquals(Time(2, 0), v)
        v = Duration(1, 0) + Time(1, 0)
        self.assertEquals(Time(2, 0), v)
        v = Time(1, 1) + Duration(1, 1)
        self.assertEquals(Time(2, 2), v)
        v = Duration(1, 1) + Time(1, 1)
        self.assertEquals(Time(2, 2), v)

        # nsec overflow carries into the seconds field
        v = Time(1) + Duration(0, 1000000000)
        self.assertEquals(Time(2), v)
        v = Duration(1) + Time(0, 1000000000)
        self.assertEquals(Time(2), v)

        v = Time(100, 100) + Duration(300)
        self.assertEquals(Time(400, 100), v)
        v = Duration(300) + Time(100, 100)
        self.assertEquals(Time(400, 100), v)

        v = Time(100, 100) + Duration(300, 300)
        self.assertEquals(Time(400, 400), v)
        v = Duration(300, 300) + Time(100, 100)
        self.assertEquals(Time(400, 400), v)

        # negative nsecs in the Duration borrow from the seconds field
        v = Time(100, 100) + Duration(300, -101)
        self.assertEquals(Time(399, 999999999), v)
        v = Duration(300, -101) + Time(100, 100)
        self.assertEquals(Time(399, 999999999), v)

        # test subtraction
        try:
            v = Time(1, 0) - 1
            failed = True
        except:
            pass
        self.failIf(failed, "Time - non Duration must fail")

        class Foob(object):
            pass

        try:
            v = Time(1, 0) - Foob()
            failed = True
        except:
            pass
        self.failIf(failed, "Time - non TVal must fail")

        # - Time - Duration
        v = Time(1, 0) - Duration(1, 0)
        self.assertEquals(Time(), v)

        v = Time(1, 1) - Duration(-1, -1)
        self.assertEquals(Time(2, 2), v)
        v = Time(1) - Duration(0, 1000000000)
        self.assertEquals(Time(), v)
        v = Time(2) - Duration(0, 1000000000)
        self.assertEquals(Time(1), v)
        v = Time(400, 100) - Duration(300)
        self.assertEquals(Time(100, 100), v)
        v = Time(100, 100) - Duration(0, 101)
        self.assertEquals(Time(99, 999999999), v)

        # - Time - Time = Duration
        v = Time(100, 100) - Time(100, 100)
        self.assertEquals(Duration(), v)
        v = Time(100, 100) - Time(100)
        self.assertEquals(Duration(0, 100), v)
        v = Time(100) - Time(200)
        self.assertEquals(Duration(-100), v)
Exemplo n.º 16
0
    def test_Duration(self):
        """Exercise rospy.Duration: from_sec round trips, negation,
        addition/subtraction normalization, and abs()."""
        Duration = rospy.Duration

        # test from_sec
        v = Duration(1000)
        self.assertEquals(v, Duration.from_sec(v.to_sec()))
        self.assertEquals(v, v.from_sec(v.to_sec()))
        v = Duration(0, 1000)
        self.assertEquals(v, Duration.from_sec(v.to_sec()))
        self.assertEquals(v, v.from_sec(v.to_sec()))

        # test neg: nsecs are normalized into [0, 1e9) after negation
        v = -Duration(1, -1)
        self.assertEquals(-1, v.secs)
        self.assertEquals(1, v.nsecs)
        v = -Duration(-1, -1)
        self.assertEquals(1, v.secs)
        self.assertEquals(1, v.nsecs)
        v = -Duration(-1, 1)
        self.assertEquals(0, v.secs)
        self.assertEquals(999999999, v.nsecs)

        # test addition: mixing in Time or plain ints must raise
        failed = False
        try:
            v = Duration(1, 0) + Time(1, 0)
            failed = True
        except:
            pass
        self.failIf(failed, "Duration + Time must fail")
        try:
            v = Duration(1, 0) + 1
            failed = True
        except:
            pass
        self.failIf(failed, "Duration + int must fail")

        v = Duration(1, 0) + Duration(1, 0)
        self.assertEquals(2, v.secs)
        self.assertEquals(0, v.nsecs)
        self.assertEquals(Duration(2, 0), v)
        v = Duration(-1, -1) + Duration(1, 1)
        self.assertEquals(0, v.secs)
        self.assertEquals(0, v.nsecs)
        self.assertEquals(Duration(), v)
        # nsec overflow carries into the seconds field
        v = Duration(1) + Duration(0, 1000000000)
        self.assertEquals(2, v.secs)
        self.assertEquals(0, v.nsecs)
        self.assertEquals(Duration(2), v)
        v = Duration(100, 100) + Duration(300)
        self.assertEquals(Duration(400, 100), v)
        v = Duration(100, 100) + Duration(300, 300)
        self.assertEquals(Duration(400, 400), v)
        v = Duration(100, 100) + Duration(300, -101)
        self.assertEquals(Duration(399, 999999999), v)

        # test subtraction: ints and Time must raise
        try:
            v = Duration(1, 0) - 1
            failed = True
        except:
            pass
        self.failIf(failed, "Duration - non duration must fail")
        try:
            v = Duration(1, 0) - Time(1, 0)
            failed = True
        except:
            pass
        self.failIf(failed, "Duration - Time must fail")

        v = Duration(1, 0) - Duration(1, 0)
        self.assertEquals(Duration(), v)
        # negative results keep nsecs normalized into [0, 1e9)
        v = Duration(-1, -1) - Duration(1, 1)
        self.assertEquals(Duration(-3, 999999998), v)
        v = Duration(1) - Duration(0, 1000000000)
        self.assertEquals(Duration(), v)
        v = Duration(2) - Duration(0, 1000000000)
        self.assertEquals(Duration(1), v)
        v = Duration(100, 100) - Duration(300)
        self.assertEquals(Duration(-200, 100), v)
        v = Duration(100, 100) - Duration(300, 101)
        self.assertEquals(Duration(-201, 999999999), v)

        # test abs
        self.assertEquals(abs(Duration()), Duration())
        self.assertEquals(abs(Duration(1)), Duration(1))
        self.assertEquals(abs(Duration(-1)), Duration(1))
        self.assertEquals(abs(Duration(0, -1)), Duration(0, 1))
        self.assertEquals(abs(Duration(-1, -1)), Duration(1, 1))
# Pre-allocated image buffers: a double-width preview strip and one frame.
img_prv = np.zeros([img_h, img_w * 2, 3], dtype=np.uint8)
img = np.zeros([img_h, img_w, 3], dtype=np.uint8)

t_start = bag.get_start_time()
s_img0 = bag.get_message_count(topic_filters=[ros_msg[0]])
s_img1 = bag.get_message_count(topic_filters=[ros_msg[2]])

# Collect every message timestamp on the first image topic.
# (Dropped a redundant ros_t.clear() that immediately followed the
# empty-list creation -- it was a no-op.)
ros_t = []
for topic, msg, t in bag.read_messages(topics=[ros_msg[0]]):
    ros_t.append(t)

# pd.DataFrame(np.array(ros_t)).to_csv("viode_timestamp.csv")

# Re-read the bag in ~1 ms windows around each collected timestamp so each
# iteration decodes exactly one frame per topic.
for i in range(int(s_img0)):
    st = Time(ros_t[i].secs, ros_t[i].nsecs - 1e6)
    if i > 0:
        st = Time(ros_t[i - 1].secs, ros_t[i - 1].nsecs + 1e6)
    et = Time(ros_t[i].secs, ros_t[i].nsecs)
    # print(et.to_nsec() - st.to_nsec())

    for topic, msg, t in bag.read_messages(topics=ros_msg,
                                           start_time=st,
                                           end_time=et):
        if topic == ros_msg[0]:
            img = bridge.imgmsg_to_cv2(msg, "bgr8")

            # if topic == ros_msg[1]:
            #     img_prv[img_h:, :img_w, :] = bridge.imgmsg_to_cv2(msg, "bgr8")
            ...

            # if topic == ros_msg[2]:
Exemplo n.º 18
0
    def loop(self):
        """Running loop to determine if the base link is moving.

        Polls the base frame's pose in the world frame every
        params['loop_rate'] seconds; once the base has stayed within
        params['tolerance'] metres for params['num_stationary'] consecutive
        polls, a new navigation goal is computed and sent to the turtlebot
        action client. Runs until ROS shutdown or Ctrl-C.
        """
        # Seed both poses with identity orientation in the base frame.
        old_pose = PoseStamped()
        old_pose.header.frame_id = self.params['base_frame']
        old_pose.pose.orientation.w = 1.0
        new_pose = PoseStamped()
        new_pose.header.frame_id = self.params['base_frame']
        new_pose.pose.orientation.w = 1.0
        num_stationary = 0
        # Total wall time the robot must be still before a goal is sent.
        total_time = self.params['loop_rate'] * self.params['num_stationary']
        nav_goal = MoveBaseGoal()
        Me.info_message("Starting loop")
        while not is_shutdown():
            try:
                self.tfl.waitForTransform(self.params['world_frame'],
                                          self.params['base_frame'], Time(0),
                                          Duration(2))
                # Express a fresh identity pose (i.e. the base origin) in
                # the world frame to get the current base position.
                new_pose = PoseStamped()
                new_pose.header.frame_id = self.params['base_frame']
                new_pose.pose.orientation.w = 1.0
                new_pose = self.tfl.transformPose(self.params['world_frame'],
                                                  new_pose)
                # Planar (x, y) distance moved since the previous poll.
                dist = \
                    sqrt((new_pose.pose.position.x -
                          old_pose.pose.position.x) ** 2.0 +
                         (new_pose.pose.position.y -
                          old_pose.pose.position.y) ** 2.0)

                if self.params['debug']:
                    Me.info_message("New Pose: " + str(new_pose))
                    Me.info_message("Old Pose: " + str(old_pose))
                    Me.info_message("Dist: " + str(dist))

                if dist <= self.params['tolerance']:
                    num_stationary += 1
                elif dist > self.params['tolerance']:
                    # Movement occurred, zero number
                    num_stationary = 0
                else:
                    # Unreachable unless dist or tolerance is NaN.
                    Me.error_message("Error with distance/tolerance "
                                     "comparision.")
                    raise KeyboardInterrupt
                if num_stationary >= self.params['num_stationary']:
                    Me.info_message(
                        str(self.params['base_frame']) + " has "
                        "been stationary for required " + str(total_time) +
                        " seconds.")
                    if self.params['debug']:
                        Me.info_message("Polygons in map: " +
                                        str(len(self.map.polygons)))
                        Me.info_message("Right_left: " +
                                        str(self.params['right_left']))
                    # Call Adam's function (self.map, self.params['right_left'],
                    #  new_pose)
                    # self.turtlebot_ac.send_goal(nav_goal)
                    nav_goal.target_pose = \
                        get_viewpoint(self.map, self.params['right_left'],
                                      new_pose)
                    self.turtlebot_ac.send_goal(goal=nav_goal)
                else:
                    current_time = self.params['loop_rate'] * num_stationary
                    Me.info_message(
                        str(self.params['base_frame']) + " has been"
                        " stationary for " + str(current_time) +
                        " seconds but " + str(total_time) +
                        " seconds are needed.")
                # Store old pose
                old_pose = deepcopy(new_pose)
                # Wait before getting next pose
                sleep(self.params['loop_rate'])
            except TfExcept:
                # Surface tf failures to the caller after optional logging.
                if self.params['debug']:
                    Me.info_message('Transform exception')
                raise TfExcept
            except KeyboardInterrupt:
                break
        return
Exemplo n.º 19
0
def stampToTime(stamp):
    """Convert a message header stamp into a Time with identical fields."""
    secs, nsecs = stamp.secs, stamp.nsecs
    return Time(secs=secs, nsecs=nsecs)
Exemplo n.º 20
0
    def update_info_text(self):
        """
        update the displayed info text

        Looks up the vehicle pose in the map frame (ROS 1 tf listener or
        ROS 2 tf buffer), derives yaw/heading, and rebuilds the
        self._info_text list shown by the HUD. Skipped entirely when the
        info overlay is hidden.
        """
        if not self._show_info:
            return
        try:
            if ROS_VERSION == 1:
                (position, rotation) = self.tf_listener.lookupTransform(
                    '/map', self.role_name, Time())
            elif ROS_VERSION == 2:
                transform = self.tf_buffer.lookup_transform(
                    target_frame='map', source_frame=self.role_name, time=Time())
                # Repack into the (position list, wxyz quaternion) shape the
                # ROS 1 branch produces.
                position = [transform.transform.translation.x,
                            transform.transform.translation.y,
                            transform.transform.translation.z]
                rotation = [transform.transform.rotation.w,
                            transform.transform.rotation.x,
                            transform.transform.rotation.y,
                            transform.transform.rotation.z]
            _, _, yaw = quat2euler(rotation)
            yaw = math.degrees(yaw)
            x = position[0]
            y = position[1]
            z = position[2]
        except (LookupException, ConnectivityException, ExtrapolationException):
            # No transform yet: show a zeroed pose rather than failing.
            x = 0
            y = 0
            z = 0
            yaw = 0
        # Compose a compass heading (N/S/E/W combinations) from yaw degrees.
        heading = 'N' if abs(yaw) < 89.5 else ''
        heading += 'S' if abs(yaw) > 90.5 else ''
        heading += 'E' if 179.5 > yaw > 0.5 else ''
        heading += 'W' if -0.5 > yaw > -179.5 else ''
        fps = 0

        time = str(datetime.timedelta(seconds=self.node.get_time()))[:10]

        if self.carla_status.fixed_delta_seconds:
            fps = 1 / self.carla_status.fixed_delta_seconds
        self._info_text = [
            'Frame: % 22s' % self.carla_status.frame,
            'Simulation time: % 12s' % time,
            'FPS: % 24.1f' % fps, '',
            'Vehicle: % 20s' % ' '.join(self.vehicle_info.type.title().split('.')[1:]),
            'Speed:   % 15.0f km/h' % (3.6 * self.vehicle_status.velocity),
            u'Heading:% 16.0f\N{DEGREE SIGN} % 2s' % (yaw, heading),
            'Location:% 20s' % ('(% 5.1f, % 5.1f)' % (x, y)),
            'GNSS:% 24s' % ('(% 2.6f, % 3.6f)' % (self.latitude, self.longitude)),
            'Height:  % 18.0f m' % z, ''
        ]
        # Tuple entries are rendered as labelled gauges/checkboxes by the HUD.
        self._info_text += [
            ('Throttle:', self.vehicle_status.control.throttle, 0.0, 1.0),
            ('Steer:', self.vehicle_status.control.steer, -1.0, 1.0),
            ('Brake:', self.vehicle_status.control.brake, 0.0, 1.0),
            ('Reverse:', self.vehicle_status.control.reverse),
            ('Hand brake:', self.vehicle_status.control.hand_brake),
            ('Manual:', self.vehicle_status.control.manual_gear_shift),
            'Gear:        %s' % {
                -1: 'R',
                0: 'N'
            }.get(self.vehicle_status.control.gear, self.vehicle_status.control.gear), ''
        ]
        self._info_text += [('Manual ctrl:', self.manual_control)]
        if self.carla_status.synchronous_mode:
            self._info_text += [('Sync mode running:', self.carla_status.synchronous_mode_running)]
        self._info_text += ['', '', 'Press <H> for help']