Code example #1
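# Republishes the tf transform /world -> /camera_left as a TransformStamped
# and visualizes it as a sphere Marker in RViz. The import block below is
# inferred from the identifiers used in the snippet (it is not part of the
# original excerpt):
import rospy
import tf
import visualization_msgs.msg as vis_msg
from geometry_msgs.msg import TransformStamped
from std_msgs.msg import ColorRGBA, Header
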
def talker():
    rospy.init_node('kitti_tf_pub', anonymous=True)

    listener = tf.TransformListener()

    marker_pub_ = rospy.Publisher('viz_msgs_marker_publisher',
                                  vis_msg.Marker,
                                  latch=True,
                                  queue_size=10)

    pub = rospy.Publisher('kitti/transformStamped',
                          TransformStamped,
                          queue_size=10)
    rate = rospy.Rate(10)  # 10hz
    while not rospy.is_shutdown():

        try:
            (trans, rot) = listener.lookupTransform('/world', '/camera_left',
                                                    rospy.Time(0))

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rate.sleep()  # avoid busy-waiting while the transform is missing
            continue

        xform = TransformStamped()

        now = rospy.Time.now()
        xform.header = Header()

        xform.header.frame_id = '/world'
        xform.header.stamp = now
        xform.child_frame_id = '/camera_left'
        # Note the axis remap when copying the lookup result into the message
        # (x <- trans[2], y <- trans[0], z <- trans[1]).
        xform.transform.translation.x = trans[2]
        xform.transform.translation.y = trans[0]
        xform.transform.translation.z = trans[1]

        xform.transform.rotation.x = rot[0]
        xform.transform.rotation.y = rot[1]
        xform.transform.rotation.z = rot[2]
        xform.transform.rotation.w = rot[3]

        # hello_str = "hello world %s" % rospy.get_time()

        # rospy.loginfo(hello_str)

        pub.publish(xform)
        marker = vis_msg.Marker(type=vis_msg.Marker.SPHERE,
                                ns='xyz',
                                action=vis_msg.Marker.ADD,
                                id=0)
        marker.header.frame_id = '/world'
        marker.header.stamp = now
        marker.scale.x = 1000
        marker.scale.y = 1000
        marker.scale.z = 100
        # RViz reads the single `color` field for a SPHERE marker; the
        # `colors` list is only honored by point-list marker types.
        marker.color = ColorRGBA(1.0, 1.0, 1.0, 1.0)

        marker.pose.position = xform.transform.translation
        marker.pose.orientation = xform.transform.rotation
        marker.lifetime = rospy.Duration()
        marker_pub_.publish(marker)

        rate.sleep()
Code example #2
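# This excerpt opens mid-method (an elif branch of a color handler), then
# shows the node entry point, which broadcasts a static identity transform
# from /reference/left_hand_camera to /left_hand_camera via tf2_ros.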
            elif self.detected_class == 'red':
                self.Mover.move(drop_point[0] - self.sweep_dist, drop_point[1],
                                drop_point[2])


if __name__ == '__main__':
    # Run this program as a new node in the ROS computation graph,
    # registered as /our_controller.
    rospy.init_node('our_controller', anonymous=True)

    #Static Transforms
    broadcaster = tf2_ros.StaticTransformBroadcaster()
    static_transformStamped = TransformStamped()
    static_transformStamped.header.stamp = rospy.Time.now()
    static_transformStamped.header.frame_id = "/reference/left_hand_camera"
    static_transformStamped.child_frame_id = "/left_hand_camera"

    static_transformStamped.transform.translation.x = 0
    static_transformStamped.transform.translation.y = 0
    static_transformStamped.transform.translation.z = 0

    static_transformStamped.transform.rotation.x = 0
    static_transformStamped.transform.rotation.y = 0
    static_transformStamped.transform.rotation.z = 0
    static_transformStamped.transform.rotation.w = 1

    broadcaster.sendTransform(static_transformStamped)
Code example #3
File: motor_node.py Project: DaGaMa13/RotomBotto
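# Test-mode motor node: subscribes to normalized wheel speeds, drives a
# Roboclaw motor controller over serial, and integrates the encoder readings
# into odometry, published both as an odom -> base_link tf transform and as
# an Odometry message. rospy, tf, math, Roboclaw and the message types are
# assumed to be imported at the top of the file.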
def main(portName):
    print "Inicializando motores en modo de PRUEBA"

    ###Connection with ROS
    rospy.init_node("motor_node")

    #Suscripcion a Topicos
    subSpeeds = rospy.Subscriber(
        "/hardware/motors/speeds", Float32MultiArray,
        callbackSpeeds)  #Topico para obtener vel y dir de los motores

    #Publicacion de Topicos

    #pubRobPos = rospy.Publisher("mobile_base/position", Float32MultiArray,queue_size = 1)
    #Publica los datos de la posición actual del robot

    pubOdometry = rospy.Publisher("mobile_base/odometry",
                                  Odometry,
                                  queue_size=1)
    #Publica los datos obtenidos de los encoders y analizados para indicar la posicion actual del robot

    #Estableciendo parametros de ROS
    br = tf.TransformBroadcaster(
    )  #Adecuando los datos obtenidos al sistema de coordenadas del robot
    rate = rospy.Rate(1)

    #Comunicacion serial con la tarjeta roboclaw Roboclaw

    print "Roboclaw.-> Abriendo conexion al puerto serial designacion: \"" + str(
        portName) + "\""
    RC = Roboclaw(portName, 38400)
    #Roboclaw.Open(portName, 38400)
    RC.Open()
    address = 0x80
    print "Roboclaw.-> Conexion establecida en el puerto serila designacion \"" + portName + "\" a 38400 bps (Y)"
    print "Roboclaw.-> Limpiando lecturas de enconders previas"
    RC.ResetEncoders(address)

    # Speed control variables
    global leftSpeed
    global rightSpeed
    global newSpeedData

    leftSpeed = 0  # normalized, in [-1, 1]
    rightSpeed = 0  # normalized, in [-1, 1]
    newSpeedData = False  # No new motion data at startup
    speedCounter = 5

    # Odometry variables
    robotPos = [0, 0, 0]  # [x, y, theta]

    # ROS loop
    # print "[VEGA]:: Testing the ROTOMBOTTO motors"
    while not rospy.is_shutdown():

        if newSpeedData:  # New data arrived on the /hardware/motors/speeds topic

            newSpeedData = False
            speedCounter = 5

            # Sending the speed commands to the Roboclaw

            # Scaling the normalized speeds to the Roboclaw range
            leftSpeed = int(leftSpeed * 127)
            rightSpeed = int(rightSpeed * 127)

            # Setting the right motor direction
            if rightSpeed >= 0:
                RC.ForwardM1(address, rightSpeed)
            else:
                RC.BackwardM1(address, -rightSpeed)

            # Setting the left motor direction
            if leftSpeed >= 0:
                RC.ForwardM2(address, leftSpeed)
            else:
                RC.BackwardM2(address, -leftSpeed)

        else:  # No new topic data was received; the motors stop

            speedCounter -= 1

            if speedCounter == 0:
                RC.ForwardM1(address, 0)
                RC.ForwardM2(address, 0)

            if speedCounter < -1:
                speedCounter = -1

        #-------------------------------------------------------------------------------------------------------

        # Reading the encoders
        encoderLeft = RC.ReadEncM2(
            address)  # Might need to be multiplied by -1; perhaps unnecessary
        encoderRight = RC.ReadEncM1(
            address
        )  # The negative value from this encoder comes from the motor's mounting orientation.
        RC.ResetEncoders(address)

        #print "Lectura de los enconders Encoders: EncIzq" + str(encoderLeft) + "  EncDer" + str(encoderRight)

        ### Odometry computation
        robotPos = calculateOdometry(robotPos, encoderLeft, encoderRight)

        # pubRobPos.publish(robotPos)  # Publish the computed position

        ts = TransformStamped()
        ts.header.stamp = rospy.Time.now()
        ts.header.frame_id = "odom"
        ts.child_frame_id = "base_link"

        ts.transform.translation.x = robotPos[0]
        ts.transform.translation.y = robotPos[1]
        ts.transform.translation.z = 0
        # quaternion_from_euler returns a plain tuple, so keep it in a local:
        # the Quaternion message fields and sendTransform each need it in the
        # right form.
        quat = tf.transformations.quaternion_from_euler(0, 0, robotPos[2])
        ts.transform.rotation.x = quat[0]
        ts.transform.rotation.y = quat[1]
        ts.transform.rotation.z = quat[2]
        ts.transform.rotation.w = quat[3]

        br.sendTransform((robotPos[0], robotPos[1], 0), quat,
                         rospy.Time.now(), ts.child_frame_id,
                         ts.header.frame_id)

        msgOdom = Odometry()

        msgOdom.header.stamp = rospy.Time.now()
        msgOdom.pose.pose.position.x = robotPos[0]
        msgOdom.pose.pose.position.y = robotPos[1]
        msgOdom.pose.pose.position.z = 0
        msgOdom.pose.pose.orientation.x = 0
        msgOdom.pose.pose.orientation.y = 0
        msgOdom.pose.pose.orientation.z = math.sin(robotPos[2] / 2)
        msgOdom.pose.pose.orientation.w = math.cos(robotPos[2] / 2)

        pubOdometry.publish(msgOdom)  # Publish the odometry data

        rate.sleep()

    # End of the ROS while loop
    #------------------------------------------------------------------------------------------------------

    RC.ForwardM1(address, 0)
    RC.ForwardM2(address, 0)
    RC.Close()
Code example #4
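    # Estimates a human-to-robot transform from sampled data rows (robot
    # position p plus pointing-ray components qc, qv) using relloclib, logs
    # the residual errors, and packs the recovered (tx, ty, tz, rotz) into a
    # TransformStamped from self.human_frame to self.robot_root_frame.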
    def estimate_pose(self, pts, constraints=True):
        ########### FIXME ##############
        np.random.seed(0)
        ################################

        data = np.array(pts)

        sample_size = self.sample_size
        if sample_size > data.shape[0]:
            sample_size = data.shape[0]

        idx = np.random.choice(data.shape[0], size=sample_size, replace=False)
        tmp = data[idx, :]
        p = np.transpose(tmp[:, 0:3])
        qc = np.transpose(tmp[:, 3:6])
        qv = np.transpose(tmp[:, 6:9])

        # rospy.loginfo('{} {} {}'.format(p, qc, qv))

        max_p = np.max(p, axis=1)
        min_p = np.min(p, axis=1)
        motion_span = np.linalg.norm(max_p - min_p)
        # if motion_span > self.robot_motion_span_min:
        #     return None, None
        #     # rospy.logwarn('Robot motion span: %3.4f', motion_span)

        rospy.loginfo(
            'Estimating pose. Using {} of total {} data points'.format(
                sample_size, data.shape[0]))

        if constraints:
            res, maxerr = relloclib.estimate_pose(p, qc, qv, self.estimated_tf)
        else:
            res, maxerr = relloclib.estimate_pose_no_constraints(
                p, qc, qv, self.estimated_tf)
        self.estimated_tf = res.x

        rospy.loginfo("Average angular error (residual) in deg: {:.2f}".format(
            np.rad2deg(res.fun)))
        rospy.loginfo("Maximum angular error in deg: {:.2f}".format(
            np.rad2deg(maxerr)))
        rospy.loginfo(
            "Recovered transform (tx, ty, tz, rotz): {:.2f}, {:.2f}, {:.2f}, {:.2f}"
            .format(res.x[0], res.x[1], res.x[2], np.rad2deg(res.x[3])))

        est_quat = kdl.Rotation.RPY(0.0, 0.0, res.x[3]).GetQuaternion()
        est_tran = res.x[:3]

        t = TransformStamped()

        t.header.frame_id = self.human_frame
        t.child_frame_id = self.robot_root_frame
        t.header.stamp = rospy.Time.now()
        t.transform.translation.x = est_tran[0]
        t.transform.translation.y = est_tran[1]
        t.transform.translation.z = est_tran[2]
        t.transform.rotation.x = est_quat[0]
        t.transform.rotation.y = est_quat[1]
        t.transform.rotation.z = est_quat[2]
        t.transform.rotation.w = est_quat[3]

        self.estimated = True

        return t, res.fun
Code example #5
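# Proportional controller sketch: broadcasts a fixed "ball" frame under
# "left", looks up the transform from turtlebot_frame to "left", and maps
# the x/y offset into a Twist command with gains K1 (linear) and K2
# (angular); note that the goal_frame argument is unused in this excerpt.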
def controller(turtlebot_frame, goal_frame):
    """
  Controls a turtlebot whose position is denoted by turtlebot_frame,
  to go to a position denoted by target_frame
  Inputs:
  - turtlebot_frame: the tf frame of the AR tag on your turtlebot
  - target_frame: the tf frame of the target AR tag
  """

    ################################### YOUR CODE HERE ##############

    #Create a publisher and a tf buffer, which is primed with a tf listener
    pub = rospy.Publisher("/yellow/mobile_base/commands/velocity",
                          Twist,
                          queue_size=10)
    tfBuffer = tf2_ros.Buffer()
    tfListener = tf2_ros.TransformListener(tfBuffer)
    br = tf2_ros.TransformBroadcaster()

    # Create a timer object that will sleep long enough to result in
    # a 10Hz publishing rate
    r = rospy.Rate(10)  # 10hz

    K1 = 0.3
    K2 = 1
    Ks = [K1, K2]
    # Loop until the node is killed with Ctrl-C
    while not rospy.is_shutdown():
        try:
            #print("Before")
            rospy.sleep(0.1)
            t = TransformStamped()
            t.header.stamp = rospy.Time.now()
            t.transform.translation.x = 0
            t.transform.translation.y = 0
            t.transform.translation.z = 0

            t.transform.rotation.x = 0
            t.transform.rotation.y = 0
            t.transform.rotation.z = -0.707
            t.transform.rotation.w = 0.707
            t.header.frame_id = "left"
            t.child_frame_id = "ball"

            br.sendTransform(t)

            #t.header.frame_id = "test"
            trans = tfBuffer.lookup_transform(turtlebot_frame, "left",
                                              rospy.Time())

            #print("After")
            # Process trans to get your state error
            # Generate a control command to send to the robot
            #print(trans)
            x = trans.transform.translation.x * Ks[0]
            theta = trans.transform.translation.y * Ks[1]
            #print(turtlebot_frame)
            control_command = Twist(Vector3(x, 0, 0), Vector3(0, 0, theta))

            #################################### end your code ###############

            pub.publish(control_command)
            print(control_command)
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            print(e)
        # Use our rate object to sleep until it is time to publish again
        r.sleep()
Code example #6
File: test.py Project: ymd-stella/geometry2
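    # tf2_ros Buffer test (ROS 2): registers a transform with a 180-degree
    # rotation about x, then checks lookup_transform, Buffer.transform on
    # PointStamped and PoseStamped, and do_transform_vector3 under identity
    # and 180-degree-about-y rotations (translation must not affect vectors).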
    def test_transform(self):
        b = tf2_ros.Buffer()
        t = TransformStamped()
        t.transform.translation.x = 1.0
        t.transform.rotation = Quaternion(w=0.0, x=1.0, y=0.0, z=0.0)
        t.header.stamp = rclpy.time.Time(seconds=2.0).to_msg()
        t.header.frame_id = 'a'
        t.child_frame_id = 'b'
        b.set_transform(t, 'eitan_rocks')
        out = b.lookup_transform('a', 'b',
                                 rclpy.time.Time(seconds=2.0).to_msg(),
                                 rclpy.time.Duration(seconds=2))
        self.assertEqual(out.transform.translation.x, 1)
        self.assertEqual(out.transform.rotation.x, 1)
        self.assertEqual(out.header.frame_id, 'a')
        self.assertEqual(out.child_frame_id, 'b')

        v = PointStamped()
        v.header.stamp = rclpy.time.Time(seconds=2.0).to_msg()
        v.header.frame_id = 'a'
        v.point.x = 1.0
        v.point.y = 2.0
        v.point.z = 3.0
        # b.registration.add(PointStamped)
        out = b.transform(v, 'b', new_type=PointStamped)
        self.assertEqual(out.point.x, 0)
        self.assertEqual(out.point.y, -2)
        self.assertEqual(out.point.z, -3)

        v = PoseStamped()
        v.header.stamp = rclpy.time.Time(seconds=2.0).to_msg()
        v.header.frame_id = 'a'
        v.pose.position.x = 1.0
        v.pose.position.y = 2.0
        v.pose.position.z = 3.0
        v.pose.orientation = Quaternion(w=0.0, x=1.0, y=0.0, z=0.0)
        out = b.transform(v, 'b')
        self.assertEqual(out.pose.position.x, 0)
        self.assertEqual(out.pose.position.y, -2)
        self.assertEqual(out.pose.position.z, -3)

        # Translation shouldn't affect Vector3
        t = TransformStamped()
        t.transform.translation.x = 1.0
        t.transform.translation.y = 2.0
        t.transform.translation.z = 3.0
        t.transform.rotation = Quaternion(w=1.0, x=0.0, y=0.0, z=0.0)
        v = Vector3Stamped()
        v.vector.x = 1.0
        v.vector.y = 0.0
        v.vector.z = 0.0
        out = tf2_geometry_msgs.do_transform_vector3(v, t)
        self.assertEqual(out.vector.x, 1)
        self.assertEqual(out.vector.y, 0)
        self.assertEqual(out.vector.z, 0)

        # Rotate Vector3 180 deg about y
        t = TransformStamped()
        t.transform.translation.x = 1.0
        t.transform.translation.y = 2.0
        t.transform.translation.z = 3.0
        t.transform.rotation = Quaternion(w=0.0, x=0.0, y=1.0, z=0.0)

        v = Vector3Stamped()
        v.vector.x = 1.0
        v.vector.y = 0.0
        v.vector.z = 0.0

        out = tf2_geometry_msgs.do_transform_vector3(v, t)
        self.assertEqual(out.vector.x, -1)
        self.assertEqual(out.vector.y, 0)
        self.assertEqual(out.vector.z, 0)
Code example #7
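    # Filters a marker detection against the map: auxiliary "_map_reference"
    # frames are inserted into the tf buffer so that selected translation and
    # rotation components of the detection (per filter_config) can be
    # suppressed before re-expressing the believed pose as a measured pose.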
    def get_measured_pose_filtered(self, believed_pose, marker):
        time_stamp = believed_pose.header.stamp
        frame_detected = "tjululu/detected" + str(marker.id)
        if marker.id > 15:
            frame_marker = "sign/warning_roundabout"
        else:
            frame_marker = "aruco/marker" + str(marker.id)
        t = self.transform_from_marker(marker, frame_detected, time_stamp)
        self.tf_buf.set_transform(t, "gandalfs_authority")
        #self.broadcaster.sendTransform(t) # for visualization
        measured_orientation = self.get_map_to_map_detected_rotation(
            frame_marker, frame_detected, time_stamp)
        if not measured_orientation:
            return

        # Disregard rotational displacement of marker detection
        ax, ay, az = euler_from_quaternion([
            measured_orientation.x, measured_orientation.y,
            measured_orientation.z, measured_orientation.w
        ])
        ax *= self.filter_config[3]
        ay *= self.filter_config[4]
        az *= self.filter_config[5]
        measured_orientation = Quaternion(*quaternion_from_euler(ax, ay, az))

        marker_map_frame = frame_marker + "_map_reference"
        map_to_marker = self.tf_buf.lookup_transform_core(
            "map", frame_marker, time_stamp)
        map_to_marker.child_frame_id = marker_map_frame
        map_to_marker.header.stamp = time_stamp
        map_to_marker.transform.rotation = Quaternion(*[0, 0, 0, 1])
        self.tf_buf.set_transform(map_to_marker, "gandalfs_authority")
        #self.broadcaster.sendTransform(map_to_marker) # for visualization

        detected_map_frame = frame_detected + "_map_reference"
        # not use _core here?
        map_to_detected = self.tf_buf.lookup_transform_core(
            "map", frame_detected, time_stamp)
        map_to_detected.child_frame_id = detected_map_frame
        map_to_detected.header.stamp = time_stamp
        # Disregard translational displacement of marker detection
        if not self.filter_config[0]:
            map_to_detected.transform.translation.x = map_to_marker.transform.translation.x
        if not self.filter_config[1]:
            map_to_detected.transform.translation.y = map_to_marker.transform.translation.y
        if not self.filter_config[2]:
            map_to_detected.transform.translation.z = map_to_marker.transform.translation.z
        map_to_detected.transform.rotation = measured_orientation
        self.tf_buf.set_transform(map_to_detected, "gandalfs_authority")
        #self.broadcaster.sendTransform(map_to_detected) # for visualization

        pose_in_detected = self.tf_buf.transform(believed_pose,
                                                 detected_map_frame)

        pose_in_marker = pose_in_detected
        pose_in_marker.header.frame_id = marker_map_frame
        measured_pose = self.tf_buf.transform(pose_in_marker,
                                              believed_pose.header.frame_id)

        # visualization

        #map_to_marker = self.tf_buf.lookup_transform_core("map", frame_marker, time_stamp)
        #marker_to_detected = self.tf_buf.lookup_transform_core(marker_map_frame, detected_map_frame, time_stamp)
        #map_to_detected = self.tf_buf.lookup_transform_core("map", frame_detected, time_stamp)
        ref_to_ref_det = self.tf_buf.lookup_transform_core(
            marker_map_frame, detected_map_frame, time_stamp)
        map_to_detected_ref = self.tf_buf.lookup_transform_core(
            "map", detected_map_frame, time_stamp)
        filt_det = TransformStamped()
        filt_det.header.stamp = time_stamp
        filt_det.header.frame_id = "map"
        filt_det.child_frame_id = frame_detected + "_filtered"
        filt_det.transform.translation = map_to_detected_ref.transform.translation
        filt_det.transform.rotation = ref_to_ref_det.transform.rotation
        #q1 = [map_to_marker.transform.rotation.x, map_to_marker.transform.rotation.y,
        #      map_to_marker.transform.rotation.z, map_to_marker.transform.rotation.w]
        #q2 = [measured_orientation.x, measured_orientation.y, measured_orientation.z, measured_orientation.w]
        #q = quaternion_multiply(q1, q2)
        #filt_det.transform.rotation = Quaternion(*q)
        #self.broadcaster.sendTransform(filt_det)

        # end visualization

        return self.tf_buf.transform(measured_pose, "map")
Code example #8
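# Bridges a line-delimited JSON TCP feed into ROS: every `decimation`-th line
# is republished as ECEF and ENU Odometry messages (covariances assembled
# with numpy), a clock-error Float64, and a /tf transform. The publishers and
# connection settings (host, port, child_frame_id) are globals defined
# elsewhere in the file.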
def go():
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))

    first = True

    buf = ''
    count = 0
    while True:
        d = s.recv(2**12)
        if not d:
            break
        buf += d

        lines = buf.split('\n')
        buf = lines[-1]
        for line in lines[:-1]:
            if first:
                first = False
                continue

            if count % decimation == 0:
                d = json.loads(line)

                ecef_cov = numpy.array(d[
                    'X_position_relative_position_orientation_ecef_covariance']
                                       )
                absodom_pub.publish(
                    Odometry(
                        header=Header(
                            stamp=rospy.Time.from_sec(d['timestamp'] * 1e-9),
                            frame_id='/ecef',
                        ),
                        child_frame_id=child_frame_id,
                        pose=PoseWithCovariance(
                            pose=Pose(
                                position=Point(*d['position_ecef']),
                                orientation=Quaternion(
                                    **d['orientation_ecef']),
                            ),
                            covariance=numpy.vstack((
                                numpy.hstack(
                                    (ecef_cov[0:3, 0:3], ecef_cov[0:3, 6:9])),
                                numpy.hstack(
                                    (ecef_cov[6:9, 0:3], ecef_cov[6:9, 6:9])),
                            )).flatten(),
                        ),
                        twist=TwistWithCovariance(
                            twist=Twist(
                                linear=Vector3(*d['velocity_body']),
                                angular=Vector3(*d['angular_velocity_body']),
                            ),
                            covariance=numpy.vstack((
                                numpy.hstack((d['X_velocity_body_covariance'],
                                              numpy.zeros((3, 3)))),
                                numpy.hstack(
                                    (numpy.zeros((3, 3)),
                                     d['X_angular_velocity_body_covariance'])),
                            )).flatten(),
                        ),
                    ))
                odom_pub.publish(
                    Odometry(
                        header=Header(
                            stamp=rospy.Time.from_sec(d['timestamp'] * 1e-9),
                            frame_id='/enu',
                        ),
                        child_frame_id=child_frame_id,
                        pose=PoseWithCovariance(
                            pose=Pose(
                                position=Point(*d['relative_position_enu']),
                                orientation=Quaternion(**d['orientation_enu']),
                            ),
                            covariance=numpy.array(d[
                                'X_relative_position_orientation_enu_covariance']
                                                   ).flatten(),
                        ),
                        twist=TwistWithCovariance(
                            twist=Twist(
                                linear=Vector3(*d['velocity_body']),
                                angular=Vector3(*d['angular_velocity_body']),
                            ),
                            covariance=numpy.vstack((
                                numpy.hstack((d['X_velocity_body_covariance'],
                                              numpy.zeros((3, 3)))),
                                numpy.hstack(
                                    (numpy.zeros((3, 3)),
                                     d['X_angular_velocity_body_covariance'])),
                            )).flatten(),
                        ),
                    ))
                clock_error_pub.publish(Float64(d['X_clock_error']))
                tf_pub.publish(
                    tfMessage(transforms=[
                        TransformStamped(
                            header=Header(
                                stamp=rospy.Time.from_sec(d['timestamp'] *
                                                          1e-9),
                                frame_id='/enu',
                            ),
                            child_frame_id=child_frame_id,
                            transform=Transform(
                                translation=Point(*d['relative_position_enu']),
                                rotation=Quaternion(**d['orientation_enu']),
                            ),
                        ),
                    ], ))

            count += 1
Code example #9
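# Differential-drive odometry callback: integrates encoder pulse counts into
# a global (x, y, th) pose, using an exact circular-arc model when the wheel
# distances differ, then broadcasts odom -> base_footprint_odom and publishes
# an Odometry message. Globals such as PPR, WHEEL_DIA, TREAD and the
# publishers are assumed to be defined elsewhere in the file.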
def update_pos_by_odom(od_msg):
    global x, y, th, last_time
    current_time = od_msg.stamp
    if last_time is None:
        last_time = current_time
        return

    # compute odometry in a typical way given the velocities of the robot
    dt = (current_time - last_time).to_sec()
    d_l = od_msg.pulses[0] / PPR * WHEEL_DIA * pi
    d_r = od_msg.pulses[1] / PPR * WHEEL_DIA * pi
    v_l = d_l / dt
    v_r = d_r / dt

    v = (v_l + v_r) / 2.0
    #   In the odom frame the velocity components would be
    #    vx = v*cos(th)
    #    vy = v*sin(th)
    #   but velocity is reported relative to the "base_link" frame
    #   (http://wiki.ros.org/ja/navigation/Tutorials/RobotSetup/Odom),
    #   so the following is fine: pose is in odom, twist is in base_link.
    vx = v
    vy = 0
    vth = (v_r - v_l) / TREAD  # omega

    if d_l == d_r:
        x += v * dt * cos(th)
        y += v * dt * sin(th)
    else:
        delta_th = vth * dt
        R = v / vth
        rospy.logdebug("R=%.1f", R)
        #        print "R=",R
        #   1st term: from the turn start point to the turn center;
        #   2nd term: from the turn center to the turn end point
        x += R * cos(th + pi / 2.0) + R * cos(th - pi / 2.0 + delta_th)
        y += R * sin(th + pi / 2.0) + R * sin(th - pi / 2.0 + delta_th)
        #        if v>=0:
        #            x += R*cos(th+pi/2.0)+R*cos(th-pi/2.0+delta_th)
        #            y += R*sin(th+pi/2.0)+R*sin(th-pi/2.0+delta_th)
        #        else:
        #            x += R*cos(th-pi/2.0)+R*cos(th+pi/2.0+delta_th)
        #            y += R*sin(th-pi/2.0)+R*sin(th+pi/2.0+delta_th)
        th += delta_th

    t = TransformStamped()
    t.transform.translation.x = x
    t.transform.translation.y = y
    t.transform.translation.z = 0.0

    # since all odometry is 6DOF we'll need a quaternion created from yaw
    odom_quat = tf_conversions.transformations.quaternion_from_euler(0, 0, th)
    t.transform.rotation.x = odom_quat[0]
    t.transform.rotation.y = odom_quat[1]
    t.transform.rotation.z = odom_quat[2]
    t.transform.rotation.w = odom_quat[3]

    # first, we'll publish the transform over tf
    t.header.stamp = current_time
    t.header.frame_id = "odom"
    t.child_frame_id = "base_footprint_odom"
    odom_broadcaster.sendTransform(t)

    # next, we'll publish the odometry message over ROS
    odom = Odometry()
    odom.header.stamp = current_time
    odom.header.frame_id = "odom"

    # set the position
    odom.pose.pose = Pose(Point(x, y, 0.), Quaternion(*odom_quat))

    # set the velocity
    odom.child_frame_id = "base_footprint_odom"
    odom.twist.twist = Twist(Vector3(vx, vy, 0), Vector3(0, 0, vth))

    # publish the message
    odom_pub.publish(odom)

    last_time = current_time
Code example #10
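# Interactive rosbag merging tool: after confirming the input/output files,
# it optionally time-aligns the bags, rewrites selected topics while copying
# them to the output bag (renaming RealSense streams to t265, re-expressing
# the T265 odometry and tf in a fixed initial frame via the qv_mult/qq_mult
# quaternion helpers, rescaling odometry covariance), then writes static tf
# frames from tf_args at their configured rates. Globals such as input_file,
# output_file, match_bags and tf_args are assumed to be defined elsewhere.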
def main_function():

    global msg

    rospy.init_node("bagmerge_node", anonymous=True)

    rospy.sleep(0.5)

    for k in range(len(input_file)):
        sys.stdout.write("\n%s" % input_file[k])
        sys.stdout.flush()

    if raw_input("\nAre you sure these are the correct files? (y/n): ") == "y":
        sys.stdout.write("\n%s\n" % output_file)
        sys.stdout.flush()
        if os.path.exists(output_file):
            if raw_input(
                    "Output file already exists, do you want to replace it? (y/n): "
            ) == "y":
                os.remove(output_file)
            else:
                sys.exit(0)
    else:
        sys.exit(0)

#    sys.stdout.write("\n%s" % input_file)
#    sys.stdout.write("\n%s\n" % output_file)
#    sys.stdout.flush()
#    os.remove(output_file)

    bag_in = numpy.empty(len(input_file), dtype=object)
    bag_time = numpy.zeros((2, len(input_file)))

    for k in range(len(input_file)):

        sys.stdout.write("\nOpening bag %d... " % (k + 1))
        sys.stdout.flush()
        bag_in[k] = rosbag.Bag(input_file[k])
        sys.stdout.write("Ok!")
        sys.stdout.flush()

        bag_time[1, k] = bag_in[k].get_end_time() - bag_in[k].get_start_time()
        for topic, msg, t in bag_in[k].read_messages():
            bag_time[0, k] = rospy.Time.to_sec(t)
            break

    if match_bags:
        bag_start_time = min(bag_time[0])
        bag_end_time = min(bag_time[0]) + max(bag_time[1])
        bag_shift_time = bag_time[0] - min(bag_time[0])
    else:
        bag_start_time = min(bag_time[0])
        bag_end_time = max(bag_time[0] + bag_time[1])
        bag_shift_time = numpy.zeros(len(input_file))

    sys.stdout.write(
        "\n\nBag starting: %10.9f\nBag ending:   %10.9f\nBag duration: %3.1fs\n"
        % (bag_start_time, bag_end_time, (bag_end_time - bag_start_time)))
    for k in range(len(input_file)):
        sys.stdout.write("Bag %d shift:  %3.1f\n" %
                         ((k + 1), bag_shift_time[k]))
    sys.stdout.flush()
    sys.stdout.write("\n")

    bag_out = rosbag.Bag(output_file, "w")

    odom_seq = 0
    k = 0

    while k < len(input_file):

        if "volta" in output_file:
            tf_t265_init = (21.4719467163, 0.453556478024, -0.0645855739713,
                            -0.000818398199044, 0.00119601248298,
                            0.983669400215, 0.179978996515)
        elif "200210_173013" in input_file[k]:
            tf_t265_init = (-10.7891979218, -8.670835495, -0.227906405926,
                            0.00645246729255, 0.000483442592667,
                            0.979790627956, -0.199921101332)
        else:
            tf_t265_init = (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0)

        for topic, msg, t in bag_in[k].read_messages():

            topic_bool = False

            if rospy.is_shutdown():
                k = len(input_file)
                break

            if match_bags and hasattr(msg, "header"):
                t = rospy.Time.from_sec(
                    rospy.Time.to_sec(t) - bag_shift_time[k])
                msg.header.stamp = copy(t)

            if False:
                pass

#            elif topic == "/ar_pose_marker":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/ar_pose_marker_back":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/axis_back/camera_info":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/axis_back/image_raw/compressed":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/axis_front/camera_info":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/axis_front/image_raw/compressed":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
            elif topic == "/cmd_vel":
                topic_bool = True
                bag_out.write(topic, msg, t)
#
#            elif topic == "/decawave/tag_pose":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#                uwb_msg = Odometry()
#                uwb_msg.header = msg.header
#                uwb_msg.pose.pose.position.x = msg.x
#                uwb_msg.pose.pose.position.y = msg.y
#                uwb_msg.pose.pose.position.z = msg.z
#                uwb_msg.pose.pose.orientation.w = 1.0
#                bag_out.write("/decawave/odom", uwb_msg, t)
#
#            elif topic == "/device1/get_joint_state":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/device2/get_joint_state":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/device3/get_joint_state":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/device4/get_joint_state":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/device5/get_joint_state":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/device6/get_joint_state":
#                topic_bool = True
#                bag_out.write(topic, msg, t)

            elif topic == "/imu/data":
                topic_bool = True
                bag_out.write(topic, msg, t)
                msg_wheel = copy(msg)
                msg_wheel.header.frame_id = "imu_wheel_ekf"
                bag_out.write("/imu_wheel_ekf/data", msg_wheel, t)

#            elif topic == "/laser_odom_to_init":
#                topic_bool = True
#                bag_out.write(topic, msg, t)
#
#            elif topic == "/integrated_to_init":
#                topic_bool = True
#                bag_out.write(topic, msg, t)

            elif topic == "/os1_cloud_node/points":
                topic_bool = True
                bag_out.write(topic, msg, t)

            elif topic == "/robot_odom" or topic == "/odom":
                topic_bool = True
                k = 0.01 + abs(msg.pose.pose.orientation.z)
                msg.pose.covariance = [
                    k, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, k, 0.0, 0.0, 0.0, 0.0,
                    0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
                    0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                    100.0 * k
                ]
                bag_out.write("/odom", msg, t)

            elif topic == "/realsense/gyro/sample":
                topic_bool = True
                topic = "/t265" + topic[10:]
                msg.header.frame_id = "t265" + msg.header.frame_id[9:]
                ang_vel = msg.angular_velocity

            elif topic == "/realsense/accel/sample":
                topic_bool = True
                topic = "/t265" + topic[10:]
                msg.header.frame_id = "t265" + msg.header.frame_id[9:]
                if "ang_vel" in locals():
                    imu_data = copy(msg)
                    imu_data.linear_acceleration.x = +copy(
                        msg.linear_acceleration.z)
                    imu_data.linear_acceleration.y = -copy(
                        msg.linear_acceleration.x)
                    imu_data.linear_acceleration.z = -copy(
                        msg.linear_acceleration.y)
                    imu_data.angular_velocity.x = +copy(ang_vel.z)
                    imu_data.angular_velocity.y = -copy(ang_vel.x)
                    imu_data.angular_velocity.z = -copy(ang_vel.y)
                    bag_out.write("/t265/imu/sample", imu_data, t)

            elif topic == "/realsense/fisheye1/image_raw/compressed":
                topic_bool = True
                topic = "/left/image_raw/compressed"
                msg.header.frame_id = "t265" + msg.header.frame_id[9:]
                bag_out.write(topic, msg, t)

            elif topic == "/realsense/fisheye1/camera_info":
                topic_bool = True
                topic = "/left/camera_info"
                msg.header.frame_id = "t265" + msg.header.frame_id[9:]
                bag_out.write(topic, msg, t)

            elif topic == "/realsense/fisheye2/image_raw/compressed" or topic == "realsense/fisheye2/image_raw/compressed":
                topic_bool = True
                topic = "/right/image_raw/compressed"
                msg.header.frame_id = "t265" + msg.header.frame_id[9:]
                bag_out.write(topic, msg, t)

            elif topic == "/realsense/fisheye2/camera_info" or topic == "/realsense/fisheye2/camera_info/":
                topic_bool = True
                topic = "/right/camera_info"
                msg.header.frame_id = "t265" + msg.header.frame_id[9:]
                Tx = msg.P[0] * (
                    2.0 *
                    tf_args[tf_args[:, 8] == "t265_fisheye2_frame", 1][0])
                msg.P = msg.P[0:3] + (Tx, ) + msg.P[4:12]
                bag_out.write(topic, msg, t)

            elif topic == "/realsense/odom/sample":

                topic_bool = True
                topic = "/t265" + topic[10:]
                msg.header.frame_id = "t265_init"
                msg.child_frame_id = "t265_pose"
                v_old = (msg.pose.pose.position.x, msg.pose.pose.position.y,
                         msg.pose.pose.position.z)
                q_old = (msg.pose.pose.orientation.x,
                         msg.pose.pose.orientation.y,
                         msg.pose.pose.orientation.z,
                         msg.pose.pose.orientation.w)
                v_new = qv_mult(q_conjugate(tf_t265_init[3:]),
                                (v_old[0] - tf_t265_init[0], v_old[1] -
                                 tf_t265_init[1], v_old[2] - tf_t265_init[2]))
                q_new = qq_mult(q_conjugate(tf_t265_init[3:]), q_old)
                msg.pose.pose.position.x = v_new[0]
                msg.pose.pose.position.y = v_new[1]
                msg.pose.pose.position.z = v_new[2]
                msg.pose.pose.orientation.x = q_new[0]
                msg.pose.pose.orientation.y = q_new[1]
                msg.pose.pose.orientation.z = q_new[2]
                msg.pose.pose.orientation.w = q_new[3]
                bag_out.write(topic, msg, t)

                clk_msg = Clock()
                clk_msg.clock = copy(t)
                bag_out.write("/clock", clk_msg, t)

            elif topic == "/tf":

                if msg.transforms[0].child_frame_id == "realsense_pose_frame":

                    topic_bool = True
                    msg.transforms[0].header.frame_id = "t265_init"
                    msg.transforms[0].child_frame_id = "t265_pose"
                    v_old = (msg.transforms[0].transform.translation.x,
                             msg.transforms[0].transform.translation.y,
                             msg.transforms[0].transform.translation.z)
                    q_old = (msg.transforms[0].transform.rotation.x,
                             msg.transforms[0].transform.rotation.y,
                             msg.transforms[0].transform.rotation.z,
                             msg.transforms[0].transform.rotation.w)
                    v_new = qv_mult(
                        q_conjugate(tf_t265_init[3:]),
                        (v_old[0] - tf_t265_init[0], v_old[1] -
                         tf_t265_init[1], v_old[2] - tf_t265_init[2]))
                    q_new = qq_mult(q_conjugate(tf_t265_init[3:]), q_old)
                    msg.transforms[0].transform.translation.x = v_new[0]
                    msg.transforms[0].transform.translation.y = v_new[1]
                    msg.transforms[0].transform.translation.z = v_new[2]
                    msg.transforms[0].transform.rotation.x = q_new[0]
                    msg.transforms[0].transform.rotation.y = q_new[1]
                    msg.transforms[0].transform.rotation.z = q_new[2]
                    msg.transforms[0].transform.rotation.w = q_new[3]
                    bag_out.write(topic, msg, t)

                    if "200123_163834" in input_file[k]:
                        odom_seq += 1
                        odom_msg = Odometry()
                        odom_msg.header = copy(msg.transforms[0].header)
                        odom_msg.header.seq = copy(odom_seq)
                        odom_msg.child_frame_id = copy(
                            msg.transforms[0].child_frame_id)
                        odom_msg.pose.pose.position = copy(
                            msg.transforms[0].transform.translation)
                        odom_msg.pose.pose.orientation = copy(
                            msg.transforms[0].transform.rotation)
                        bag_out.write("/t265/odom/sample", odom_msg, t)


#                topic_bool = True
#                if msg.transforms[0].child_frame_id == "estimation":
#                    uwb_ekf_msg = Pose()
#                    uwb_ekf_msg.position = copy(msg.transforms[0].transform.translation)
#                    uwb_ekf_msg.orientation = copy(msg.transforms[0].transform.rotation)
#                    bag_out.write("/espeleo/pose", uwb_ekf_msg, t)

            if topic_bool:
                status_time = 100.0 * (rospy.Time.to_sec(t) - bag_start_time
                                       ) / (bag_end_time - bag_start_time)
                sys.stdout.write(
                    "\rBag  %d/%d - Publishing %-28s %5.1f%%" %
                    ((k + 1), len(input_file), topic, status_time))
                sys.stdout.flush()

        sys.stdout.write("\n")
        sys.stdout.flush()

        k += 1

    sys.stdout.write("\n")
    sys.stdout.flush()

    for k in range(len(tf_args)):
        #        break
        t = bag_start_time

        while t < bag_end_time and not rospy.is_shutdown():

            tf_msg = TransformStamped()
            tf_msg.header.stamp = rospy.Time.from_sec(t)
            tf_msg.header.frame_id = tf_args[k, 7]
            tf_msg.child_frame_id = tf_args[k, 8]
            tf_msg.transform.translation.x = tf_args[k, 0]
            tf_msg.transform.translation.y = tf_args[k, 1]
            tf_msg.transform.translation.z = tf_args[k, 2]
            tf_msg.transform.rotation.x = tf_args[k, 3]
            tf_msg.transform.rotation.y = tf_args[k, 4]
            tf_msg.transform.rotation.z = tf_args[k, 5]
            tf_msg.transform.rotation.w = tf_args[k, 6]

            bag_out.write("/tf", TFMessage([tf_msg]), tf_msg.header.stamp)

            t += 1.0 / tf_args[k, 9]

            status_time = 100.0 * (t - bag_start_time) / (bag_end_time -
                                                          bag_start_time)
            sys.stdout.write(
                "\rTF %2d/%2d - Publishing %-28s %5.1f%%" %
                ((k + 1), len(tf_args), tf_args[k, 8], status_time))
            sys.stdout.flush()

        sys.stdout.write("\n")
        sys.stdout.flush()

    for k in range(len(input_file)):
        bag_in[k].close()
    bag_out.close()

    sys.stdout.write("\nFiles Closed\n\n")
    sys.stdout.flush()
Code example #11
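# Multi-modal state publisher (excerpt): pre-allocates TransformStamped slots
# for the tf frames of interest and subscribes to EMG, IMU and robot endpoint
# state topics; the original snippet ends mid-setup, before the remaining
# subscribers and the 50 Hz publishing loop.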

if __name__ == '__main__':

    # ros node init
    rospy.init_node('multi_modal_states_pub_node', anonymous=True)

    # the publish rate of multiModal_states
    publish_rate = 50

    # tf of interest
    tfoi = ["/left_hand_xsens_new","/right_hand_xsens_new","/left_hand","/right_hand"]

    # the multimodal states to include all info of interest
    multiModal_states = multiModal()
    multiModal_states.tf_of_interest.transforms = [
        TransformStamped() for i in range(len(tfoi))
    ]

    # the listener for tf of interest
    listener = tf.TransformListener()

    # the flag
    flag_emg = False
    flag_imu = False
    flag_endpoint_state = False
    flag_joint_state = False
    flag_tf = False

    # subscribe the states topics
    rospy.Subscriber("/myo_raw_emg_pub", emgState, callback_emg)
    rospy.Subscriber("/myo_raw_imu_pub", Imu, callback_imu)
    rospy.Subscriber("/robot/limb/left/endpoint_state", EndpointState, callback_endpoint_state)
Code example #12
File: car_env.py Project: azeng97/avdrift
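    # Builds the RL state vector for a drift car: feeds the latest pose into
    # a local tf transformer, rotates the map-frame velocity into the body
    # frame, and assembles the fields requested by self.state_info.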
    def getState(self, posData):
        # odomEstimate = self.getOdomEstimate()
        # velxEstimate = odomEstimate.twist.twist.linear.x
        # velyEstimate = odomEstimate.twist.twist.linear.y

        pose = posData.pose.pose
        position = pose.position
        orientation = pose.orientation

        # Note: constructing a TransformListener on every call is expensive;
        # a single long-lived listener on the class would be preferable.
        t = tf.TransformListener()
        m = TransformStamped()
        m.header.frame_id = 'map'
        m.child_frame_id = 'drift_car/base_link'
        m.transform.translation.x = position.x
        m.transform.translation.y = position.y
        m.transform.translation.z = position.z
        m.transform.rotation.x = orientation.x
        m.transform.rotation.y = orientation.y
        m.transform.rotation.z = orientation.z
        m.transform.rotation.w = orientation.w

        (roll, pitch, yaw) = tf.transformations.euler_from_quaternion(
            [orientation.x, orientation.y, orientation.z, orientation.w])
        t.setTransform(m)

        velx = posData.twist.twist.linear.x
        vely = posData.twist.twist.linear.y
        velz = posData.twist.twist.linear.z

        # Transform to body frame velocities
        velVector = Vector3Stamped()
        velVector.vector.x = velx
        velVector.vector.y = vely
        velVector.vector.z = velz
        velVector.header.frame_id = "map"
        # velVectorTransformed = self.tl.transformVector3("base_link", velVector)
        t.waitForTransform('/drift_car/base_link', 'map', rospy.Time(),
                           rospy.Duration(0.1))
        velVectorTransformed = t.transformVector3("drift_car/base_link",
                                                  velVector)
        velxBody = velVectorTransformed.vector.x
        velyBody = velVectorTransformed.vector.y
        velzBody = velVectorTransformed.vector.z

        carTangentialSpeed = math.sqrt(velx**2 + vely**2)
        carAngularVel = posData.twist.twist.angular.z

        stateInfo = {
            "x": position.x,
            "y": position.y,
            "i": orientation.x,
            "j": orientation.y,
            "k": orientation.z,
            "w": orientation.w,
            "xdot": velx,
            "ydot": vely,
            "thetadot": carAngularVel,
            "s": carTangentialSpeed,
            "xdotbodyframe": velxBody,
            "ydotbodyframe": velyBody
        }

        state = []
        for key in self.state_info:
            state.append(stateInfo[key])

            # rewardState = [stateInfo["thetadot"], stateInfo["xdotbodyframe"], stateInfo["ydotbodyframe"]]
        # print(state)
        return np.array(state)
Code example #13
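            # Fragment of a calibration/recording loop: looks up the
            # object/camera (cMo) and world/end-effector (wMe) transforms
            # and republishes both as stamped transforms on a shared topic.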
            try:
                # (The excerpt opens mid-loop; the matching wMe lookup that
                # fills wMeTrans/wMeRot is omitted from the original.)
                (cMoTrans,
                 cMoRot) = listener.lookupTransform(args.object_frame,
                                                    args.camera_frame,
                                                    rospy.Time(0))
            except (
                    tf.LookupException,
                    tf.ConnectivityException,
                    tf.ExtrapolationException,
            ):
                print("Error while looking for cMo and wMe transforms")
                continue

            stamp = rospy.Time.now()

            cMo = TransformStamped(
                header=Header(seq, stamp, args.camera_frame),
                child_frame_id=args.object_frame,
                transform=Transform(cMoTrans, cMoRot),
            )
            wMe = TransformStamped(
                header=Header(seq, stamp, args.world_frame),
                child_frame_id=args.end_effector_frame,
                transform=Transform(wMeTrans, wMeRot),
            )

            pub.publish(cMo)
            pub.publish(wMe)
            publish = False

        rate.sleep()
Code example #14
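    # Myo armband IMU callback: rescales the raw quaternion/accelerometer/
    # gyroscope readings with the myohw scale factors, publishes Imu, RPY,
    # pose and twist messages, broadcasts an imu_global transform, and also
    # republishes the acceleration/angular velocity rotated into the global
    # frame via PyKDL (gravity removed from global z).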
    def proc_imu(quat1, acc, gyro):
        # New info:
        # https://github.com/thalmiclabs/myo-bluetooth/blob/master/myohw.h#L292-L295
        # Scale values for unpacking IMU data
        # define MYOHW_ORIENTATION_SCALE   16384.0f
        # See myohw_imu_data_t::orientation
        # define MYOHW_ACCELEROMETER_SCALE 2048.0f
        # See myohw_imu_data_t::accelerometer
        # define MYOHW_GYROSCOPE_SCALE     16.0f
        # See myohw_imu_data_t::gyroscope
        if not any(quat1):
            # If it's all 0's means we got no data, don't do anything
            return
        h = Header()
        h.stamp = rospy.Time.now()
        # frame_id is node name without /
        h.frame_id = rospy.get_name()[1:]
        
        h2 = Header()
        h2.stamp = rospy.Time.now()
        h2.frame_id = "imu_global"
        
        # We currently don't know the covariance of the sensors with each other
        cov = [0, 0, 0, 0, 0, 0, 0, 0, 0]
        quat = Quaternion(quat1[1] / 16384.0,
                          quat1[2] / 16384.0,
                          quat1[3] / 16384.0,
                          quat1[0] / 16384.0)
        # Normalize the quaternion and accelerometer values
        quatNorm = sqrt(quat.x * quat.x + quat.y *
                        quat.y + quat.z * quat.z + quat.w * quat.w)
        normQuat = Quaternion(quat.x / quatNorm,
                              quat.y / quatNorm,
                              quat.z / quatNorm,
                              quat.w / quatNorm)

        direction = 1.0
        normAcc = Vector3(direction * acc[0] / 2048.0 * 9.80665,
                          direction * acc[1] / 2048.0 * 9.80665,
                          direction * acc[2] / 2048.0 * 9.80665)
        normGyro = Vector3(gyro[0] / 16.0 * 3.141592 / 180.0,
                           gyro[1] / 16.0 * 3.141592 / 180.0,
                           gyro[2] / 16.0 * 3.141592 / 180.0)
        imu = Imu(h, normQuat, cov, normGyro, cov, normAcc, cov)
        imuPub.publish(imu)
        roll, pitch, yaw = euler_from_quaternion([normQuat.x,
                                                  normQuat.y,
                                                  normQuat.z,
                                                  normQuat.w])
        oriPub.publish(Vector3(roll, pitch, yaw))
        oriDegPub.publish(Vector3(degrees(roll), degrees(pitch), degrees(yaw)))
        posePub.publish(PoseStamped(h, Pose(Point(0.0, 0.0, 0.0), normQuat)))
        null_vector = Vector3(0, 0, 0)
        accTwi = Twist(normAcc, null_vector)
        acc_twist = TwistStamped(h, accTwi)
        accTwistPub.publish(acc_twist)

        velTwi = Twist(null_vector, normGyro)
        velTwist = TwistStamped(h, velTwi)
        velTwistPub.publish(velTwist)

        # Note: constructing a TransformBroadcaster on every IMU packet is
        # wasteful; creating it once outside the callback would be preferable.
        br = tf2_ros.TransformBroadcaster()
        t = TransformStamped()

        t.header.stamp = rospy.Time.now()
        t.header.frame_id = h2.frame_id
        t.child_frame_id = h.frame_id
        t.transform.translation.x = 0
        t.transform.translation.y = 0
        t.transform.translation.z = 0

        t.transform.rotation.x = normQuat.x
        t.transform.rotation.y = normQuat.y
        t.transform.rotation.z = normQuat.z
        t.transform.rotation.w = normQuat.w

        br.sendTransform(t)
        
        # Rotate the raw accelerometer/gyro vectors into the global frame
        # with PyKDL, removing gravity from the global z acceleration.
        T_global_raw = posemath.fromMsg(Pose(Point(0.0, 0.0, 0.0), normQuat))
        acc_in_raw = PyKDL.Vector(normAcc.x, normAcc.y, normAcc.z)
        gyro_in_raw = PyKDL.Vector(normGyro.x, normGyro.y, normGyro.z)

        acc_in_global = T_global_raw * acc_in_raw
        gyro_in_global = T_global_raw * gyro_in_raw

        acc_in_global_msg = Vector3(acc_in_global[0], acc_in_global[1],
                                    acc_in_global[2] - direction * 9.80665)
        accTwi = Twist(acc_in_global_msg, null_vector)
        acc_twist = TwistStamped(h2, accTwi)
        accTwistPubGlobal.publish(acc_twist)

        gyro_in_global_msg = Vector3(gyro_in_global[0], gyro_in_global[1],
                                     gyro_in_global[2])
        velTwi = Twist(null_vector, gyro_in_global_msg)
        velTwist = TwistStamped(h, velTwi)
        velTwistPubGlobal.publish(velTwist)

        imu = Imu(h, Quaternion(0, 0, 0, 1), cov, gyro_in_global_msg, cov,
                  acc_in_global_msg, cov)
        imuPubGlobal.publish(imu)
Code example #15
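    # Constructor of a grasping-benchmark manager: exposes a user-command
    # service, connects to the grasp planner and Panda control services, and
    # synchronizes the camera topics (info, RGB, depth, point cloud) with an
    # ApproximateTimeSynchronizer before initializing the cached camera pose.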
    def __init__(self,
                 grasp_planner_service_name,
                 grasp_planner_service,
                 user_cmd_service_name,
                 panda_service_name,
                 verbose=False):

        self._verbose = verbose

        # --- new grasp command service --- #
        self._new_grasp_srv = rospy.Service(user_cmd_service_name, UserCmd,
                                            self.user_cmd)

        rospy.loginfo(
            "GraspingBenchmarksManager: Waiting for grasp planner service...")
        rospy.wait_for_service(grasp_planner_service_name, timeout=30.0)
        self._grasp_planner = rospy.ServiceProxy(grasp_planner_service_name,
                                                 GraspPlanner)
        rospy.loginfo(
            "...Connected with service {}".format(grasp_planner_service_name))

        # --- panda service --- #
        # Note: this overwrites the constructor argument with a fixed name.
        panda_service_name = "/panda_grasp_server/panda_grasp"
        rospy.loginfo(
            "GraspingBenchmarksManager: Waiting for panda control service...")
        rospy.wait_for_service(panda_service_name, timeout=60.0)
        self._panda = rospy.ServiceProxy(panda_service_name, PandaGrasp)
        rospy.loginfo(
            "...Connected with service {}".format(panda_service_name))

        # --- subscribers to camera topics --- #
        self._cam_info_sub = message_filters.Subscriber(
            '/camera/aligned_depth_to_color/camera_info', CameraInfo)
        self._rgb_sub = message_filters.Subscriber('/camera/color/image_raw',
                                                   Image)
        self._depth_sub = message_filters.Subscriber(
            '/camera/aligned_depth_to_color/image_raw', Image)
        self._pc_sub = message_filters.Subscriber(
            '/camera/depth_registered/points', PointCloud2)
        self._seg_sub = rospy.Subscriber('rgb/image_seg',
                                         Image,
                                         self.seg_img_callback,
                                         queue_size=10)

        # --- camera data synchronizer --- #
        self._tss = message_filters.ApproximateTimeSynchronizer(
            [self._cam_info_sub, self._rgb_sub, self._depth_sub, self._pc_sub],
            queue_size=1,
            slop=0.5)
        self._tss.registerCallback(self._camera_data_callback)

        # --- robot/camera transform listener --- #
        self._tfBuffer = tf2_ros.buffer.Buffer()
        listener = tf2_ros.transform_listener.TransformListener(self._tfBuffer)

        # --- camera messages --- #
        self._cam_info_msg = None
        self._rgb_msg = None
        self._depth_msg = None
        self._pc_msg = None
        self._camera_pose = TransformStamped()
        self._root_reference_frame = 'panda_link0'

        self._seg_msg = NEW_MSG

        self._new_camera_data = False
        self._abort = False
Code example #16
File: odrive_node.py Project: venkatraman4berge/Hugo
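    # ODrive driver node setup: loads wheel geometry and publishing options
    # from private ROS parameters, logs them, registers driver/motor services,
    # subscribes to /cmd_vel through a small command queue, and creates the
    # current, raw-odometry and Odometry publishers used by the main loop.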
    def __init__(self):
        self.axis_for_right = float(rospy.get_param(
            '~axis_for_right',
            0))  # if right calibrates first, this should be 0, else 1
        self.wheel_track = float(rospy.get_param(
            '~wheel_track', 0.285))  # m, distance between wheel centres
        self.tyre_circumference = float(
            rospy.get_param('~tyre_circumference', 0.341)
        )  # used to translate velocity commands in m/s into motor rpm

        self.connect_on_startup = rospy.get_param('~connect_on_startup', False)
        #self.calibrate_on_startup = rospy.get_param('~calibrate_on_startup', False)
        #self.engage_on_startup    = rospy.get_param('~engage_on_startup', False)

        self.has_preroll = rospy.get_param('~use_preroll', True)
        self.has_index = rospy.get_param('~has_index', False)
        self.publish_current = rospy.get_param('~publish_current', True)
        self.publish_raw_odom = rospy.get_param('~publish_raw_odom', True)

        self.publish_odom = rospy.get_param('~publish_odom', True)
        self.publish_tf = rospy.get_param('~publish_odom_tf', False)
        self.odom_topic = rospy.get_param('~odom_topic', "odom")
        self.odom_frame = rospy.get_param('~odom_frame', "odom")
        self.base_frame = rospy.get_param('~base_frame', "base_link")
        self.odom_calc_hz = rospy.get_param('~odom_calc_hz', 25)
        self.direction = rospy.get_param('~direction', 1)
        rospy.loginfo("Parameter: axis_for_right = " +
                      str(self.axis_for_right))
        rospy.loginfo("Parameter: wheel_track = " + str(self.wheel_track))
        rospy.loginfo("Parameter: tyre_circumference = " +
                      str(self.tyre_circumference))
        rospy.loginfo("Parameter: connect_on_startup = " +
                      str(self.connect_on_startup))
        rospy.loginfo("Parameter: calibrate_on_startup = " +
                      str(self.calibrate_on_startup))
        rospy.loginfo("Parameter: engage_on_startup = " +
                      str(self.engage_on_startup))
        rospy.loginfo("Parameter: use_preroll = " + str(self.has_preroll))
        rospy.loginfo("Parameter: publish_current = " +
                      str(self.publish_current))
        rospy.loginfo("Parameter: publish_raw_odom = " +
                      str(self.publish_raw_odom))
        rospy.loginfo("Parameter: publish_odom = " + str(self.publish_odom))
        rospy.loginfo("Parameter: publish_tf = " + str(self.publish_tf))
        rospy.loginfo("Parameter: odom_topic = " + str(self.odom_topic))
        rospy.loginfo("Parameter: odom_frame = " + str(self.odom_frame))
        rospy.loginfo("Parameter: base_frame = " + str(self.base_frame))
        rospy.loginfo("Parameter: direction = " + str(self.direction))

        rospy.on_shutdown(self.terminate)

        rospy.Service('connect_driver', std_srvs.srv.Trigger,
                      self.connect_driver)
        rospy.Service('disconnect_driver', std_srvs.srv.Trigger,
                      self.disconnect_driver)

        rospy.Service('calibrate_motors', std_srvs.srv.Trigger,
                      self.calibrate_motor)
        rospy.Service('engage_motors', std_srvs.srv.Trigger, self.engage_motor)
        rospy.Service('release_motors', std_srvs.srv.Trigger,
                      self.release_motor)

        self.command_queue = Queue.Queue(maxsize=5)
        self.vel_subscribe = rospy.Subscriber("/cmd_vel",
                                              Twist,
                                              self.cmd_vel_callback,
                                              queue_size=2)

        if self.publish_current:
            self.current_loop_count = 0
            self.left_current_accumulator = 0.0
            self.right_current_accumulator = 0.0
            self.current_publisher_left = rospy.Publisher(
                'odrive/left_current', Float64, queue_size=2)
            self.current_publisher_right = rospy.Publisher(
                'odrive/right_current', Float64, queue_size=2)
            rospy.loginfo("ODrive will publish motor currents.")

        self.last_cmd_vel_time = rospy.Time.now()

        if self.publish_raw_odom:
            self.raw_odom_publisher_encoder_left = rospy.Publisher(
                'odrive/raw_odom/encoder_left', Int32, queue_size=2)
            self.raw_odom_publisher_encoder_right = rospy.Publisher(
                'odrive/raw_odom/encoder_right', Int32, queue_size=2)
            self.raw_odom_publisher_vel_left = rospy.Publisher(
                'odrive/raw_odom/velocity_left', Int32, queue_size=2)
            self.raw_odom_publisher_vel_right = rospy.Publisher(
                'odrive/raw_odom/velocity_right', Int32, queue_size=2)

        if self.publish_odom:
            rospy.Service('reset_odometry', std_srvs.srv.Trigger,
                          self.reset_odometry)
            self.old_pos_l = 0
            self.old_pos_r = 0

            self.odom_publisher = rospy.Publisher(self.odom_topic,
                                                  Odometry,
                                                  tcp_nodelay=True,
                                                  queue_size=2)
            # setup message
            self.odom_msg = Odometry()
            #print(dir(self.odom_msg))
            self.odom_msg.header.frame_id = self.odom_frame
            self.odom_msg.child_frame_id = self.base_frame
            self.odom_msg.pose.pose.position.x = 0.0
            self.odom_msg.pose.pose.position.y = 0.0
            self.odom_msg.pose.pose.position.z = 0.0  # always on the ground, we hope
            self.odom_msg.pose.pose.orientation.x = 0.0  # always vertical
            self.odom_msg.pose.pose.orientation.y = 0.0  # always vertical
            self.odom_msg.pose.pose.orientation.z = 0.0
            self.odom_msg.pose.pose.orientation.w = 1.0
            self.odom_msg.twist.twist.linear.x = 0.0
            self.odom_msg.twist.twist.linear.y = 0.0  # no sideways
            self.odom_msg.twist.twist.linear.z = 0.0  # or upwards... only forward
            self.odom_msg.twist.twist.angular.x = 0.0  # or roll
            self.odom_msg.twist.twist.angular.y = 0.0  # or pitch... only yaw
            self.odom_msg.twist.twist.angular.z = 0.0

            # store current location to be updated.
            self.x = 0.0
            self.y = 0.0
            self.theta = 0.0

            # setup transform
            self.tf_publisher = tf2_ros.TransformBroadcaster()
            self.tf_msg = TransformStamped()
            self.tf_msg.header.frame_id = self.odom_frame
            self.tf_msg.child_frame_id = self.base_frame
            self.tf_msg.transform.translation.x = 0.0
            self.tf_msg.transform.translation.y = 0.0
            self.tf_msg.transform.translation.z = 0.0
            self.tf_msg.transform.rotation.x = 0.0
            self.tf_msg.transform.rotation.y = 0.0
            self.tf_msg.transform.rotation.z = 0.0
            self.tf_msg.transform.rotation.w = 1.0  # identity quaternion
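
The update loop itself is not shown in this excerpt; as a hedged sketch, the wheel_track and tyre_circumference parameters above typically feed a differential-drive odometry update along these lines (ENCODER_CPR and the function name are illustrative assumptions, not from odrive_node.py):

import math

ENCODER_CPR = 4096          # assumed encoder counts per wheel revolution
TYRE_CIRCUMFERENCE = 0.341  # m, matching the default parameter above
WHEEL_TRACK = 0.285         # m, matching the default parameter above


def integrate_odom(x, y, theta, d_counts_l, d_counts_r):
    """Fold one pair of encoder deltas into the (x, y, theta) estimate."""
    d_left = d_counts_l / float(ENCODER_CPR) * TYRE_CIRCUMFERENCE
    d_right = d_counts_r / float(ENCODER_CPR) * TYRE_CIRCUMFERENCE
    d_center = 0.5 * (d_left + d_right)
    d_theta = (d_right - d_left) / WHEEL_TRACK
    # integrate at the midpoint heading to keep the first-order error small
    x += d_center * math.cos(theta + 0.5 * d_theta)
    y += d_center * math.sin(theta + 0.5 * d_theta)
    theta = (theta + d_theta) % (2.0 * math.pi)
    return x, y, theta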
コード例 #17
0
    def odometry(self, left, right):
        lspeed = left.speed / 360.0 * self.CIRCUMFERENCE
        rspeed = right.speed / 360.0 * self.CIRCUMFERENCE
        # Compute current linear and angular speed from wheel speed
        twist = TwistWithCovariance()
        twist.twist.linear.x = (rspeed + lspeed) / 2.0
        twist.twist.angular.z = (rspeed - lspeed) / self.WIDTH

        # Compute position and orientation from travelled distance per wheel
        # http://www8.cs.umu.se/kurser/5DV122/HT13/material/Hellstrom-ForwardKinematics.pdf
        # position change per wheel in meter
        dl = (left.encoder -
              self.last_encoders['l']) / 360.0 * self.CIRCUMFERENCE
        dr = (right.encoder -
              self.last_encoders['r']) / 360.0 * self.CIRCUMFERENCE

        # set previous encoder state
        self.last_encoders['l'] = left.encoder
        self.last_encoders['r'] = right.encoder

        angle = (dr - dl) / self.WIDTH
        linear = 0.5 * (dl + dr)
        if dr != dl:
            radius = self.WIDTH / 2.0 * (dl + dr) / (dr - dl)
        else:
            radius = 0

        # old state
        old_angle = 2 * np.arccos(self.pose.pose.orientation.w)
        old_pos = np.array(
            [self.pose.pose.position.x, self.pose.pose.position.y])

        # update state
        new_angle = (old_angle + angle) % (2 * np.pi)
        new_q = quaternion_about_axis(new_angle, (0, 0, 1))
        # debug leftover: the orientation has not been updated yet at this
        # point, so this would just print the old angle again
        # new_angle2 = 2 * np.arccos(self.pose.pose.orientation.w)
        # print("new_angle2", new_angle2)
        new_pos = np.zeros((2, ))

        if abs(angle) < 1e-6:
            direction = old_angle + angle * 0.5
            dx = linear * np.cos(direction)
            dy = linear * np.sin(direction)
        else:
            dx = +radius * (np.sin(new_angle) - np.sin(old_angle))
            dy = -radius * (np.cos(new_angle) - np.cos(old_angle))

        new_pos[0] = old_pos[0] + dx
        new_pos[1] = old_pos[1] + dy

        self.pose.pose.orientation.x = new_q[0]
        self.pose.pose.orientation.y = new_q[1]
        self.pose.pose.orientation.z = new_q[2]
        self.pose.pose.orientation.w = new_q[3]
        self.pose.pose.position.x = new_pos[0]
        self.pose.pose.position.y = new_pos[1]

        odom = Odometry(header=Header(stamp=rospy.Time.now(), frame_id="odom"),
                        child_frame_id="base_link",
                        pose=self.pose,
                        twist=twist)

        transform = TransformStamped(header=Header(stamp=rospy.Time.now(),
                                                   frame_id="world"),
                                     child_frame_id="gopigo")
        transform.transform.translation.x = self.pose.pose.position.x
        transform.transform.translation.y = self.pose.pose.position.y
        transform.transform.translation.z = self.pose.pose.position.z
        transform.transform.rotation = self.pose.pose.orientation

        return odom, transform
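
The exact-arc update above can be checked in isolation. Below is a minimal standalone sketch of the same math (the width argument stands in for self.WIDTH; this is a verification aid, not the class's code):

import numpy as np


def arc_update(old_pos, old_angle, dl, dr, width):
    """Pose update from per-wheel travelled distances dl/dr [m]."""
    angle = (dr - dl) / width
    linear = 0.5 * (dl + dr)
    new_angle = (old_angle + angle) % (2 * np.pi)
    if abs(angle) < 1e-6:
        # straight-line approximation for (near-)equal wheel travel
        direction = old_angle + 0.5 * angle
        dx = linear * np.cos(direction)
        dy = linear * np.sin(direction)
    else:
        radius = width / 2.0 * (dl + dr) / (dr - dl)
        dx = +radius * (np.sin(new_angle) - np.sin(old_angle))
        dy = -radius * (np.cos(new_angle) - np.cos(old_angle))
    return old_pos + np.array([dx, dy]), new_angle


pos, ang = arc_update(np.zeros(2), 0.0, 0.1, 0.1, 0.2)
assert np.allclose(pos, [0.1, 0.0])      # equal travel -> straight ahead
pos, ang = arc_update(np.zeros(2), 0.0, -0.01, 0.01, 0.2)
assert np.allclose(pos, 0.0, atol=1e-9)  # turn in place -> no translation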
コード例 #18
0
    def __init__(self):
        # Create the broadcaster
        self.pub_tf = rospy.Publisher("/tf",
                                      tf2_msgs.msg.TFMessage,
                                      queue_size=5)
        self.broadcts = tf2_ros.TransformBroadcaster()
        self.transform = TransformStamped()

        # Subscribe to the communication topic between the TF node and the controller node
        rospy.Subscriber("/next_coord", Bool, self.callback_next)
        #rospy.Subscriber("/vel_robot",numpy_msg(Twist),self.callback_position)
        rospy.Subscriber("/rover/gps/pos", NavSatFix, self.callback_gps)
        rospy.Subscriber("/rover/imu", Imu, self.callback_imu)
        self.pub1 = rospy.Publisher("/goal", Twist, queue_size=10)

        # Parameters
        self.theta = -rospy.get_param("/theta") * 3.141592 / 180.0
        self.ds = rospy.get_param("/ds")
        self.f = rospy.get_param("/f")
        self.coordenadas1 = rospy.get_param("/coordenadas")
        self.position = Float64MultiArray()
        self.pos_x = 0
        self.pos_y = 0
        self.theta = 0
        self.first_gps = 1
        self.pos_x_init = 0
        self.pos_y_init = 0
        self.angles_odom_base = [0, 0, 0, 0]
        self.angles_goal_odom = [0, 0, 0, 0]
        self.Meta = Twist()
        self.theta_z = 0

        while self.first_gps:
            self.i = 0  # index used to walk self.tray (trajectory coordinates in the robot frame)

        self.update_coor()  # update coordinates and build the trajectory
        self.evadir_mpi()
        self.creacion_tray()
        #rospy.loginfo("----------------------------------------------")
        #rospy.loginfo(self.coordenadas_new)
        #rospy.loginfo("----------------------------------------------")
        #rospy.loginfo(self.tray)

        rate = rospy.Rate(self.f)

        rospy.loginfo("Nodo TF inicio correctamente ")

        while (not rospy.is_shutdown()):
            if (self.i < len(self.tray) - 1):
                self.update_odom()  # publish odometry and goal over tf
                self.update_goal(self.i + 1)
                self.Meta.linear.x = self.tray[self.i + 1][0]
                self.Meta.linear.y = self.tray[self.i + 1][1]
                self.Meta.linear.z = 0.0
                self.Meta.angular.x = 0.0
                self.Meta.angular.y = 0.0
                self.Meta.angular.z = self.tray[self.i + 1][2]

                self.pub1.publish(self.Meta)

                rate.sleep()
コード例 #19
0
        filtered_to_odom = base_to_odom
        filtered_to_odom.header.frame_id = "cf1/base_link/filtered"
        filtered_to_odom.child_frame_id = "cf1/odom_new"
        self.tf_buf.set_transform(filtered_to_odom, "gandalfs_authority")
        map_to_odom = self.tf_buf.lookup_transform_core(
            "map", "cf1/odom_new", time_stamp)
        map_to_odom.child_frame_id = "cf1/odom"
        return map_to_odom


if __name__ == '__main__':
    rospy.init_node('update_map_to_odom')

    init_trans_ls = rospy.get_param("localization/initial_map_to_odom")
    #init_trans_ls = [float(s.strip()) for s in init_trans_str.split()]
    init_t = TransformStamped()
    init_t.transform.translation.x = init_trans_ls[0]
    init_t.transform.translation.y = init_trans_ls[1]
    init_t.transform.translation.z = init_trans_ls[2]

    (init_t.transform.rotation.x, init_t.transform.rotation.y,
     init_t.transform.rotation.z,
     init_t.transform.rotation.w) = quaternion_from_euler(
         init_trans_ls[3], init_trans_ls[4], init_trans_ls[5])

    # update freq should be high (at least > 20 Hz) so transforms don't get extrapolation errors in other files
    # OBS: high frequency requires outlier detection for the kalman filter to work (high freq detects noisy measurements)
    p = MapOdomUpdate(init_trans=init_t, update_freq=40)

    p.spin()
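
The map-to-odom correction above leans on tf2's ability to compose transforms injected locally with set_transform. A minimal sketch of that pattern (frame names are illustrative; run inside an initialized node with a roscore available):

import rospy
import tf2_ros
from geometry_msgs.msg import TransformStamped

rospy.init_node('tf_compose_sketch')
buf = tf2_ros.Buffer()

a_to_b = TransformStamped()
a_to_b.header.frame_id = 'a'
a_to_b.child_frame_id = 'b'
a_to_b.transform.translation.x = 1.0
a_to_b.transform.rotation.w = 1.0
buf.set_transform(a_to_b, 'sketch_authority')

b_to_c = TransformStamped()
b_to_c.header.frame_id = 'b'
b_to_c.child_frame_id = 'c'
b_to_c.transform.translation.y = 2.0
b_to_c.transform.rotation.w = 1.0
buf.set_transform(b_to_c, 'sketch_authority')

# composed a -> c transform: translation (1, 2, 0)
print(buf.lookup_transform_core('a', 'c', rospy.Time(0)))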
コード例 #20
0
ファイル: launch_dream_ros.py プロジェクト: ylabbe/DREAM
    def __init__(self, args, single_frame_mode=True, compute_2d_to_3d_transform=False):
        """Initialize inference engine.

            single_frame_mode:  Set this to True.  (Appears to be some sort of future-proofing by Tim.)
        """
        self.cv_image = None
        self.camera_K = None

        # TODO: -- continuous mode produces a TF at each frame
        # not continuous mode allows for several frames before producing an estimate
        self.single_frame_mode = single_frame_mode
        self.capture_frame_srv = rospy.Service(
            capture_frame_service_topic, Empty, self.on_capture_frame
        )
        self.clear_buffer_srv = rospy.Service(
            clear_buffer_service_topic, Empty, self.on_clear_buffer
        )
        self.kp_projs_raw_buffer = np.array([])
        self.kp_positions_buffer = np.array([])
        self.pnp_solution_found = False
        self.capture_frame_max_kps = True

        self.compute_2d_to_3d_transform = compute_2d_to_3d_transform

        # Create subscribers
        self.image_sub = rospy.Subscriber(
            image_topic, Image, self.on_image, queue_size=1
        )
        self.bridge = CvBridge()

        # Input argument handling
        assert os.path.exists(
            args.input_params_path
        ), 'Expected input_params_path "{}" to exist, but it does not.'.format(
            args.input_params_path
        )

        if args.input_config_path:
            input_config_path = args.input_config_path
        else:
            # Use params filepath to infer the config filepath
            input_config_path = os.path.splitext(args.input_params_path)[0] + ".yaml"

        assert os.path.exists(
            input_config_path
        ), 'Expected input_config_path "{}" to exist, but it does not.'.format(
            input_config_path
        )

        # Create parser
        print("# Opening config file:  {} ...".format(input_config_path))
        data_parser = YAML(typ="safe")

        with open(input_config_path, "r") as f:
            network_config = data_parser.load(f)

        # Overwrite GPU
        # If nothing is specified at the command line, None is the default, which uses all GPUs
        # TBD - think about a better way of doing this
        network_config["training"]["platform"]["gpu_ids"] = args.gpu_ids

        # Load network
        print("# Creating network...")
        self.dream_network = dream.create_network_from_config_data(network_config)

        print(
            "Loading network with weights from:  {} ...".format(args.input_params_path)
        )
        self.dream_network.model.load_state_dict(torch.load(args.input_params_path))
        self.dream_network.enable_evaluation()

        # Use image preprocessing specified by config by default, unless user specifies otherwise
        if args.image_preproc_override:
            self.image_preprocessing = args.image_preproc_override
        else:
            self.image_preprocessing = self.dream_network.image_preprocessing()

        # Output names used to look up keypoints in TF tree
        self.keypoint_tf_frames = self.dream_network.ros_keypoint_frames
        print("ROS keypoint frames: {}".format(self.keypoint_tf_frames))

        # Define publishers
        self.net_input_image_pub = rospy.Publisher(
            topic_out_net_input_image, Image, queue_size=1
        )
        self.image_overlay_pub = rospy.Publisher(
            topic_out_keypoint_overlay, Image, queue_size=1
        )
        self.belief_image_pub = rospy.Publisher(
            topic_out_belief_maps, Image, queue_size=1
        )
        self.kp_belief_overlay_pub = rospy.Publisher(
            topic_out_keypoint_belief_overlay, Image, queue_size=1
        )

        # Store the base frame for the TF lookup
        self.base_tf_frame = args.base_frame

        # Define TFs
        self.tfBuffer = tf2.Buffer()
        self.tf_broadcaster = tf2.TransformBroadcaster()
        self.listener = tf2.TransformListener(self.tfBuffer)
        self.camera_pose_tform = TransformStamped()

        self.camera_pose_tform.header.frame_id = self.base_tf_frame
        self.camera_pose_tform.child_frame_id = tform_out_childname

        # Subscriber for camera intrinsics topic
        self.camera_info_sub = rospy.Subscriber(
            camera_info_topic, CameraInfo, self.on_camera_info, queue_size=1
        )

        # Verbose mode
        self.verbose = args.verbose
コード例 #21
0
    def callback(self, data):
        # Convert the image from OpenCV to ROS format
        #self.fps = FPS().start()

        # Filter requirements.
        order = 6
        fs = 30.0  # sample rate, Hz
        cutoff = 3.3  # desired cutoff frequency of the filter, Hz
        # Get the filter coefficients so we can check its frequency response.
        b, a = butter_lowpass(cutoff, fs, order)
        try:
            cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
        except CvBridgeError as e:
            print(e)
        """
      Fancy code here.
    """
        global mtx, dist
        h, w = cv_image.shape[:2]
        mtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w, h), 1, (w, h))

        blob = cv2.dnn.blobFromImage(cv_image,
                                     1.5,
                                     size=(300, 300),
                                     swapRB=True,
                                     crop=True)
        self.net.setInput(blob)

        #Prediction of network
        detections = self.net.forward()

        cols = 300
        rows = 300

        for i in range(detections.shape[2]):
            confidence = detections[0, 0, i, 2]  #Confidence of prediction
            if confidence > 0.8:  # Filter prediction
                class_id = int(detections[0, 0, i, 1])  # Class label
                # Object location
                xLeftBottom = int(detections[0, 0, i, 3] * cols)
                yLeftBottom = int(detections[0, 0, i, 4] * rows)
                xRightTop = int(detections[0, 0, i, 5] * cols)
                yRightTop = int(detections[0, 0, i, 6] * rows)
                # Factor for scale to original size of frame
                heightFactor = cv_image.shape[0] / 300.0
                widthFactor = cv_image.shape[1] / 300.0
                # Scale object detection to frame
                xLeftBottom = int(widthFactor * xLeftBottom)
                yLeftBottom = int(heightFactor * yLeftBottom)
                xRightTop = int(widthFactor * xRightTop)
                yRightTop = int(heightFactor * yRightTop)

                roi = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
                roi = roi[yLeftBottom:yRightTop, xLeftBottom:xRightTop]

                # Draw location of object
                cv2.rectangle(cv_image, (xLeftBottom, yLeftBottom),
                              (xRightTop, yRightTop), (0, 255, 0))
                if class_id in self.classNames:
                    label = self.classNames[class_id] + ": " + str(confidence)
                    labelSize, baseLine = cv2.getTextSize(
                        label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)
                    yLeftBottom = max(yLeftBottom, labelSize[1])
                    cv2.rectangle(
                        cv_image, (xLeftBottom, yLeftBottom - labelSize[1]),
                        (xLeftBottom + labelSize[0], yLeftBottom + baseLine),
                        (0, 255, 0), cv2.FILLED)
                    cv2.putText(cv_image, label, (xLeftBottom, yLeftBottom),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255))
                    # Use the box of the detected object for the translation estimation.
                    '''
                image_points =np.array([(xRightTop, yRightTop),
                                        (xRightTop-(xRightTop-xLeftBottom), yRightTop),
                                        (xLeftBottom+(xRightTop-xLeftBottom), yLeftBottom),
                                        (xLeftBottom, yLeftBottom)                                                           
                                        ],dtype=np.float32)
                                        '''

                    #_,rmat,tmat= cv2.solvePnPRansac(self.model_points,image_points, mtx,dist,flags=6,iterationsCount = 500,reprojectionError = 2.0,confidence = 0.99)[:3]

                    #success,rmat,tmat= cv2.solvePnPRansac(self.model_points,image_points, mtx,dist,flags=6)[:3]#,iterationsCount = 500,reprojectionError = 2.0,confidence = 0.95)[:3]

                    #Then check ORB matching for the orientation.
                    kp_model, desc_model = self.unpickle_keypoints(
                        self.keypoints_database[class_id - 1])
                    # print(desc_model)
                    if str(class_id) not in self.dict_ekf.keys():

                        #Kalman Filter for the pose
                        self.dict_ekf[str(class_id)] = cv2.KalmanFilter(6, 6)
                        trans_mat = np.identity(6, np.float32)
                        self.dict_ekf[str(
                            class_id)].transitionMatrix = trans_mat
                        self.dict_ekf[str(
                            class_id)].measurementMatrix = trans_mat
                        self.dict_ekf[str(
                            class_id)].processNoiseCov = cv2.setIdentity(
                                self.dict_ekf[str(class_id)].processNoiseCov,
                                1e-1)  #4
                        self.dict_ekf[str(
                            class_id)].measurementNoiseCov = cv2.setIdentity(
                                self.dict_ekf[str(
                                    class_id)].measurementNoiseCov, 1e-1)  #2
                        self.dict_ekf[str(
                            class_id)].errorCovPost = cv2.setIdentity(
                                self.dict_ekf[str(
                                    class_id)].errorCovPost)  #, 1)

                    kp_image, desc_image = self.orb.detectAndCompute(roi,
                                                                     mask=None)

                    try:

                        matches = self.flann.knnMatch(desc_model,
                                                      desc_image,
                                                      k=2)
                        matchesMask = [[0, 0] for i in range(len(matches))]
                        #-- Filter matches using the Lowe's ratio test
                        good_matches = []
                        for i, (m, n) in enumerate(matches):
                            if m.distance < 0.7 * n.distance:
                                matchesMask[i] = [1, 0]
                                good_matches.append(m)
                        draw_params = dict(matchColor=(0, 0, 255),
                                           singlePointColor=(255, 0, 0),
                                           matchesMask=matchesMask,
                                           flags=cv2.DrawMatchesFlags_DEFAULT)

                        matches = good_matches  #sorted(good_matches, key = lambda x: x.distance)
                        list_kpimage = []
                        list_kpmodel = []
                        for mat in matches:
                            img1_idx = mat.queryIdx
                            img2_idx = mat.trainIdx

                            (x1, y1) = kp_model[img1_idx].pt
                            (x2, y2) = kp_image[img2_idx].pt

                            # Append to each list

                            list_kpmodel.append((x1, y1, 0.0))
                            list_kpimage.append((x2, y2))
                    except:
                        continue

                    image_points2 = np.array(list_kpimage, dtype=np.float32)
                    model_points2 = np.array(list_kpmodel, dtype=np.float32)

                    try:
                        if not (image_points2.size == 0
                                and model_points2.size == 0):

                            _, rmat, tmat = cv2.solvePnPRansac(
                                model_points2,
                                image_points2,
                                mtx,
                                dist,
                                flags=6,
                                iterationsCount=500,
                                reprojectionError=1.0,
                                confidence=0.99)[:3]
                            if np.linalg.norm(tmat) < 1000:
                                measurements = np.array(
                                    tmat, np.float32)  #.reshape(1,-1)
                                prot = np.array(rmat,
                                                np.float32)  #.reshape(1,-1)
                                measurements = np.concatenate(
                                    (measurements, prot))
                                prediction = self.dict_ekf[str(
                                    class_id)].predict()
                                estimated = self.dict_ekf[str(
                                    class_id)].correct(
                                        measurements.reshape(-1, 1))
                                rmat2 = np.array(estimated[3:6])

                            # Convert the filtered rotation back to a matrix and invert the pose
                            RotMat = cv2.Rodrigues(rmat2)[0]
                            pose = np.hstack((RotMat, tmat))
                            pose = np.vstack((pose, [0, 0, 0, 1]))
                            _, invpose = cv2.invert(pose)
                            rot = invpose[0:3, 0:3]
                            #print('Pose\n',invpose[0:3,3:4])
                            tmat = invpose[0:3, 3:4]
                            rmat = cv2.Rodrigues(rot)[0]

                            if np.linalg.norm(tmat) < 10000:
                                transform = TransformStamped()
                                transform.header.frame_id = 'cf1/camera_link'
                                transform.child_frame_id = 'sign/' + self.classNames[
                                    class_id]
                                transform.header.stamp = rospy.Time.now()

                                transform.transform.translation.x = round(
                                    butter_lowpass_filter(
                                        tmat[2], cutoff, fs, order),
                                    3) + 0.2  #tmat[2]/950#+0.2
                                transform.transform.translation.y = round(
                                    butter_lowpass_filter(
                                        tmat[1], cutoff, fs, order),
                                    3) - 0.1  #tmat[1]/950#-0.1
                                transform.transform.translation.z = round(
                                    butter_lowpass_filter(
                                        tmat[0], cutoff, fs, order), 3) * 1.5

                                #a=quaternion_from_euler(rmat[0]-math.pi,rmat[1],rmat[2]+math.pi)
                                a = quaternion_from_euler(
                                    rmat[0] * math.pi / 180,
                                    rmat[1] * math.pi / 180 + math.pi,
                                    rmat[2] * math.pi / 180 - math.pi / 2)
                                transform.transform.rotation.x = a[0]
                                transform.transform.rotation.y = a[1]
                                transform.transform.rotation.z = a[2]
                                transform.transform.rotation.w = a[3]
                                #print(transform)
                                if transform.transform.translation.z > 0.15:
                                    tf_bc.sendTransform(transform)
                    except:

                        pass
        """
    Fancy code finishes here.
    """
        # Publish the image
        try:
            self.image_pub.publish(self.bridge.cv2_to_imgmsg(
                cv_image, "bgr8"))  #np.asarray(thresh_img),"mono8"))
        except CvBridgeError as e:
            print(e)
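
butter_lowpass and butter_lowpass_filter are referenced above but not defined in this excerpt. A common SciPy-based definition (an assumption, not necessarily the author's implementation) is:

from scipy.signal import butter, lfilter


def butter_lowpass(cutoff, fs, order=5):
    """Design a digital Butterworth low-pass filter; returns (b, a)."""
    nyq = 0.5 * fs  # Nyquist frequency
    b, a = butter(order, cutoff / nyq, btype='low', analog=False)
    return b, a


def butter_lowpass_filter(data, cutoff, fs, order=5):
    """Apply the filter to a 1-D signal."""
    b, a = butter_lowpass(cutoff, fs, order)
    return lfilter(b, a, data)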
コード例 #22
0
    def __init__(self):
        self.mav_name = rospy.get_param('~mav_name', 'mav')
        # Altitude source ('vicon'/'external')
        self.altitude_input = rospy.get_param('~altitude_input', 'vicon')
        # Whether to publish pose message or not (True/False)
        self.publish_pose = rospy.get_param('~publish_pose', True)
        # Whether to publish transform message or not (True/False)
        self.publish_transform = rospy.get_param('~publish_transform', True)
        # Maximum noise radius (polar coordinates) [m]
        self.max_noise_radius = rospy.get_param('~max_noise_radius',
                                                0.0)  # max tested: 0.15
        # Timeout between noise sampling updates [s]
        self.publish_timeout = rospy.get_param('~publish_timeout', 0.2)

        self.fix = NavSatFix()
        self.fix.header.frame_id = 'fcu'
        self.fix.status.status = 1
        self.fix.status.service = 1
        self.fix.latitude = 44.0
        self.fix.longitude = 44.0
        self.fix.altitude = 0.0
        # TODO: fill GPS covariance

        self.latest_altitude_message = Float64()
        self.latest_imu_message = Imu()

        self.pwc = PoseWithCovarianceStamped()
        self.pwc.header.frame_id = 'vicon'  # doesn't really matter to MSF
        self.pwc.pose.covariance[6 * 0 + 0] = 0.000025
        self.pwc.pose.covariance[6 * 1 + 1] = 0.000025
        self.pwc.pose.covariance[6 * 2 + 2] = 0.000025
        self.pwc.pose.covariance[6 * 3 + 3] = 0.000025
        self.pwc.pose.covariance[6 * 4 + 4] = 0.000025
        self.pwc.pose.covariance[6 * 5 + 5] = 0.000025

        self.point = PointStamped()
        self.point.header = self.pwc.header

        self.transform = TransformStamped()
        self.transform.header = self.pwc.header
        self.transform.child_frame_id = self.mav_name + '_noisy'

        self.R_noise = 0.0
        self.theta_noise = 0.0
        self.timer_pub = None
        self.timer_resample = None
        self.got_odometry = False

        self.spoofed_gps_pub = rospy.Publisher('spoofed_gps',
                                               NavSatFix,
                                               queue_size=1)
        self.pose_pub = rospy.Publisher('noisy_vicon_pose',
                                        PoseWithCovarianceStamped,
                                        queue_size=1,
                                        tcp_nodelay=True)
        self.point_pub = rospy.Publisher('noisy_vicon_point',
                                         PointStamped,
                                         queue_size=1,
                                         tcp_nodelay=True)
        self.transform_pub = rospy.Publisher('noisy_vicon_transform',
                                             TransformStamped,
                                             queue_size=1,
                                             tcp_nodelay=True)

        self.altitude_sub = rospy.Subscriber('laser_altitude',
                                             Float64,
                                             self.altitude_callback,
                                             tcp_nodelay=True)
        self.gps_sub = rospy.Subscriber('gps',
                                        NavSatFix,
                                        self.gps_callback,
                                        tcp_nodelay=True)
        self.estimated_odometry_sub = rospy.Subscriber('estimated_odometry',
                                                       Odometry,
                                                       self.odometry_callback,
                                                       queue_size=1,
                                                       tcp_nodelay=True)
        self.imu_sub = rospy.Subscriber('imu',
                                        Imu,
                                        self.imu_callback,
                                        queue_size=1,
                                        tcp_nodelay=True)
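
The noise model implied by max_noise_radius and the resampling timer is not shown in this excerpt; a hedged sketch of sampling a planar offset in polar coordinates (names are illustrative):

import math
import random

MAX_NOISE_RADIUS = 0.15  # m, the largest value the comment above reports testing


def sample_noise_offset():
    """Draw a planar offset inside a disc of radius MAX_NOISE_RADIUS."""
    r = random.uniform(0.0, MAX_NOISE_RADIUS)
    theta = random.uniform(0.0, 2.0 * math.pi)
    return r * math.cos(theta), r * math.sin(theta)


dx, dy = sample_noise_offset()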
コード例 #23
0
    def image_callback(self, img: Image):
        t_start = time.time()

        if (not self.camera.ready() or not self.trajectory_complete
                or not self.goal_post_need):
            return

        self.camera.reset_position(publish_basecamera=False,
                                   timestamp=img.header.stamp)

        image = CvBridge().imgmsg_to_cv2(img, desired_encoding="rgb8")
        hsv = cv2.cvtColor(src=image, code=cv2.COLOR_RGB2HSV)  # image was decoded as rgb8

        h = self.camera.calculateHorizonCoverArea()
        cv2.rectangle(image, [0, 0], [640, int(h * 7 / 10.0)], [0, 0, 0],
                      cv2.FILLED)

        # Field line detection
        mask2 = cv2.inRange(hsv, (0, 0, 255 - 65), (255, 65, 255))
        out = cv2.bitwise_and(image, image, mask=mask2)

        kernel = np.ones((7, 7), np.uint8)
        out = cv2.morphologyEx(out, cv2.MORPH_CLOSE, kernel)

        cdst = cv2.cvtColor(out, cv2.COLOR_BGR2GRAY)
        retval, dst = cv2.threshold(cdst, 127, 255, cv2.THRESH_BINARY)
        edges = cv2.Canny(dst, 50, 150)

        lines = cv2.HoughLinesP(
            edges,
            rho=DetectorFieldline.HOUGH_RHO,
            theta=DetectorFieldline.HOUGH_THETA,
            threshold=DetectorFieldline.HOUGH_THRESHOLD,
            minLineLength=DetectorFieldline.HOUGH_MIN_LINE_LENGTH,
            maxLineGap=DetectorFieldline.HOUGH_MAX_LINE_GAP)

        ccdst = cv2.cvtColor(cdst, cv2.COLOR_GRAY2RGB)

        if lines is None:
            return

        computed_lines = []
        vert_x_max, vert_x_min, vert_y_max, vert_y_min = 0, 1000, 0, 1000
        for l in lines:
            x1, y1, x2, y2 = l[0]
            # computing magnitude and angle of the line
            mag = np.sqrt((x2 - x1)**2. + (y2 - y1)**2.)
            if x2 == x1:
                angle = 0
            else:
                angle = np.rad2deg(np.arctan((y2 - y1) / (x2 - x1)))

            computed_lines += [(mag, angle)]

            pt1 = (x1, y1)
            pt2 = (x2, y2)
            if abs(angle) < 10:  # Horizontal
                cv2.line(ccdst,
                         pt1,
                         pt2, (255, 0, 0),
                         thickness=3,
                         lineType=cv2.LINE_AA)
            elif abs(abs(angle) - 90) < 10:  # Vertical
                cv2.line(ccdst,
                         pt1,
                         pt2, (0, 255, 0),
                         thickness=3,
                         lineType=cv2.LINE_AA)
                vert_x_max = max(vert_x_max, x1, x2)
                vert_x_min = min(vert_x_min, x1, x2)
                vert_y_max = max(vert_y_max, y1, y2)
                vert_y_min = min(vert_y_min, y1, y2)
            else:
                cv2.line(ccdst,
                         pt1,
                         pt2, (0, 0, 255),
                         thickness=3,
                         lineType=cv2.LINE_AA)

        computed_lines = np.array(computed_lines)

        # an image has a goalpost if two perpendicular lines with 0 degrees and 90 degrees intersect
        vertical_line = len(
            computed_lines[(abs(abs(computed_lines[:, 1]) - 90) < 10)]) > 0

        if vertical_line:
            w = vert_y_max - vert_y_min
            l = vert_x_max - vert_x_min
            area = w * l
            if area < 50000:
                cv2.rectangle(ccdst, [vert_x_min, vert_y_min],
                              [vert_x_max, vert_y_max], [0, 255, 255],
                              thickness=2)
                x_avg = (vert_x_max + vert_x_min) / 2
                [floor_center_x, floor_center_y,
                 _] = self.camera.findFloorCoordinate([x_avg, vert_y_max])
                [floor_close_x, floor_close_y,
                 _] = self.camera.findFloorCoordinate([x_avg, vert_y_max])

                camera_pose = self.camera.pose

                distance = (
                    (floor_center_x - camera_pose.get_position()[0])**2 +
                    (floor_center_y - camera_pose.get_position()[1])**2)**0.5
                theta = math.atan2(distance, camera_pose.get_position()[2])
                ratio = math.tan(theta)**2
                ratio2 = 1 / (1 + ratio)
                if ratio2 < 0 or ratio2 > 1:
                    print('here')  # TODO: ratio2 = 1 / (1 + ratio) should stay in (0, 1]

                floor_x = floor_close_x * (1 -
                                           ratio2) + floor_center_x * ratio2
                floor_y = floor_close_y * (1 -
                                           ratio2) + floor_center_y * ratio2
                if floor_x > 0.0:

                    br = tf2_ros.TransformBroadcaster()
                    robot_pose = TransformStamped()
                    robot_pose.header.frame_id = self.robot_name + "/base_camera"
                    robot_pose.child_frame_id = self.robot_name + "/goal_post"
                    robot_pose.header.stamp = img.header.stamp
                    robot_pose.header.seq = img.header.seq
                    robot_pose.transform.translation.x = floor_x
                    robot_pose.transform.translation.y = floor_y
                    robot_pose.transform.translation.z = 0
                    robot_pose.transform.rotation.x = 0
                    robot_pose.transform.rotation.y = 0
                    robot_pose.transform.rotation.z = 0
                    robot_pose.transform.rotation.w = 1
                    br.sendTransform(robot_pose)

        if self.image_publisher.get_num_connections() > 0:
            img_out = CvBridge().cv2_to_imgmsg(ccdst)
            img_out.header = img.header
            self.image_publisher.publish(img_out)

        t_end = time.time()
        rospy.loginfo_throttle(
            20, "GoalPost detection time: " + str(t_end - t_start))
コード例 #24
0
ファイル: base_driver.py プロジェクト: rospin/roboabc
    def __init__(self):
        # initialize node
        rospy.init_node("base_driver")
        rospy.on_shutdown(self.shutdown)

        # establish communication
        self.comm = None
        self.comm = serial.Serial(rospy.get_param("~port", "/dev/ttyUSB0"),
                                  rospy.get_param("~baud", 57600),
                                  timeout=1)

        # create subscriber, publisher and tf broadcaster
        rospy.Subscriber("cmd_vel", Twist, self.callback)
        odomPUB = rospy.Publisher("odom", Odometry, queue_size=10)
        odomTFB = tf.TransformBroadcaster()

        # get parameters of rate and frame_id
        rateCtrl = rospy.Rate(rospy.get_param("~rate", 10.0))
        odom_FID = rospy.get_param('~odom_frame_id', 'odom')
        base_FID = rospy.get_param('~base_frame_id', 'base_footprint')

        #
        rospy.loginfo("BaseDriver running ...")

        # prepare data structure
        odometry = Odometry()
        odometry.header.frame_id = odom_FID
        odometry.child_frame_id = base_FID
        odom_tf_ = TransformStamped()
        odom_tf_.header.frame_id = odom_FID
        odom_tf_.child_frame_id = base_FID

        # initialize data
        buff = ""
        x, y, th, v, w = 0, 0, 0, 0, 0
        now = rospy.Time.now()

        # infinite loop
        while not rospy.is_shutdown():
            now = rospy.Time.now()

            # read communication data buffer
            buff += self.comm.read(1)

            # locate and check valid data
            iBGN = buff.find(self.BGN)
            iDAT = iBGN + len(self.BGN)
            if iBGN >= 0 and len(buff[iDAT:]) >= 16:
                iEND = iBGN + 16
                if buff[iEND:iEND + len(self.END)] == self.END:
                    # get valid data frame
                    data = buff[iDAT:iDAT + 10]
                    buff = ""

                    # parse data
                    x, y, th, v, w = struct.unpack(">5h", data)
                    x /= 100.0
                    y /= 100.0
                    th = th * math.pi / 180.0
                    v /= 100.0
                    w /= 100.0
                    q = Quaternion(
                        *tf.transformations.quaternion_from_euler(0, 0, th))

                    # publish odometry msg
                    odometry.header.stamp = now
                    odometry.pose.pose.position.x = x
                    odometry.pose.pose.position.y = y
                    odometry.pose.pose.position.z = 0
                    odometry.pose.pose.orientation = q
                    odometry.twist.twist.linear.x = v
                    odometry.twist.twist.linear.y = 0
                    odometry.twist.twist.angular.z = w
                    odomPUB.publish(odometry)

                    # broadcast tf between odometry and base
                    odom_tf_.header.stamp = rospy.Time.now()
                    odom_tf_.transform.translation.x = x
                    odom_tf_.transform.translation.y = y
                    odom_tf_.transform.translation.z = 0
                    odom_tf_.transform.rotation = q
                    odomTFB.sendTransformMessage(odom_tf_)

                    # clear communication input buffer
                    self.comm.flushInput()
                else:
                    buff = ""
                    continue
            else:
                continue

            rateCtrl.sleep()
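
The frame format the loop above expects is BGN + ten payload bytes (five big-endian int16s: x, y, theta, v, w) + END, with iEND = iBGN + 16 implying a 6-byte start marker. self.BGN and self.END are not shown in this excerpt, so the marker values below are assumptions used only to exercise the layout:

import struct

BGN = b"\xaa\x55\xaa\x55\xaa\x55"  # hypothetical 6-byte start marker
END = b"\x0d\x0a"                  # hypothetical end marker


def pack_frame(x_cm, y_cm, th_deg, v_cms, w_cds):
    """Build one frame matching the parser's expected layout."""
    return BGN + struct.pack(">5h", x_cm, y_cm, th_deg, v_cms, w_cds) + END


frame = pack_frame(120, -35, 90, 50, 10)
payload = frame[len(BGN):len(BGN) + 10]
print(struct.unpack(">5h", payload))  # (120, -35, 90, 50, 10)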
コード例 #25
0
         [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0],
         [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0],
         [0, 0, 0], [0, 0, 0]]
quaternions = [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
               [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
               [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
               [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
               [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
eulers = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

v = Vector3Stamped()
v.vector.x = 0
v.vector.y = 0
v.vector.z = 1

t = TransformStamped()
print("UDP target IP: %s" % UDP_IP)
print("UDP target port: %s" % UDP_PORT)


def plane_angle(a1, b1, c1, a2, b2, c2):
    divider = math.sqrt(pow(a1, 2) + pow(b1, 2) + pow(c1, 2)) * math.sqrt(
        pow(a2, 2) + pow(b2, 2) + pow(c2, 2))
    if divider == 0:
        return 0
    else:
        result = abs(a1 * a2 + b1 * b2 + c1 * c2) / divider
        if result > 1:
            return math.acos(1)
        else:
            return math.acos(result)
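
A quick usage check for plane_angle (assuming the dot-product fix above): perpendicular normals should give pi/2, parallel normals 0.

import math

print(plane_angle(1, 0, 0, 0, 1, 0))                # ~1.5708 (pi/2)
print(math.degrees(plane_angle(1, 0, 0, 2, 0, 0)))  # 0.0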
コード例 #26
0
    def __init__(self,
                 fixed_frame="map",
                 base_link="base_link",
                 resolution=0.1,
                 output_size=[40, 40],
                 topics_list=list(),
                 threshold_value=50,
                 discard_time=120):

        self.fixed_frame = fixed_frame
        self.base_link = base_link
        self.resolution = resolution
        self.output_size = output_size
        self.output_shape = tuple(
            [int(round(i / self.resolution)) for i in self.output_size])
        '''
        FLOAT32 is used for calculation in cv2 due to its internal type preference.
        Each cell holds two counters, [occupied_count, total_count]:
        every observation increments total_count, and occupied observations
        also increment occupied_count.
        '''
        self.count_map = np.zeros(self.output_shape + (3, ), np.float32)
        self.count_map[:, :,
                       1] = 0.0001  # avoid division by zero when no observation has been made

        self.count_prob_mask = np.zeros(self.output_shape, np.uint16)

        self.current_stack_signed = np.zeros(self.output_shape, np.int8)

        self.last_to_current_warp_affine_mat = np.ndarray((2, 3))

        # Using exponential decay to discard information that has not been
        # update but still in the current_stack zone
        self.discard_time = discard_time
        self.decay_rate = np.log(0.01) / discard_time
        self.decay_mask = np.ones(self.output_shape, np.float32) * 100
        self.last_decay = time.time()

        self.temp_stack = np.zeros(self.output_shape, np.uint16)
        self.threshold_value = threshold_value

        self.map_msg = OccupancyGrid()
        self.og_publisher = rospy.Publisher("test/output",
                                            OccupancyGrid,
                                            queue_size=1)

        self.tfBuffer = tf2_ros.Buffer()
        self.tf_listener = tf2_ros.TransformListener(self.tfBuffer)
        self.tf2_ros_exception = (tf2_ros.LookupException,
                                  tf2_ros.ExtrapolationException)

        self.last_current_pose = TransformStamped()
        self.last_current_pose.transform.rotation.w = 1

        # Wait for tfBuffer to work properly
        wait_time = time.time()
        print(self.base_link)
        print(self.fixed_frame)
        while True:
            if time.time() - wait_time > 2:
                rospy.logerr("Cannot get information about tf. Abort")
                rospy.signal_shutdown("Cannot get information about tf. Abort")
                break

            try:
                self.last_current_pose = self.tfBuffer.lookup_transform(
                    self.base_link, self.fixed_frame, rospy.Time(0))
            except self.tf2_ros_exception as e:
                template = "An exception of type {0} occurred at __init__. Arguments:\n{1!r}"
                message = template.format(type(e).__name__, e.args)
                rospy.logwarn_throttle(1, message)
                continue

            except tf2_ros.ConnectivityException as e:
                template = "An exception of type {0} occurred at __init__. Arguments:\n{1!r}"
                message = template.format(type(e).__name__, e.args)
                rospy.logwarn_throttle(1, message)
                continue

            break

        self.og_handler_holder = list()
        for topic in topics_list:
            self.og_handler_holder.append(
                OccupancyGridCallback(self.base_link, self.output_shape,
                                      self.resolution, topic))
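
The decay constant above is chosen so that exp(decay_rate * t) falls to 1% of its initial value at t = discard_time. A quick numeric check:

import numpy as np

discard_time = 120.0
decay_rate = np.log(0.01) / discard_time
print(np.exp(decay_rate * discard_time))        # 0.01
print(np.exp(decay_rate * discard_time / 2.0))  # 0.1 at half the horizon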
コード例 #27
0
    def __init__(self):
        # Internal state
        self.internal_state = TransformStamped()

        ## Current State

        # Position

        self.x = 0
        self.y = 0
        self.z = 0
        self.roll = 0
        self.pitch = 0
        self.yaw = 0

        # Old position

        self.old_x = 0
        self.old_y = 0
        self.old_z = 0
        self.old_roll = 0
        self.old_pitch = 0
        self.old_yaw = 0

        # Velocity

        self.x_dot = 0
        self.y_dot = 0
        self.z_dot = 0
        self.roll_dot = 0
        self.pitch_dot = 0
        self.yaw_dot = 0

        # Old velocity

        self.old_x_dot = 0
        self.old_y_dot = 0
        self.old_z_dot = 0
        self.old_roll_dot = 0
        self.old_pitch_dot = 0
        self.old_yaw_dot = 0

        # Acceleration

        self.x_double_dot = 0
        self.y_double_dot = 0
        self.z_double_dot = 0
        self.roll_double_dot = 0
        self.pitch_double_dot = 0
        self.yaw_double_dot = 0

        ## Desired State

        # Position

        self.x_des = 0
        self.y_des = 0
        self.z_des = 0
        self.roll_des = 0
        self.pitch_des = 0
        self.yaw_des = 0

        # Velocity

        self.x_dot_des = 0
        self.y_dot_des = 0
        self.z_dot_des = 0
        self.roll_dot_des = 0
        self.pitch_dot_des = 0
        self.yaw_dot_des = 0

        # Acceleration

        self.x_double_dot_des = 0
        self.y_double_dot_des = 0
        self.z_double_dot_des = 0
        self.roll_double_dot_des = 0
        self.pitch_double_dot_des = 0
        self.yaw_double_dot_des = 0

        return
コード例 #28
0
    def __init__(self):
        self.prefix = ""
        self.first_call = True
        dt = 1.0 / 120.0
        w = 10 * 3.141592
        self.origin_x = -8.5
        self.max_slip = 0
        self.max_speed = 0
        self.cur_speed = 0
        self.cur_speed_b = 0
        self.cur_slip = 0
        self.lap_time_avg = 0
        self.lap_time_std = 0
        self.avg_speed = 0
        self.x_lpf = LPF(dt, w, 0.0)
        self.y_lpf = LPF(dt, w, 0.0)
        self.z_lpf = LPF(dt, w, 0.0)
        self.xa = 0.0
        self.ya = 0.0
        self.va = 0.0
        self.xb = 0.0
        self.yb = 0.0
        self.vb = 0.0
        self.ax = 0.0
        self.ay = 0.0
        self.az = 0.0
        self.roll = 0.0
        self.roll_rate = 0.0
        self.pitch = 0.0
        self.pitch_rate = 0.0
        self.yaw = 0.0
        self.yaw_rate = 0.0
        self.heading_multiplier = 0.0
        self.roll_lpf = LPF(dt, w, 0.0)
        self.pitch_lpf = LPF(dt, w, 0.0)
        self.yaw_lpf = LPF(dt, w, 0.0)
        self.roll_rate_lpf = LPF(dt, w, 0.0)
        self.pitch_rate_lpf = LPF(dt, w, 0.0)
        self.yaw_rate_lpf = LPF(dt, w, 0.0)
        self.seq = 1
        self.cur_pose_odom = Odometry()
        self.last_pose_time = 0.0
        self.last_pose_msg = PoseStamped()
        self.last_roll = 0.0
        self.last_pitch = 0.0
        self.last_yaw = 0.0
        self.last_yaw_temp = 0.0
        self.last_yaw_rate = 0.0
        self.last_v_x = 0.0
        self.last_v_y = 0.0
        self.last_v_z = 0.0
        self.vx_lpf = LPF(dt, w, 0.0)
        self.vy_lpf = LPF(dt, w, 0.0)
        self.vz_lpf = LPF(dt, w, 0.0)
        rospy.Subscriber("pose", PoseStamped, self.poseSubCallback)
        # rospy.Subscriber("ground_pose", Pose2D, self.groundPoseSubCallback)
        # rospy.Subscriber("mppi_controller/subscribedPose", Odometry, self.mppiposeCallback)
        # rospy.Subscriber("lap_stats", pathIntegralStats, self.lapStatCallback)
        self.speed_pub = rospy.Publisher('info/linear_speed',
                                         Float64,
                                         queue_size=1)
        self.slip_pub = rospy.Publisher("info/slip_angle",
                                        Float64,
                                        queue_size=1)
        self.distance_pub = rospy.Publisher("info/distance",
                                            Float64,
                                            queue_size=1)
        self.ttc_pub = rospy.Publisher("info/ttc", Float64, queue_size=1)
        self.angle_pub = rospy.Publisher("info/angle_difference",
                                         Float64,
                                         queue_size=1)
        self.yaw_pub = rospy.Publisher("info/yaw", Float64, queue_size=1)
        self.yaw_rate_pub = rospy.Publisher("info/yaw_rate",
                                            Float64,
                                            queue_size=1)
        self.pose_odom_pub = rospy.Publisher("pose_estimate",
                                             Odometry,
                                             queue_size=1)
        self.accel_pub_x = rospy.Publisher("acceleration/x",
                                           Float64,
                                           queue_size=1)
        self.accel_pub_y = rospy.Publisher("acceleration/y",
                                           Float64,
                                           queue_size=1)
        self.accel_pub_z = rospy.Publisher("acceleration/z",
                                           Float64,
                                           queue_size=1)
        self.br = tf2_ros.TransformBroadcaster()
        self.t = TransformStamped()
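
The LPF class used throughout this constructor is not shown in the excerpt; its LPF(dt, w, x0) signature suggests a first-order low-pass with sample period dt and cutoff w in rad/s. A minimal sketch consistent with that interface (an assumption, not the project's actual class):

class LPF(object):
    """First-order low-pass filter: y += alpha * (x - y)."""

    def __init__(self, dt, w, x0):
        # standard discretization of a first-order lag with cutoff w [rad/s]
        self.alpha = dt * w / (1.0 + dt * w)
        self.y = x0

    def update(self, x):
        self.y += self.alpha * (x - self.y)
        return self.y


lpf = LPF(dt=1.0 / 120.0, w=10 * 3.141592, x0=0.0)
print(lpf.update(1.0))  # first filtered sample, ~0.21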
コード例 #29
0
    def callback(self, data):
        rospy.loginfo("Recognized objects %d", data.objects.__len__())

        table_corners = []

        # obtain table_offset and table_pose
        to = rospy.wait_for_message(self.table_topic, MarkerArray)

        # obtain Table corners ...
        rospy.loginfo("Tables hull size %d", to.markers.__len__())
        if not to.markers:
            rospy.loginfo("No tables detected")
            return
        else:
            # NB - D says that ORK has this set to filter based on height.
            # IIRC, it's 0.6-0.8m above whatever frame is set as the floor
            point_array_size = 4      # for the 4 corners of the bounding box
            for i in range (0, point_array_size):
                p = Point()
                p.x = to.markers[0].points[i].x
                p.y = to.markers[0].points[i].y
                p.z = to.markers[0].points[i].z
                table_corners.append(p)
            # this is a table pose at the edge close to the robot, in the center of x axis
            table_pose = PoseStamped()
            table_pose.header = to.markers[0].header
            table_pose.pose = to.markers[0].pose
            
        # Determine table dimensions
        rospy.loginfo('calculating table pose bounding box in frame: %s' % table_pose.header.frame_id)
        min_x = sys.float_info.max
        min_y = sys.float_info.max
        max_x = -sys.float_info.max
        max_y = -sys.float_info.max

        for i in range (table_corners.__len__()):
            if table_corners[i].x > max_x:
                max_x = table_corners[i].x
            if table_corners[i].y > max_y:
                max_y = table_corners[i].y
            if table_corners[i].x < min_x:
                min_x = table_corners[i].x
            if table_corners[i].y < min_y:
                min_y = table_corners[i].y

        table_dim = Point()
        # TODO: if we don't (can't!) compute the height, should we at least give it non-zero depth? 
        # (would also require shifting the observed centroid down by table_dim.z/2)
        table_dim.z = 0.0

        table_dim.x = abs(max_x - min_x)
        table_dim.y = abs(max_y - min_y)
        print "Dimensions: ", table_dim.x, table_dim.y

        # Temporary frame used for transformations
        table_link = 'table_link'

        # centroid of a table in table_link frame
        centroid = PoseStamped()
        centroid.header.frame_id = table_link
        centroid.header.stamp = table_pose.header.stamp
        centroid.pose.position.x = (max_x + min_x)/2.
        centroid.pose.position.y = (max_y + min_y)/2.
        centroid.pose.position.z = 0.0
        centroid.pose.orientation.x = 0.0
        centroid.pose.orientation.y = 0.0
        centroid.pose.orientation.z = 0.0
        centroid.pose.orientation.w = 1.0

        # generate transform from table_pose to our newly-defined table_link
        tt = TransformStamped()
        tt.header = table_pose.header
        tt.child_frame_id = table_link
        tt.transform.translation = table_pose.pose.position
        tt.transform.rotation = table_pose.pose.orientation
        self.tl.setTransform(tt)
        self.tl.waitForTransform(table_link, table_pose.header.frame_id, table_pose.header.stamp, rospy.Duration(3.0))
        if self.tl.canTransform(table_pose.header.frame_id, table_link, table_pose.header.stamp):
            centroid_table_pose = self.tl.transformPose(table_pose.header.frame_id, centroid)
            self.pose_pub.publish(centroid_table_pose)
        else:
            rospy.logwarn("No transform between %s and %s possible",table_pose.header.frame_id, table_link)
            return

        # transform each object into desired frame; add to list of clusters
        cluster_list = []
        object_list = []
        #if only clusters on the table should be extracted
        if self.extract_clusters:
            cluster_list = self.extract_clusters_f()
        # else, try to recognize objects
        if self.recognize_objects:
            for i in range (data.objects.__len__()):
                #            rospy.loginfo("Point clouds %s", data.objects[i].point_clouds.__len__())              
                pc = PointCloud2()
                pc = data.objects[i].point_clouds[0]
                arr = pointclouds.pointcloud2_to_array(pc, 1)
                arr_xyz = pointclouds.get_xyz_points(arr)
                
                arr_xyz_trans = []
                for j in range (arr_xyz.__len__()):
                    ps = PointStamped()
                    ps.header.frame_id = table_link
                    ps.header.stamp = table_pose.header.stamp
                    ps.point.x = arr_xyz[j][0]
                    ps.point.y = arr_xyz[j][1]
                    ps.point.z = arr_xyz[j][2]
                    if self.tl.canTransform(table_pose.header.frame_id, table_link, table_pose.header.stamp):
                        ps_in_kinect_frame = self.tl.transformPoint(table_pose.header.frame_id, ps)
                    else:
                        rospy.logwarn("No transform between %s and %s possible",table_pose.header.frame_id, table_link)
                        return
                    arr_xyz_trans.append([ps_in_kinect_frame.point.x, ps_in_kinect_frame.point.y, ps_in_kinect_frame.point.z])
                    
                pc_trans = pointclouds.xyz_array_to_pointcloud2(np.asarray([arr_xyz_trans]),
                                                                table_pose.header.stamp,
                                                                table_pose.header.frame_id)
                    
                #self.pub.publish(pc_trans)
                object_list.append(pc_trans)
        
        rospy.loginfo("object size %d", object_list.__len__())

        cluster_centroids = []
        object_centroids = []
        for cloud in cluster_list:
            cluster_centroids.append(self.compute_centroid(cloud))

        for cloud in object_list:
            object_centroids.append(self.compute_centroid(cloud))

        recognized_objects = []

        for centroid in range(len(cluster_centroids)):
            # dist = self.closest(np_cluster_centroids, np.asarray(cluster_centroids[centroid]))
            indices = []
            if object_centroids:
                indices = self.do_kdtree(np.asarray(object_centroids),
                                         np.asarray(cluster_centroids[centroid]),
                                         self.search_radius)
            if not indices:
                recognized_objects.append(0)
            else:
                recognized_objects.append(1)
        print "recognized objects: ", recognized_objects

        
        # finally - save all data in the object that'll be sent in response to actionserver requests
        with self.result_lock:
            self._result.objects = cluster_list 
            self._result.recognized_objects = recognized_objects 
            self._result.table_dims = table_dim
            self._result.table_pose = centroid_table_pose
            self.has_data = True
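
The compute_centroid and do_kdtree helpers called above are not shown in this
listing. A minimal sketch of what they might look like as methods of the same
class, assuming centroids are plain XYZ triples and using scipy.spatial.cKDTree
for the radius search (the real helpers may differ):

    def compute_centroid(self, cloud):
        # mean XYZ of a sensor_msgs/PointCloud2, via the same pointclouds
        # helpers used in the callback above
        arr = pointclouds.pointcloud2_to_array(cloud, 1)
        xyz = np.asarray(pointclouds.get_xyz_points(arr))
        return xyz.mean(axis=0)

    def do_kdtree(self, candidates, query, radius):
        # indices of every candidate centroid within `radius` of `query`;
        # an empty list (falsy) means no match, as the caller expects
        import scipy.spatial  # assumed available
        tree = scipy.spatial.cKDTree(np.asarray(candidates))
        return tree.query_ball_point(np.asarray(query), radius)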
Code Example #30
def hatch_bl_tf(event):
    global h2odom, tape_visible

    br = tf2_ros.TransformBroadcaster()
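    # (a new TransformBroadcaster per tick works, but constructing it once at
    # module scope would be the more usual pattern)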
    if tape_visible is False:
        h2odom.header.stamp = rospy.Time.now()
        br.sendTransform(h2odom)
        return

    try:
        bl2cam = tfBuffer.lookup_transform('base_link', 'base_camera',
                                           rospy.Time())
        odom2bl = tfBuffer.lookup_transform('odom', 'base_link', rospy.Time())
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
            tf2_ros.ExtrapolationException):
        # transforms not available yet; try again on the next timer tick
        return

    bl2cam_trans = (bl2cam.transform.translation.x,
                    bl2cam.transform.translation.y,
                    bl2cam.transform.translation.z)
    bl2cam_quat = [
        bl2cam.transform.rotation.x, bl2cam.transform.rotation.y,
        bl2cam.transform.rotation.z, bl2cam.transform.rotation.w
    ]
    # homogeneous 4x4 matrix for the base_link -> base_camera transform
    bl2cam_mat = tf_conversions.transformations.concatenate_matrices(
        tf_conversions.transformations.translation_matrix(bl2cam_trans),
        tf_conversions.transformations.quaternion_matrix(bl2cam_quat))

    # hatch pose in the camera frame: pos_x / pos_y are globals, presumably set
    # by a tape-detection callback; the fixed yaw of -pi spins the frame 180
    # degrees about z
    h2cam_mat = tf_conversions.transformations.concatenate_matrices(
        tf_conversions.transformations.translation_matrix((pos_x, pos_y, 0)),
        tf_conversions.transformations.euler_matrix(0, 0, -pi))

    # hatch -> base_link: T_hatch_bl = T_hatch_cam * inv(T_bl_cam)
    h2bl_mat = np.dot(h2cam_mat, np.linalg.inv(bl2cam_mat))

    odom2bl_trans = (odom2bl.transform.translation.x,
                     odom2bl.transform.translation.y,
                     odom2bl.transform.translation.z)
    odom2bl_quat = [
        odom2bl.transform.rotation.x, odom2bl.transform.rotation.y,
        odom2bl.transform.rotation.z, odom2bl.transform.rotation.w
    ]
    # homogeneous 4x4 matrix for the odom -> base_link transform
    odom2bl_mat = tf_conversions.transformations.concatenate_matrices(
        tf_conversions.transformations.translation_matrix(odom2bl_trans),
        tf_conversions.transformations.quaternion_matrix(odom2bl_quat))

    # hatch -> odom: T_hatch_odom = T_hatch_bl * inv(T_odom_bl)
    h2odom_mat = np.dot(h2bl_mat, np.linalg.inv(odom2bl_mat))

    h2odom_tran = tf_conversions.transformations.translation_from_matrix(
        h2odom_mat)
    h2odom_rot = tf_conversions.transformations.quaternion_from_matrix(
        h2odom_mat)

    h2odom = TransformStamped()
    h2odom.header.stamp = rospy.Time.now()
    h2odom.header.frame_id = 'hatch_frame'
    h2odom.child_frame_id = 'odom'
    h2odom.transform.translation.x = h2odom_tran[0]
    h2odom.transform.translation.y = h2odom_tran[1]
    h2odom.transform.translation.z = h2odom_tran[2]
    h2odom.transform.rotation.x = h2odom_rot[0]
    h2odom.transform.rotation.y = h2odom_rot[1]
    h2odom.transform.rotation.z = h2odom_rot[2]
    h2odom.transform.rotation.w = h2odom_rot[3]
    br.sendTransform(h2odom)
    tape_visible = False
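
hatch_bl_tf relies on a module-level tfBuffer, an initial h2odom, and a
periodic trigger, none of which appear in this excerpt. A hypothetical setup
block, assuming a ~10 Hz rospy.Timer and an identity initial transform (the
node name and rate are illustrative; pos_x / pos_y would be set by a
tape-detection callback that is not shown here):

if __name__ == '__main__':
    rospy.init_node('hatch_frame_broadcaster')

    tfBuffer = tf2_ros.Buffer()
    listener = tf2_ros.TransformListener(tfBuffer)  # fills tfBuffer in the background

    # identity hatch_frame -> odom transform until the tape is first seen
    tape_visible = False
    h2odom = TransformStamped()
    h2odom.header.frame_id = 'hatch_frame'
    h2odom.child_frame_id = 'odom'
    h2odom.transform.rotation.w = 1.0

    rospy.Timer(rospy.Duration(0.1), hatch_bl_tf)  # callback receives a rospy TimerEvent
    rospy.spin()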