def on_enter(self, userdata):
    """Send a move_base goal for userdata.waypoint and request goal tolerances.

    Sets self._failed on dispatch errors; self._reached is cleared for execute().
    """
    self._startTime = Time.now()
    self._failed = False
    self._reached = False
    # goal id used only for the tolerance service call below
    goal_id = GoalID()
    goal_id.id = 'abcd'
    goal_id.stamp = Time.now()
    action_goal = MoveBaseGoal()
    action_goal.target_pose = userdata.waypoint
    action_goal.speed = userdata.speed
    # default to the "world" frame when the waypoint carries no frame id
    if action_goal.target_pose.header.frame_id == "":
        action_goal.target_pose.header.frame_id = "world"
    try:
        self._move_client.send_goal(self._action_topic, action_goal)
        # position / orientation tolerances for reaching the waypoint
        resp = self.set_tolerance(goal_id, 0.2, 1.55)
    except Exception as e:
        Logger.logwarn('Failed to send motion request to waypoint (%(x).3f, %(y).3f):\n%(err)s' % {
            'err': str(e),
            'x': userdata.waypoint.pose.position.x,
            'y': userdata.waypoint.pose.position.y
        })
        self._failed = True
    Logger.loginfo('Driving to next waypoint')
def gyro_cb(msg_gyro):
    """Kalman-filter prediction step driven by gyro rates.

    Integrates the angular rates into the state quaternion q over the measured
    elapsed time, inflates the covariance P by the process noise Q, and
    publishes the predicted roll/pitch/yaw.
    """
    global q, prev_time, P, INI_SET
    if not INI_SET:  # initial state is not set
        return
    # extract angular rates (assumed rad/s) from the stamped sensor message
    wx, wy, wz = msg_gyro.data
    # measure the time elapsed since the previous callback
    secs = Time.now().secs
    nsecs = Time.now().nsecs
    cur_time = secs + nsecs * 10**(-9)
    time_elapsed = cur_time - prev_time
    prev_time = cur_time
    # prediction: integrate with Euler's method over the measured interval.
    # BUG FIX: the original multiplied by `dt`, which is never assigned in this
    # callback; the computed `time_elapsed` is the intended step (the sibling
    # gyro callbacks use their measured interval the same way).
    # q, P = integrateTillT(q, dt, time_elapsed, wx, wy, wz, P)
    current_gyro_quat = quaternion_from_euler(wx * time_elapsed,
                                              wy * time_elapsed,
                                              wz * time_elapsed)
    q = quaternion_multiply(current_gyro_quat, q)
    # covariance prediction: grow uncertainty by the process noise
    P += Q
    rollF, pitchF, yawF = euler_from_quaternion(q)
    print("[GYRO_CB] rollF : {}, pitchF : {}, yawF : {}, P: \n{}".format(
        m.degrees(rollF), m.degrees(pitchF), m.degrees(yawF), P))
    # publish the prediction, stamped with the capture time
    stamped_msg = SensorMsgStamped()
    stamped_msg.data = [rollF, pitchF, yawF]
    stamped_msg.header.stamp.secs = secs
    stamped_msg.header.stamp.nsecs = nsecs
    rpyKF_pub_stamped.publish(stamped_msg)
def main():
    """Broadcast a ramping odom/laser/camera transform tree at 5 Hz."""
    rospy.init_node('my_broadcaster')
    broadcaster = TransformBroadcaster()
    rotation = (0.0, 0.0, 0.0, 1.0)  # identity quaternion
    rate = rospy.Rate(5)  # 5hz
    x = y = 0.0
    # child/parent frame pairs that all receive the same transform
    frame_pairs = (('/odom', '/world'), ('/laser', '/chassis'), ('/camera1', '/chassis'))
    while not rospy.is_shutdown():
        # ramp the translation, wrapping back to the origin at x >= 2
        if x >= 2:
            x = y = 0.0
        x += 0.1
        y += 0.1
        translation = (x, y, 0.0)
        for child, parent in frame_pairs:
            broadcaster.sendTransform(translation, rotation, Time.now(), child, parent)
        rate.sleep()
def gyro_cb(msg_gyro): global rollG, pitchG, yawG, prev_time, q # extract data wx, wy, wz = msg_gyro.data # we need to calculate dt secs = Time.now().secs nsecs = Time.now().nsecs cur_time = secs + nsecs * 10**(-9) dt = cur_time - prev_time prev_time = cur_time # process gyroscope data current_gyro_quat = quaternion_from_euler(wx * dt, wy * dt, wz * dt) q = quaternion_multiply(current_gyro_quat, q) rollG, pitchG, yawG = euler_from_quaternion(q) # lets publish this transform # tf_br.sendTransform((0, 0, 0.5), (q[0],q[1],q[2],q[3]), Time.now(), "base_link", "world") # print the rollG, pitchG, yawG if DEBUG: print("rollG : {}, pitchG : {}, yawG : {}".format( m.degrees(rollG), m.degrees(pitchG), m.degrees(yawG))) # publish the roll pitch yaw topic stamped_msg = SensorMsgStamped() stamped_msg.data = [rollG, pitchG, yawG] stamped_msg.header.stamp.secs = secs stamped_msg.header.stamp.nsecs = nsecs rpyG_pub_stamped.publish(stamped_msg)
def echo_socket(ws):
    """Websocket loop: republish orientation samples as stamped ROS messages,
    a Pose, and a TF, log the raw line to a file, and echo it back."""
    b = TransformBroadcaster()
    f = open("orientation.txt", "a")
    while True:
        message = ws.receive()
        orient_data = message.split(',')
        orient_data = [float(data) for data in orient_data]
        stamped_msg = SensorMsgStamped()
        stamped_msg.data = orient_data
        stamped_msg.header.stamp.secs = Time.now().secs
        stamped_msg.header.stamp.nsecs = Time.now().nsecs
        orient_pub_stamped.publish(stamped_msg)
        ### Publish to Pose topic for visualization ###
        # NOTE(review): indices 1..3 are treated as Euler angles; element 0 is
        # presumably a timestamp/sequence field -- confirm against the sender.
        q = quaternion_from_euler(orient_data[1], orient_data[2], orient_data[3])
        pose_msg = Pose()
        pose_msg.orientation.x = q[0]
        pose_msg.orientation.y = q[1]
        pose_msg.orientation.z = q[2]
        pose_msg.orientation.w = q[3]
        pose_pub.publish(pose_msg)
        b.sendTransform((1, 1, 1), (q[0], q[1], q[2], q[3]), Time.now(), 'child_link', 'base_link')
        ### END HERE ###
        print("[INFO:] Orientation{}".format(orient_data))
        ws.send(message)
        # Python 2 file-redirect print
        print >> f, message
    # NOTE(review): unreachable -- the while True loop above never exits
    f.close()
def gyro_cb(msg_gyro):
    """Complementary-filter gyro step: integrate rates into q and publish roll/pitch/yaw."""
    global q, prev_time, INI_SET
    # skip until the initial state has been set elsewhere
    if not INI_SET:
        return
    # extract angular rates (assumed rad/s)
    wx, wy, wz = msg_gyro.data
    # measure dt since the previous callback
    secs = Time.now().secs
    nsecs = Time.now().nsecs
    cur_time = secs + nsecs * 10 ** (-9)
    dt = cur_time - prev_time
    prev_time = cur_time
    print("the time gap is: {}".format(dt))
    # rotate the running quaternion by this sample's incremental rotation
    current_gyro_quat = quaternion_from_euler(wx * dt, wy * dt, wz * dt)
    q = quaternion_multiply(current_gyro_quat, q)
    rollF, pitchF, yawF = euler_from_quaternion(q)
    print("[GYRO_CB] rollF : {}, pitchF : {}, yawF : {}".format(m.degrees(rollF), m.degrees(pitchF), m.degrees(yawF)))
    # publish roll/pitch/yaw stamped with the capture time
    stamped_msg = SensorMsgStamped()
    stamped_msg.data = [rollF, pitchF, yawF]
    stamped_msg.header.stamp.secs = secs
    stamped_msg.header.stamp.nsecs = nsecs
    rpyCF_pub_stamped.publish(stamped_msg)
def move_robot_model_rotate():
    """Broadcast one fixed transform from /map to the robot and camera frames."""
    translation = (0.0, 0.0, 0.0)
    # NOTE(review): (0, 3, 0, 1) is not a unit quaternion -- confirm this
    # rotation is intended (tf normalizes, but the value looks like a typo).
    rotation = (0.0, 3.0, 0.0, 1.0)
    b = TransformBroadcaster()
    b.sendTransform(translation, rotation, Time.now(), 'base_link', '/map')
    b.sendTransform(translation, rotation, Time.now(), 'camera_link', '/map')
    b.sendTransform(translation, rotation, Time.now(), 'camera_link_normalized', '/map')
def auto_info():
    """Return an Info message populated with default bookkeeping values
    (fresh creation/access stamps, zero usages, current ROS/OS versions)."""
    info = Info()
    info.n_usages = 0
    info.source = "text-json"
    info.creation_date = Time.now()
    info.last_access = Time.now()
    info.ltm_version = "0.0.0"
    # ROS distro name from the environment (None when unset)
    info.ros_version = environ.get("ROS_DISTRO")
    # flattened platform.uname() tuple as a single string
    info.os_version = ''.join(uname())
    return info
def _callback(self, data):
    """Republish an incoming PointCloud transformed from 'map' into 'odom_combined'."""
    # block (up to 2 s) until the transform is available
    self.transformer.waitForTransform('odom_combined', 'map', Time.now(), rospy.Duration(2))
    new_data = PointCloud()
    new_data.header.stamp = Time.now()
    new_data.header.frame_id = 'odom_combined'
    # NOTE(review): called with (point, stamp) here, but the visible
    # _map_to_odom_combined definition takes a single pos argument -- these may
    # belong to different revisions; verify the actual signature.
    new_data.points = [
        self._map_to_odom_combined(p, data.header.stamp) for p in data.points
    ]
    new_data.channels = data.channels
    self.publisher.publish(new_data)
def execute(self, ud):
    """Publish the state's velocity command at self.rate for self.duration.

    Returns 'ok' once the duration elapses, or 'preempted' on node shutdown
    or a preempt request.
    """
    start_time = Time.now()
    while True:
        if rospy.is_shutdown():
            return 'preempted'
        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        # stop once the configured duration has elapsed
        if Time.now() - start_time >= self.duration:
            return 'ok'
        self.cmd_vel_pub.publish(self.command())
        self.rate.sleep()
def _map_to_odom_combined(self, pos):
    """Transform a map-frame (x, y, flag) triple into a Point32 in 'odom_combined'.

    pos[2] is treated as a flag: z is forced to 0 when pos[2] == 0, else 25.
    """
    point_in_map = PointStamped()
    point_in_map.header.frame_id = 'map'
    point_in_map.header.stamp = Time.now()
    point_in_map.point.x = pos[0]
    point_in_map.point.y = pos[1]
    # block (up to 2 s) until the transform is available
    self.transformer.waitForTransform('odom_combined', 'map', Time.now(), rospy.Duration(2))
    point_in_odom = self.transformer.transformPoint('odom_combined', point_in_map)
    # NOTE(review): the magic z values 0/25 look like visualization markers --
    # confirm their meaning with the consumer of these points.
    z = 0 if pos[2] == 0 else 25
    return Point32(point_in_odom.point.x, point_in_odom.point.y, z)
def _msg(self, index, distance):
    """Fill the shared Range message for sensor `index` with `distance`.

    The reading is clamped into [min_range, max_range] and the header is
    restamped; the same message object is reused across calls.
    """
    range_msg = self._range_msg
    range_msg.header.frame_id = "{}_{}".format(self._name, index)
    range_msg.header.stamp = Time.now()
    # clamp the raw distance into the sensor's valid range
    low_clamped = max(distance, range_msg.min_range)
    range_msg.range = min(low_clamped, range_msg.max_range)
    return range_msg
def _initialise_drive_command(self):
    """
    Build a fresh drive command and populate the essential header fields.
    """
    self._command = AckermannDriveStamped()
    self._command.header.stamp = Time.now()
    self._command.header.frame_id = "drive"
def __init__(self, params=(0.9, 0.0008, 0.0008), angle_speeds=None):
    """Wall-following PID controller: subscribes to /scan, publishes /drive.

    :param params: (kp, ki, kd) PID gains
    :param angle_speeds: mapping of steering deviation (deg) to speed --
        presumably "if within N degrees of straight, drive this fast";
        TODO confirm against the consumer of speed_per_angle.
    """
    if angle_speeds is None:
        angle_speeds = {20: 35, 10: 55, 0: 75}
    lidar_topic = '/scan'
    drive_topic = '/drive'
    self.lidar_sub = Subscriber(lidar_topic, LaserScan, self.lidar_callback)
    self.drive_pub = Publisher(drive_topic, AckermannDriveStamped, queue_size=5)
    # distance estimate and smoothing window for the lidar readings
    self.d = 0.0
    self.window_size = 1
    # lookahead angle for the two-beam wall distance estimate
    self.theta = math.pi / 4
    self.kp, self.ki, self.kd = params
    # PID state
    self.prev_error = 0.0
    self.error = 0.0
    self.square_error = 0.0
    self.integral = 0.0
    # steering limits in degrees; 90 is straight ahead (servo convention)
    self.min_angle = 45
    self.max_angle = 135
    self.servo_offset = 90.0
    self.angle = 90.0
    self.velocity = 1.0
    self.freq = 300
    # pre-built drive message; stamp is refreshed by the publisher path
    self.drive_msg = AckermannDriveStamped()
    self.drive_msg.header.stamp = Time.now()
    self.drive_msg.header.frame_id = 'laser'
    self.drive_msg.drive.steering_angle = 90
    self.drive_msg.drive.speed = 0.0
    self.speed_per_angle = angle_speeds
def _createEllipsoidMsg(self, positions, quaternions, scales):
    """Build a translucent SPHERE Marker representing an ellipsoid.

    :param positions: (x, y, z) center in base_footprint
    :param quaternions: (x, y, z, w) orientation
    :param scales: per-axis radii from an eigendecomposition
    :return: the populated Marker (not published here)
    """
    marker = Marker()
    marker.header.frame_id = "/base_footprint"
    marker.header.stamp = Time.now()
    marker.ns = "ellipsoid_py"
    marker.id = 0
    marker.type = Marker.SPHERE
    marker.action = Marker.ADD
    marker.pose.position.x = positions[0]
    marker.pose.position.y = positions[1]
    marker.pose.position.z = positions[2]
    marker.pose.orientation.x = quaternions[0]
    marker.pose.orientation.y = quaternions[1]
    marker.pose.orientation.z = quaternions[2]
    marker.pose.orientation.w = quaternions[3]
    # 2 times radius (scale is a diameter) and sqrt(2) from the eigendecomposition
    marker.scale.x = 2 * np.sqrt(2) * scales[0]
    marker.scale.y = 2 * np.sqrt(2) * scales[1]
    marker.scale.z = 2 * np.sqrt(2) * scales[2]
    # mostly transparent magenta so the robot model stays visible
    marker.color.a = 0.3
    marker.color.r = 1
    marker.color.g = 0.3
    marker.color.b = 1
    return marker
def _msg(self, orient, linaccel, angvel):
    """
    Create an Imu message
    :param orient: IMU orientation (x, y, z, w)
    :param linaccel: IMU linear acceleration (x, y, z)
    :param angvel: IMU angular velocity (x, y, z)
    :return: Imu message (the shared instance, re-stamped and refilled)
    """
    self._imu_msg.header.stamp = Time.now()
    # -1 in the covariance marks it as unavailable (sensor_msgs/Imu convention)
    self._imu_msg.orientation_covariance = (-1., )*9
    self._imu_msg.linear_acceleration_covariance = (-1., )*9
    self._imu_msg.angular_velocity_covariance = (-1., )*9
    self._imu_msg.orientation.w = orient.w
    self._imu_msg.orientation.x = orient.x
    self._imu_msg.orientation.y = orient.y
    self._imu_msg.orientation.z = orient.z
    self._imu_msg.linear_acceleration.x = linaccel.x
    self._imu_msg.linear_acceleration.y = linaccel.y
    self._imu_msg.linear_acceleration.z = linaccel.z
    self._imu_msg.angular_velocity.x = angvel.x
    self._imu_msg.angular_velocity.y = angvel.y
    self._imu_msg.angular_velocity.z = angvel.z
    return self._imu_msg
def __init__(self, name, frame_id, min_angle, max_angle, angle_increment,
             min_range, max_range, scan_time=None, time_increment=None,
             queue_size=10):
    """Laser scan publisher: pre-builds a LaserScan whose static fields are set
    once here; _msg() fills the per-scan dynamic fields.

    :param name: topic name for the LaserScan publisher
    :param frame_id: TF frame the scan is reported in
    """
    # Create the publisher
    self._publisher = Publisher(name, LaserScan, queue_size=queue_size)
    # Create a the laser scan message. Most of the message is static, the dynamic
    # fields are updated in _msg().
    self._scan_msg = LaserScan()
    self._scan_msg.header.frame_id = frame_id
    self._scan_msg.angle_max = max_angle
    self._scan_msg.angle_min = min_angle
    self._scan_msg.angle_increment = angle_increment
    self._scan_msg.range_min = float(min_range)
    self._scan_msg.range_max = float(max_range)
    # Note: scan_time is the time between scans of the laser, i.e., the time it takes to read 360 degrees.
    self._scan_msg.scan_time = scan_time
    # Note: time_increment is the time between measurements, i.e. how often we read the scan and publish it (in
    # seconds)
    self._scan_msg.time_increment = time_increment
    # Per ROS documentation, the intensities field should remain empty unless utilized by the sensor
    self._scan_msg.intensities = []
    # Storage of the last time for calculating the scan time
    self._last_scan_time = Time.now()
def call_back(timer):
    """Timer callback: publish a 4-reading LaserScan from the multiranger and
    broadcast the current /odom -> /base_link transform.

    The four ranges are front, left, back, right, matching the 2*pi sweep
    declared by angle_increment = 2*pi / num_readings.
    """
    current_time = rospy.Time.now()
    scan = LaserScan()
    scan.header.stamp = current_time
    scan.header.frame_id = 'laser_frame'
    scan.angle_min = 0
    scan.angle_max = 2 * math.pi
    scan.angle_increment = 2 * math.pi / num_readings
    scan.time_increment = (1.0 / laser_frequency) / (num_readings)
    scan.range_min = 0.0
    scan.range_max = 100.0
    scan.ranges = []
    scan.intensities = []
    # IDIOM FIX: compare against None with `is`, not `==` (PEP 8).
    # A missing front reading is reported as range 0.
    if multiranger.front is None:
        scan.ranges.append(0)
    else:
        scan.ranges.append(multiranger.front)
    # NOTE(review): left/back/right are not None-guarded like front is --
    # confirm they can never be None here.
    scan.ranges.append(multiranger.left)
    scan.ranges.append(multiranger.back)
    scan.ranges.append(multiranger.right)
    # broadcast the robot pose; yaw arrives in degrees, converted to radians
    b.sendTransform(
        (x, y, 0.0),
        transformations.quaternion_from_euler(0, 0, yaw[0] * (math.pi / 180)),
        Time.now(), '/base_link', '/odom')
    scan_pub.publish(scan)
def on_enter(self, userdata):
    """Publish a MoveBaseActionGoal toward userdata.pose, keeping only the yaw
    component of the requested orientation ("straighten up").

    Stores the generated goal id in userdata.goalId and returns 'succeeded'.
    """
    goal = MoveBaseActionGoal()
    goal.header.stamp = Time.now()
    goal.header.frame_id = userdata.frameId
    goal.goal.target_pose.pose.position = userdata.pose.position
    goal.goal.distance = userdata.params['distance']
    # "straighten up" given orientation: extract yaw only.
    # BUG FIX: the w component originally read `self.pose.orientation.w`,
    # inconsistent with the three userdata components beside it.
    yaw = euler_from_quaternion([userdata.pose.orientation.x,
                                 userdata.pose.orientation.y,
                                 userdata.pose.orientation.z,
                                 userdata.pose.orientation.w])[2]
    # rebuild a pure-yaw quaternion from half-angle sin/cos
    goal.goal.target_pose.pose.orientation.x = 0
    goal.goal.target_pose.pose.orientation.y = 0
    goal.goal.target_pose.pose.orientation.z = math.sin(yaw/2)
    goal.goal.target_pose.pose.orientation.w = math.cos(yaw/2)
    goal.goal.target_pose.header.frame_id = userdata.frameId
    # unique goal id derived from the stamp so the result can be matched later
    goal.goal_id.id = 'driveTo' + str(goal.header.stamp.secs) + '.' + str(goal.header.stamp.nsecs)
    goal.goal_id.stamp = goal.header.stamp
    userdata.goalId = goal.goal_id.id
    self._pub.publish(self._topic_move_base_goal, goal)
    return 'succeeded'
def getGroundTruthImg(count=0, time=None, need_compress=False):
    """Fetch depth + segmentation ground-truth images from AirSim and wrap them
    as ROS Image messages.

    :param count: sequence number stored in both headers
    :param time: stamp for both headers; None (default) means "now".
        BUG FIX: the original default `time=Time.now()` was evaluated once at
        import, so every defaulted call reused the same stale stamp.
    :param need_compress: forwarded to the AirSim image requests
    :return: (depth_msg, segmentation_msg)
    """
    if time is None:
        time = Time.now()
    responds = client.simGetImages([
        airsim.ImageRequest("RGB_Left", airsim.ImageType.DepthPlanar,
                            pixels_as_float=True, compress=need_compress),
        airsim.ImageRequest("RGB_Left", airsim.ImageType.Segmentation,
                            compress=need_compress)
    ])
    # depth arrives as a flat float list; reshape into a 2-D array
    img_depth = airsim.list_to_2d_float_array(responds[0].image_data_float,
                                              responds[0].width,
                                              responds[0].height)
    # segmentation arrives as raw BGR8 bytes
    img_buf = np.frombuffer(responds[1].image_data_uint8, dtype=np.uint8)
    imgR_seg = img_buf.reshape(responds[1].height, responds[1].width, 3)
    msg_depth = bridge.cv2_to_imgmsg(cvim=img_depth, encoding="passthrough")
    msg_depth.header.seq = count
    msg_depth.header.stamp.set(time.secs, time.nsecs)
    msg_seg = bridge.cv2_to_imgmsg(cvim=imgR_seg, encoding="bgr8")
    msg_seg.header.seq = count
    msg_seg.header.stamp.set(time.secs, time.nsecs)
    return msg_depth, msg_seg
def got_accel_mag(msg_accel, msg_mag):
    """Tilt-compensated compass: roll/pitch from the accelerometer, yaw from
    the magnetometer, broadcast as a world->phone transform."""
    ## GET ROLL AND PITCH FROM THE ACCELEROMETER
    ax, ay, az = msg_accel.data
    mx, my, mz = msg_mag.data
    ## NORMALIZE THE ACCELEROMETER DATA
    norm_accel = m.sqrt(ax**2 + ay**2 + az**2)
    ax = ax / norm_accel
    ay = ay / norm_accel
    az = az / norm_accel
    # ## NORMALIZE THE MAGNETOMETER DATA
    # norm_mag = m.sqrt(mx**2 + my**2 + mz**2)
    # mx = mx / norm_mag
    # my = my / norm_mag
    # mz = mz / norm_mag
    ## GET THE ROLL AND PITCH (gravity direction from the accelerometer)
    roll = m.atan2(ay, az)
    pitch = m.atan2(-ax, m.sqrt(ay**2 + az**2))
    ## COMPENSATE FOR YAW USING MAGNETOMETER: project the magnetic field onto
    ## the horizontal plane using the roll/pitch found above
    Mx = mx * m.cos(pitch) + mz * m.sin(pitch)
    My = mx * m.sin(roll) * m.sin(pitch) + my * m.cos(roll) - mz * m.sin(
        roll) * m.cos(pitch)
    yaw = m.atan2(-My, Mx)
    ## CONVERT TO quaternion_from_euler
    q = quaternion_from_euler(roll, pitch, yaw)
    ## LETS PUBLISH THIS TO THE BROADCASTER (fixed (1,1,1) offset for display)
    tf_br.sendTransform((1, 1, 1), (q[0], q[1], q[2], q[3]), Time.now(),
                        'phone', 'world')
def _get_transforms(self, time=None):
    """
    Samples the robot and optical transforms at the given time.

    :param time: sampling time (now if None)
    :type time: None|Time
    :rtype: dict[str, ((float, float, float), (float, float, float, float))]
    """
    if time is None:
        time = Time.now()
    # here we trick the library (it is actually made for eye_on_hand only):
    # for eye-on-base we simply invert the base/effector lookup direction.
    if self.handeye_parameters.eye_on_hand:
        rob = self.tfBuffer.lookup_transform(
            self.handeye_parameters.robot_base_frame,
            self.handeye_parameters.robot_effector_frame, time, Duration(10))
    else:
        rob = self.tfBuffer.lookup_transform(
            self.handeye_parameters.robot_effector_frame,
            self.handeye_parameters.robot_base_frame, time, Duration(10))
    opt = self.tfBuffer.lookup_transform(
        self.handeye_parameters.tracking_base_frame,
        self.handeye_parameters.tracking_marker_frame, time, Duration(10))
    return {'robot': rob, 'optical': opt}
def main():
    """Integrate joystick axes into a pose and broadcast it as odom->camera_link at 5 Hz."""
    rospy.init_node('Odometria_Xbox')
    b = TransformBroadcaster()
    translation = (0.0, 0.0, 0.0)
    rotation = (0.0, 0.0, 0.0, 1.0)  # quaternion
    rate = rospy.Rate(5)  # 5hz
    # accumulated yaw and x since start
    yant = 0.0
    xant = 0.0
    while not rospy.is_shutdown():
        #y = math.degrees(math.asin(joy.leftX()))  #+ yant
        # accumulate the stick deflections (leftX -> yaw, leftY -> forward)
        y = joy.leftX() + yant
        x = joy.leftY() + xant
        # NOTE(review): the accumulated x is reused as the translation's x
        # while y only feeds the yaw -- confirm that is the intent.
        translation = (x, 0.0, 0.0)
        rotation = tf.transformations.quaternion_from_euler(0, 0, y)
        yant = y
        xant = x
        #print(y)
        b.sendTransform(translation, rotation, Time.now(), 'camera_link', 'odom')
        rate.sleep()
def visualize_poop(x,y,z,color,frame,ns):
    """Publish a downward arrow Marker at (x, y, z), colored by the `color` code.

    color codes: 0 green, 1 blue, 2 yellow (r+g), 3 cyan (g+b).
    """
    msg = Marker()
    msg.header = Header(stamp=Time.now(), frame_id=frame)
    msg.scale = Vector3(x=0.005, y=0.04, z=0.0)  # for arrow
    # arrow from 15 cm above the point straight down onto it
    msg.points = [Point(x=x, y=y, z=z + 0.15), Point(x=x, y=y, z=z)]
    msg.ns = ns
    msg.id = 0
    msg.action = 0  # add
    msg.type = 0  # arrow
    msg.color = ColorRGBA(r=0, g=0, b=0, a=1)
    # color code -> channels switched fully on
    channels_by_code = {0: ('g',), 1: ('b',), 2: ('r', 'g'), 3: ('g', 'b')}
    for channel in channels_by_code.get(color, ()):
        setattr(msg.color, channel, 1)
    viz_pub.publish(msg)
def publish(pub, signal_level, quiet):
    """Publish the wifi signal level (dBm), logging it first unless quiet."""
    if not quiet:
        loginfo('---------- Wifi Signal ------------')
        loginfo('Signal Level: ' + str(signal_level) + ' dBm')
    wifi_msg = WifiData()
    wifi_msg.data = int(signal_level)
    # sequential id is automatically set, frame id doesn't matter for this
    wifi_msg.header = Header(stamp=Time.now())
    pub.publish(wifi_msg)
def echo_socket(ws):
    """Websocket loop: republish accelerometer samples as stamped ROS messages,
    log the raw line, and echo it back."""
    f = open("accelerometer.txt", "a")
    while True:
        message = ws.receive()
        # comma-separated floats from the phone
        accel_data = message.split(',')
        accel_data = [float(data) for data in accel_data]
        stamped_msg = SensorMsgStamped()
        stamped_msg.data = accel_data
        stamped_msg.header.stamp.secs = Time.now().secs
        stamped_msg.header.stamp.nsecs = Time.now().nsecs
        accel_pub_stamped.publish(stamped_msg)
        print("[INFO:] Accelerometer{}".format(accel_data))
        ws.send(message)
        # Python 2 file-redirect print
        print >> f, message
    # NOTE(review): unreachable -- the while True loop never exits
    f.close()
def echo_socket(ws):
    """Websocket loop: republish gyroscope samples as stamped ROS messages,
    log the raw line, and echo it back."""
    f = open("gyroscope.txt", "a")
    while True:
        message = ws.receive()
        # comma-separated floats from the phone
        gyro_data = message.split(',')
        gyro_data = [float(data) for data in gyro_data]
        stamped_msg = SensorMsgStamped()
        stamped_msg.data = gyro_data
        stamped_msg.header.stamp.secs = Time.now().secs
        stamped_msg.header.stamp.nsecs = Time.now().nsecs
        gyro_pub_stamped.publish(stamped_msg)
        print("[INFO:] Gyroscope{}".format(gyro_data))
        ws.send(message)
        # Python 2 file-redirect print
        print >> f, message
    # NOTE(review): unreachable -- the while True loop never exits
    f.close()
def pose_callback(self, msg):
    """Copy the incoming pose into the cached TransformStamped, restamp it,
    and broadcast it."""
    transform = self.ts
    transform.header.stamp = Time.now()
    position = msg.pose.position
    orientation = msg.pose.orientation
    transform.transform.translation.x = position.x
    transform.transform.translation.y = position.y
    transform.transform.translation.z = position.z
    transform.transform.rotation.x = orientation.x
    transform.transform.rotation.y = orientation.y
    transform.transform.rotation.z = orientation.z
    transform.transform.rotation.w = orientation.w
    self.tf_broadcaster.sendTransform(transform)
def _broadcastTransform(self, Pose, parentName, childName):
    """Broadcast `Pose` as a parentName->childName TransformStamped.

    NOTE(review): the parameter name `Pose` shadows the geometry_msgs Pose
    class inside this method; kept as-is because it is part of the signature.
    """
    br = TransformBroadcaster()
    tr = TransformStamped()
    tr.header.stamp = Time.now()
    tr.header.frame_id = parentName
    tr.child_frame_id = childName
    tr.transform.translation = Pose.position
    tr.transform.rotation = Pose.orientation
    br.sendTransform(tr)
def echo_socket(ws):
    """Websocket loop: republish geolocation samples as stamped ROS messages,
    log the raw line, and echo it back."""
    global geoloc_data, geoloc_pub
    f = open("geolocation.txt", "a")
    while True:
        message = ws.receive()
        # comma-separated floats from the phone
        geoloc_data = message.split(',')
        geoloc_data = [float(data) for data in geoloc_data]
        stamped_msg = SensorMsgStamped()
        stamped_msg.data = geoloc_data
        stamped_msg.header.stamp.secs = Time.now().secs
        stamped_msg.header.stamp.nsecs = Time.now().nsecs
        geoloc_pub_stamped.publish(stamped_msg)
        print("[INFO:] Geolocation{}".format(geoloc_data))
        ws.send(message)
        # Python 2 file-redirect print
        print >> f, message
    # NOTE(review): unreachable -- the while True loop never exits
    f.close()
def echo_socket(ws):
    """Websocket loop: republish magnetometer samples as stamped ROS messages,
    log the raw line, and echo it back."""
    global magneto_data, magneto_pub
    f = open("magnetometer.txt", "a")
    while True:
        message = ws.receive()
        # comma-separated floats from the phone
        magneto_data = message.split(',')
        magneto_data = [float(data) for data in magneto_data]
        stamped_msg = SensorMsgStamped()
        stamped_msg.data = magneto_data
        stamped_msg.header.stamp.secs = Time.now().secs
        stamped_msg.header.stamp.nsecs = Time.now().nsecs
        magneto_pub_stamped.publish(stamped_msg)
        print("[INFO:] Magnetometer{}".format(magneto_data))
        ws.send(message)
        # Python 2 file-redirect print
        print >> f, message
    # NOTE(review): unreachable -- the while True loop never exits
    f.close()
def __init__(self):
    """Configure the base ScanSensor as an infrared array scanner, with ranges
    taken from the parameter server."""
    min_range = get_param("Infrared Min Range")
    max_range = get_param("Infrared Max Range")
    ScanSensor.__init__(self, "infrared_scan", "infrared_array", min_range, max_range)
    # used by _get_data() to compute the time between scans
    self._last_scan_time = Time.now()
    self._max_range = max_range
def __init__(self):
    """Configure the base ScanSensor as an ultrasonic array scanner, with
    ranges taken from the parameter server."""
    min_range = get_param("Ultrasonic Min Range")
    max_range = get_param("Ultrasonic Max Range")
    ScanSensor.__init__(self, "ultrasonic_scan", "ultrasonic_array", min_range, max_range)
    # used by _get_data() to compute the time between scans
    self._last_scan_time = Time.now()
    self._max_range = max_range
def _msg(self, ranges, intensities, scan_time):
    """Build a LaserScan from the given readings, clamping each range into
    [range_min, range_max]."""
    # NOTE(review): delta_time is computed but unused below -- time_increment
    # and scan_time are hard-coded to 0.1 (see the commented alternatives).
    new_time = Time.now()
    delta_time = new_time - self._last_time
    self._last_time = new_time
    msg = LaserScan()
    msg.header.frame_id = self._frame_id
    msg.header.stamp = Time.now()
    msg.angle_max = self.__MAX_ANGLE
    msg.angle_min = self.__MIN_ANGLE
    msg.angle_increment = self.__ANGLE_INCREMENT
    # Note: time_increment is the time between measurements, i.e. how often we read the scan and publish it (in
    # seconds)
    msg.time_increment = 0.1  #delta_time.secs
    # Note: scan_time is the time between scans of the laser, i.e., the time it takes to read 360 degrees.
    msg.scan_time = 0.1  # scan_time
    msg.range_min = float(self._min_range)
    msg.range_max = float(self._max_range)
    # clamp every reading into the sensor's valid interval
    msg.ranges = [min(max(range, msg.range_min), msg.range_max) for range in ranges]
    msg.intensities = intensities
    return msg
def __init__(self, pub_name, frame_id, min_range, max_range):
    """Scan sensor base: set up the LaserScan publisher and the HAL proxy.

    :raises ScanSensorError: if the hardware abstraction proxy cannot be created
    """
    self._publisher = Publisher(pub_name, LaserScan, queue_size=10)
    self._frame_id = frame_id
    self._min_range = min_range
    self._max_range = max_range
    # used by _msg() to measure the interval between scans
    self._last_time = Time.now()
    try:
        self._hal_proxy = BaseHALProxy()
    except BaseHALProxyError:
        raise ScanSensorError("Unable to create HAL")
def execute(self, userdata):
    # This method is called periodically while the state is active.
    # Main purpose is to check state conditions and trigger a corresponding outcome.
    # If no outcome is returned, the state will stay active.
    current_obj = self._sub.get_last_msg(self._objectTopic)
    if current_obj:
        # only confirmed victims (state == 2), skipping the bootstrap object
        # 'victim_0'; everything else keeps the state active
        if current_obj.info.class_id == 'victim' and current_obj.state.state == 2 and current_obj.info.object_id != 'victim_0':
            # hand the detected pose to the next state, re-stamped in 'map'
            userdata.pose = PoseStamped()
            userdata.pose.pose = current_obj.pose.pose
            userdata.pose.header.stamp = Time.now()
            userdata.pose.header.frame_id = 'map'
            Logger.loginfo(current_obj.info.object_id)
            return 'found'
def reset_pose(self):
    """Sets the current pose to START. Doesn't move the robot directly, then
    navigates back to the start position."""
    loginfo("Resetting pose.")
    req = PoseWithCovarianceStamped()
    req.header = Header(stamp=Time.now(), frame_id='/map')
    req.pose.pose = self._x_y_yaw_to_pose(START_X, START_Y, START_YAW)
    # 6x6 covariance: 0.25 m^2 on x/y, ~0.0685 rad^2 on yaw (amcl defaults)
    req.pose.covariance = [0.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.25, 0.0,
                           0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           0.0, 0.0, 0.06853891945200942, 0.0, 0.0, 0.0, 0.0,
                           0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    self.initial_pose_pub.publish(req)
    self.go_to_start()
def visualize_base_ray():
    """Publish a line-strip Marker along the base_footprint x-axis out to the
    working distance, slightly above the ground plane."""
    msg = Marker()
    msg.header = Header(stamp=Time.now(), frame_id="base_footprint")
    msg.scale = Vector3(x=0.005, y=0.0, z=0.0)  # only x is used
    msg.pose.position = Point(x=0, y=0, z=0)
    msg.pose.orientation = Quaternion(x=0, y=0, z=0, w=1)
    # two points 1 cm off the ground, from the base origin to the working distance
    msg.points = [Point(x=0, y=0, z=0.01),
                  Point(x=WORKING_DIST_FROM_POOP, y=0, z=0.01)]
    msg.ns = "base_ray"
    msg.id = 0
    msg.action = 0  # add
    msg.type = 4  # line strip
    # half-green + full-blue, fully opaque
    msg.color = ColorRGBA(r=0, g=0.5, b=1, a=1)
    viz_pub.publish(msg)
def execute(self, userdata):
    """Watch the object topic for an object matching userdata.type/state;
    on a match, fill userdata.pose/object_id and return 'found'.
    Returning nothing keeps the state active."""
    current_obj = self._sub.get_last_msg(self._objectTopic)
    if current_obj:
        if current_obj.info.class_id == userdata.type and current_obj.state.state == userdata.state:
            # hand the detected pose to the next state, re-stamped in 'map'
            userdata.pose = PoseStamped()
            userdata.pose.pose = current_obj.pose.pose
            userdata.pose.header.stamp = Time.now()
            userdata.pose.header.frame_id = 'map'
            userdata.object_id = current_obj.info.object_id
            Logger.loginfo('detected %(x)s' % {
                'x': current_obj.info.object_id
            })
            return 'found'
def go_to(self, x_map, y_map, yaw_map):
    """Moves to the given x, y, and yaw in map coordinates.

    Blocks until move_base reports a result; returns self._at_goal.
    """
    loginfo("Going to pose x = %s, y = %s, yaw = %s." %
            (x_map, y_map, yaw_map))
    goal = MoveBaseGoal()
    goal.target_pose.header = Header(stamp=Time.now(), frame_id = '/map')
    goal.target_pose.pose = self._x_y_yaw_to_pose(x_map, y_map, yaw_map)
    self.move_base_ac.send_goal(goal)
    loginfo("Send goal to move base. Waiting for result.")
    self.move_base_ac.wait_for_result()
    #loginfo("Got result: %s" % self.move_base_ac.get_result())
    #loginfo("Pose: %s, %s, %s" %
    #        (self.get_x_map(), self.get_y_map(), self.get_yaw_map()))
    # brief settle time before reading the goal flag (set by a callback)
    sleep(1)
    loginfo("At Goal: %i", self._at_goal)
    return self._at_goal
def move_to_caller_goal_cb(userdata, nav_goal):
    """
    Returns a MoveBaseGoal object with the position where the movement was
    detected and sets userdata.out_caller_position with a PoseStamped object.
    """
    move_base_goal = MoveBaseGoal()
    move_base_goal.target_pose.header.frame_id = "/map"
    # stop 0.5 m short of the detected position, on the ground plane
    pose_detected = pose_at_distance(userdata.in_goal_position, 0.5)
    pose_detected.position.z = 0
    # face toward the detected point
    teta = math.atan2(pose_detected.position.y, pose_detected.position.x)
    pose_detected.orientation = Quaternion(*quaternion_from_euler(0, 0, teta))
    # the detected pose is expressed in base_link; convert it into map
    pose = transform_pose(pose_detected, "/base_link", "/map")
    move_base_goal.target_pose.pose = pose
    userdata.out_caller_position = move_base_goal.target_pose
    move_base_goal.target_pose.header.stamp = Time.now()
    return move_base_goal
def _get_data(self):
    """Return (ranges, intensities, scan_time) for a simulated 360-degree scan
    built from the ultrasonic sensor readings."""
    new_time = Time.now()
    scan_time = new_time - self._last_scan_time
    self._last_scan_time = new_time
    values = self._hal_proxy.GetUltrasonic()
    # start with max range everywhere; fill in the measured angles
    ranges = [self._max_range]*360
    for ii in range(len(values)):
        offset = UltrasonicScanSensor._OFFSETS[ii]
        # Note: There are duplicates in the offsets, e.g., sensors at the same angle but different height. I think
        # we want to take the smallest value.
        ranges[offset] = min(values[ii], ranges[offset])
    # Note: scan_time is the time between scans of the laser, i.e., the time it takes to read 360 degrees. For the
    # xv11 laser, scan_time comes from the driver. For the purposes of simulating a laser scan, we'll just use the
    # time between calls to publish
    # NOTE(review): .secs truncates to whole seconds (likely 0 at typical
    # rates) -- scan_time.to_sec() looks intended; confirm before changing.
    return ranges, [0]*360, scan_time.secs
def rotate_goal_cb(userdata, nav_goal):
    """
    Build a MoveBaseGoal that rotates the robot in place by DEGREES_IN_RADIANS,
    to the left or right according to the module-level rotation_left flag.
    """
    pose = Pose()
    pose.position = Point(0, 0, 0)
    # IDIOM FIX: plain truth test instead of `is True` (PEP 8); rotation_left
    # is the module-level direction flag.
    if rotation_left:
        pose.orientation = Quaternion(*quaternion_from_euler(0, 0, DEGREES_IN_RADIANS))
        loginfo(C.BACKGROUND_GREEN + "ROTATING ROBOT TO LEFT" + C.NATIVE_COLOR)
    else:
        pose.orientation = Quaternion(*quaternion_from_euler(0, 0, -DEGREES_IN_RADIANS))
        loginfo(C.BACKGROUND_GREEN + "ROTATING ROBOT TO RIGHT" + C.NATIVE_COLOR)
    # rotate relative to the robot itself, hence base_link
    move_base_goal = MoveBaseGoal()
    move_base_goal.target_pose.header.stamp = Time.now()
    move_base_goal.target_pose.header.frame_id = "/base_link"
    move_base_goal.target_pose.pose = pose
    return move_base_goal
def on_enter(self, userdata):
    """Push userdata.speed into the planner via dynamic reconfigure (saving the
    previous value), then send a move_base goal for userdata.waypoint.

    Sets self._failed on dispatch errors.
    """
    # remember the planner's current speed so it can be restored later
    speedValue = self._dynrec.get_configuration(timeout = 0.5)
    if speedValue is not None:
        self._defaultspeed = speedValue['speed']
    self._dynrec.update_configuration({'speed':userdata.speed})
    self._startTime = Time.now()
    self._failed = False
    self._reached = False
    #goal_id = GoalID()
    #goal_id.id = 'abcd'
    #goal_id.stamp = Time.now()
    action_goal = MoveBaseGoal()
    action_goal.target_pose = userdata.waypoint
    action_goal.speed = userdata.speed
    action_goal.reverse_allowed = self._allow_backwards
    # default to the "world" frame when the waypoint carries no frame id
    if action_goal.target_pose.header.frame_id == "":
        action_goal.target_pose.header.frame_id = "world"
    try:
        self._move_client.send_goal(self._action_topic, action_goal)
        #resp = self.set_tolerance(goal_id, 0.2, 1.55)
    except Exception as e:
        Logger.logwarn('Failed to send motion request to waypoint (%(x).3f, %(y).3f):\n%(err)s' % {
            'err': str(e),
            'x': userdata.waypoint.pose.position.x,
            'y': userdata.waypoint.pose.position.y
        })
        self._failed = True
    Logger.loginfo('Driving to next waypoint')
def execute(self, userdata):
    ''' Execute this state: poll the move client.

    Outcomes: 'reached' when the action reports result 1, 'failed' on any
    other result (or an earlier failure), 'update' after 10 s without a
    result, otherwise stay active.
    '''
    if self._failed:
        return 'failed'
    if self._reached:
        return 'reached'
    if self._move_client.has_result(self._action_topic):
        result = self._move_client.get_result(self._action_topic)
        # result code 1 == success for this action definition
        if result.result == 1:
            self._reached = True
            return 'reached'
        else:
            self._failed = True
            Logger.logwarn('Failed to reach waypoint!')
            return 'failed'
    # no result yet: after 10 seconds ask the behavior for an update
    self._currentTime = Time.now()
    if self._currentTime.secs - self._startTime.secs >= 10:
        return 'update'
def publish(self, active_keys): logdebug("Publishing!") # in_event = self._device.read_one() # while in_event is None: # # rospy.sleep(0.1) # in_event = self._device.read_one() # if in_event is None: # continue # if in_event.type == ecodes.EV_KEY: # break # else: # in_event = None # if in_event is None: # return # if in_event.type == ecodes.EV_KEY: msg = Joy(header=Header(stamp=Time.now())) msg.axes = [0.0] * self._axes_count msg.buttons = [0] * self._button_count # active_keys = self._device.active_keys(verbose=False) loginfo("active_keys : {}".format(active_keys)) for k in active_keys: msg.buttons[key_map[k]] = 1 loginfo("msg.buttons : {}".format(msg.buttons)) self._pub.publish(msg)
def handler(vrpn):
    """Handles the vrpn input: republishes the tracked pose, accumulates error
    statistics, runs the PD position/yaw controller, and publishes the Twist
    command, target marker, flight path, and (optionally) a landing command.

    Axis remap: VRPN (z, x, y) -> local (x, y, z).
    """
    startTime = vrpn.header.stamp.to_sec()
    # remap VRPN axes into the controller's frame
    position = [vrpn.pose.position.z, vrpn.pose.position.x, vrpn.pose.position.y]
    current_rotation = [vrpn.pose.orientation.z, -vrpn.pose.orientation.x,
                        vrpn.pose.orientation.y, vrpn.pose.orientation.w]
    # republish the remapped pose for visualization/logging
    pos_pose = PoseStamped()
    pos_pose.pose.position.x = position[0]
    pos_pose.pose.position.y = position[1]
    pos_pose.pose.position.z = position[2]
    pos_pose.pose.orientation.x = current_rotation[0]
    pos_pose.pose.orientation.y = current_rotation[1]
    pos_pose.pose.orientation.z = current_rotation[2]
    pos_pose.pose.orientation.w = current_rotation[3]
    pos_pose.header.stamp = vrpn.header.stamp
    pos_pose.header.frame_id = "world"
    POS_PUB.publish(pos_pose)
    # accumulate error statistics
    global num_observations
    num_observations += 1
    global sum_observations
    # NOTE(review): these use `=` while square_observations below uses `+=` --
    # if a running sum is intended this overwrites instead of accumulating;
    # confirm before changing.
    sum_observations[0] = target[0] - position[0]
    sum_observations[1] = target[1] - position[1]
    sum_observations[2] = target[2] - position[2]
    global square_observations
    square_observations[0] += (target[0] - position[0]) ** 2
    square_observations[1] += (target[1] - position[1]) ** 2
    square_observations[2] += (target[2] - position[2]) ** 2
    # Updating Marker Position
    marker = PoseStamped()
    marker.header.frame_id = "world"
    marker.pose.position.x = target[0]
    marker.pose.position.y = target[1]
    marker.pose.position.z = target[2]
    # marker.pose.orientation.x = target_rotation[0]
    # marker.pose.orientation.y = target_rotation[1]
    # marker.pose.orientation.z = target_rotation[2]
    # marker.pose.orientation.w = target_rotation[3]
    MARKER_PUB.publish(marker)
    time = vrpn.header.stamp.to_sec()
    message = Twist()
    # These are set to arbitrary values to disable hover mode
    message.angular.x = 50
    message.angular.y = 200
    # Calc values
    # x, y, z, rotation -- proportional terms
    p = [calc_p_control(kpx, target[0], position[0]),
         calc_p_control(kpy, target[1], position[1]),
         calc_p_control(kpz, target[2], position[2]),
         calc_p_control_angle(kp_rotation, target_rotation, current_rotation)]
    # integral terms are disabled
    i = [0, 0, 0, 0]
    # derivative terms (none for yaw)
    d = [calc_d_control(kdx, target[0], position[0], prev_position[0], time, prev_time, 0),
         calc_d_control(kdy, target[1], position[1], prev_position[1], time, prev_time, 1),
         calc_d_control(kdz, target[2], position[2], prev_position[2], time, prev_time, 2),
         0]
    # if abs(p[0]) < 0.001:
    #     p[0] = 0
    # if abs(p[1]) < 0.001:
    #     p[1] = 0
    # if abs(d[0]) < 0.001:
    #     d[0] = 0
    # if abs(d[1]) < 0.001:
    #     d[1] = 0
    # rotate the world-frame corrections into the drone's body frame
    offset_angle = calc_offset_angle(current_rotation)
    rot_p = global_to_drone_coordinates(p, offset_angle)
    rot_i = global_to_drone_coordinates(i, offset_angle)
    rot_d = global_to_drone_coordinates(d, offset_angle)
    message.linear.x = rot_p[0] + rot_i[0] + rot_d[0]
    message.linear.y = rot_p[1] + rot_i[1] + rot_d[1]
    message.linear.z = rot_p[2] + rot_i[2] + rot_d[2]
    message.angular.z = rot_p[3] + rot_i[3] + rot_d[3]
    # print "%f\t%f\t%f\t%f\t%f" % (p[0], p[1], d[0], d[1], p[3])
    # print position
    # print message.angular.z
    now = Time.now()
    # print now.to_sec() - startTime
    TWIST_PUB.publish(message)
    now = Time.now()
    # print now.to_sec() - startTime
    # extend and republish the flight path
    path.poses.append(pos_pose)
    PATH_PUB.publish(path)
    # remember this sample for the next derivative computation
    global prev_position
    prev_position = position
    global prev_time
    prev_time = time
    # Landing at location: trigger landing when within 2 cm of the target
    if land_on_block:
        dist = calc_distance(position)
        print dist
        if dist < 0.02:
            print "######################"
            LAND_PUB.publish(Empty())
            print calc_distance(position)