def detach_remove_box(self, current_package, timeout=4):
    """Detach the package from the end effector and deactivate the vacuum gripper."""
    # Wait while another arm or the conveyor is still using its service.
    if rospy.get_param('/ur5_2_vacuum_gripper_service') or rospy.get_param(
            '/conveyor_belt_service'):
        rospy.loginfo_once("Waiting for Service")
        rospy.sleep(1)
    rospy.set_param('/ur5_1_vacuum_gripper_service', True)
    self._scene.remove_attached_object(self._eef_link,
                                       name=current_package)
    try:
        rospy.wait_for_service(
            '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1')
        self.attach = rospy.ServiceProxy(
            '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1', vacuumGripper)
        self.attach(False)
        rospy.set_param('/ur5_1_vacuum_gripper_service', False)
    except rospy.ServiceException as e:
        rospy.logerr("Service call failed: %s", e)
        rospy.loginfo("Trying again to connect to the service")
        rospy.wait_for_service(
            '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1')
        rospy.set_param('/ur5_1_vacuum_gripper_service', True)
        self.attach = rospy.ServiceProxy(
            '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1', vacuumGripper)
        self.attach(False)
        rospy.set_param('/ur5_1_vacuum_gripper_service', False)
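# The wait-call-retry pattern above is repeated verbatim in attach_box further
# below. A minimal sketch of a helper that factors it out: the helper name and
# the retry count are assumptions, not part of the original class; the service
# name and the vacuumGripper srv type are taken from the snippets themselves.
def call_gripper_service(self, activate, retries=1):
    """Call the ur5_1 vacuum gripper service, retrying on rospy.ServiceException."""
    service_name = '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1'
    for attempt in range(retries + 1):
        try:
            rospy.wait_for_service(service_name)
            proxy = rospy.ServiceProxy(service_name, vacuumGripper)
            return proxy(activate)
        except rospy.ServiceException as e:
            rospy.logerr("Service call failed (attempt %d): %s", attempt + 1, e)
    return None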
Example 2
    def poses_callback(self, pose_msg, identity):
        """Callback for poses of other agents"""
        rospy.loginfo_once('Got other poses callback')
        if self.pose is None:
            return

        target_frame = f'drone_{self.my_id}/base_link'
        source_frame = 'world'

        try:
            transform = self.buffer.lookup_transform(target_frame=target_frame,
                                                     source_frame=source_frame,
                                                     time=rospy.Time(0))
        except Exception as e:
            rospy.logwarn('TF lookup failed: %s', e)
            return

        point = PointStamped()
        point.header = pose_msg.header
        point.point.x = pose_msg.pose.position.x
        point.point.y = pose_msg.pose.position.y
        point.point.z = pose_msg.pose.position.z

        point_tf = tf2_geometry_msgs.do_transform_point(point, transform)
        p = point_tf.point

        self.poses[identity] = np.array([p.x, p.y, p.z])
    def __init__(self):
        """ Creates an instance of the ROS node.
        """
        rospy.init_node('image_twirl')

        self.twirl_config_filepath = rospy.get_param('~twirl_config_filepath')
        self.twirler = imageTwirl(self.twirl_config_filepath)

        # Setup the publisher and subscriber.
        sub_topic = rospy.get_param('~image_input_topic')
        pub_topic = rospy.get_param('~image_output_topic')

        self.img_sub = rospy.Subscriber(sub_topic, Image, self.camera_callback)

        self.img_pub = rospy.Publisher(pub_topic,
                                       Image,
                                       queue_size=3,
                                       latch=True)

        # Converts between ROS Image messages and OpenCV images.
        self.bridge = CvBridge()

        # Rate object (10 Hz) for use by the node's main loop.
        self.rate = rospy.Rate(10)

        rospy.loginfo_once("The image_twirl node is initialized.")
Example 4
    def image_callback(self, image_msg, i):
        rospy.loginfo_once('Image callback')
        self.image_messages[i] = image_msg
        self.image_updated[i] = True
        # fire the aggregate callback once every input image has been refreshed
        if all(self.image_updated):
            self.image_updated = [False for _ in self.input_images]
            self.callback(*self.image_messages[:])
Example 5
    def callbackRadiationSource(self, data):

        if not self.is_initialized:
            return

        rospy.loginfo_once('[ComptonCamera]: getting radiation')
        self.got_radiation = True

        if data.id in self.rad_sources_ids:

            idx = self.rad_sources_ids.get(data.id)
            self.rad_sources[idx-1].position = np.array([data.world_pos.x, data.world_pos.y, data.world_pos.z])
            self.rad_sources[idx-1].last_update = rospy.Time.now()

        else:

            rospy.loginfo('[ComptonCamera]: registering new source with ID {}'.format(data.id))

            if data.material not in materials.radiation_sources:
                rospy.logerr('the material {} is not in the database of radiation sources'.format(data.material))
            else:

                new_source = Source(materials.radiation_sources[data.material].photon_energy,
                                    data.activity,
                                    np.array([data.world_pos.x, data.world_pos.y, data.world_pos.z]),
                                    data.id)
                self.rad_sources.append(new_source)
                self.rad_sources_ids[data.id] = len(self.rad_sources)
Example 6
def led_callback(req):
    if req.state == "stop":
        color = '#ff0000'
        frequency = 0
        data = True
    elif req.state == "betriebsbereit":
        color = "#1aff00"
        frequency = 0
        data = True
    elif req.state == "arbeitet":
        color = "#000dff"
        frequency = 1
        data = True
    elif req.state == "pausiert":
        color = "#ff8800'"
        frequency = 0
        data = True
    else:
        rospy.loginfo_once("The desired State is not defined!!!")
        color = '#ff0000'
        frequency = 0
        data = False
    print(data)
    print("ledResponse")
    print(ledResponse)
    with grpc.insecure_channel('localhost:50051') as channel:
        stub = led_serv.LEDStateIndicatorStub(channel)
        stub.setLEDState(led_msg.LEDState(color=color, frequency=frequency))
        #stub.setLEDState(led_msg.LEDState(color='#ff0000', frequency=0))
    return ledResponse(data)
Example 7
    def callbackOdometry(self, data):

        if not self.is_initialized:
            return

        rospy.loginfo_once('[ComptonCamera]: getting odometry')
        self.odometry = data
        self.got_odometry = True

        # world2drone
        self.quaternion_d2w = Quaternion(self.odometry.pose.pose.orientation.w,
                                         self.odometry.pose.pose.orientation.x,
                                         self.odometry.pose.pose.orientation.y,
                                         self.odometry.pose.pose.orientation.z)

        # try:
        #     position, quaternion = self.listener.lookupTransform("uav1/gps_origin", "uav1/fcu", rospy.Time(0))

        if self._offset_ == "left":
          offset = Quaternion(0.707, 0, 0, 0.707)
          self.quaternion_d2w = offset * self.quaternion_d2w

        if self._offset_ == "right":
          offset = Quaternion(0.707, 0, 0, -0.707)
          self.quaternion_d2w = offset * self.quaternion_d2w

        # drone2world
        self.quaternion_w2d = self.quaternion_d2w.inverse
    def callback_global_pos(self, msg):
        if not self.ready:
            return
        rospy.loginfo_once('Got global position')
        self.got_position = True
        self.latitude = msg.latitude
        self.longitude = msg.longitude

    def sensor_check(self):
        try:
            while not rospy.is_shutdown():
                rospy.loginfo_once("Starting sensor_check Thread")
                self.sensor_check_callback()
        except KeyboardInterrupt:
            raise
def pcl_callback(pcl_in_msg):
    rospy.loginfo_once("Received first pointcloud")

    # Get the point in numpy format and filter them
    pcl_in_numpy = ros_numpy.point_cloud2.pointcloud2_to_xyz_array(pcl_in_msg)
    pcl_out_numpy = []
    for p in pcl_in_numpy:
        if np.linalg.norm(p) > clear_radius and p[2] > ground_th:
            pcl_out_numpy.append(p)

    # Transform into array
    pcl_out_numpy = np.array(pcl_out_numpy)

    # Check size
    if pcl_out_numpy.shape[0] == 0:
        rospy.logwarn_throttle(5, "No points in filtered point cloud!")
        return

    # Generate a PointCloud2 message and publish it
    # reuse the incoming message (header, fields) and overwrite the point data
    pcl_out_msg = pcl_in_msg
    pcl_out_msg.header.stamp = rospy.Time.now()

    pcl_out_msg.width = pcl_out_numpy.shape[0]
    pcl_out_msg.height = 1
    pcl_out_msg.point_step = 12  # 3 x float32 (x, y, z)
    pcl_out_msg.row_step = pcl_out_msg.point_step * pcl_out_numpy.shape[0]  # bytes per row = point_step * width
    pcl_out_msg.data = pcl_out_numpy.astype(np.float32).tobytes()

    pcl_pub.publish(pcl_out_msg)
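# A hedged alternative to packing the PointCloud2 bytes by hand:
# sensor_msgs.point_cloud2.create_cloud_xyz32 builds a well-formed xyz cloud
# (fields, point_step and row_step included) from an N x 3 array. This is a
# sketch, not part of the original node; it reuses pcl_pub from above.
from sensor_msgs import point_cloud2

def publish_filtered_cloud(header, points_xyz):
    """points_xyz: iterable of (x, y, z) tuples or an N x 3 numpy array."""
    cloud_msg = point_cloud2.create_cloud_xyz32(header, points_xyz)
    pcl_pub.publish(cloud_msg)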
    def attach_box(self, current_package, timeout=4):

        touch_links = self._robot.get_link_names(self._planning_group)
        self._scene.attach_box(self._eef_link,
                               current_package,
                               touch_links=touch_links)

        # Wait while another arm or the conveyor is still using its service.
        if rospy.get_param('/ur5_2_vacuum_gripper_service') or rospy.get_param(
                '/conveyor_belt_service'):
            rospy.loginfo_once("Waiting for Service")
            rospy.sleep(1)

        rospy.set_param('/ur5_1_vacuum_gripper_service', True)
        try:
            rospy.wait_for_service(
                '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1')
            self.attach = rospy.ServiceProxy(
                '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1', vacuumGripper)
            self.attach(True)
            rospy.set_param('/ur5_1_vacuum_gripper_service', False)

        except rospy.ServiceException as e:
            rospy.logerr("Service call failed: %s", e)
            rospy.loginfo("Trying to reconnect to the service")
            rospy.wait_for_service(
                '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1')
            rospy.set_param('/ur5_1_vacuum_gripper_service', True)
            self.attach = rospy.ServiceProxy(
                '/eyrc/vb/ur5/activate_vacuum_gripper/ur5_1', vacuumGripper)
            self.attach(True)
            rospy.set_param('/ur5_1_vacuum_gripper_service', False)
    def callback_service_on_request(self, req):
        if req.get_package_type:
            self.pack = str(self._pc)
            return camera_packagesResponse(self.pack)
        else:
            rospy.loginfo_once("Package not found")
            return camera_packagesResponse("")
Example 13
    def scan_received(self, msg):
        """ This is the default logic for what to do when processing scan data.
            Feel free to modify this, however, we hope it will provide a good
            guide.  The input msg is an object of type sensor_msgs/LaserScan """
        if not(self.initialized):
            # wait for initialization to complete
            rospy.loginfo_once("Waiting for initial pose estimate...")
            return
        else:
            rospy.loginfo_once("Initial pose estimate found!")

        # wait a little while to see if the transform becomes available.  This fixes a race
        # condition where the scan would arrive a little bit before the odom to base_link transform
        # was updated.
        self.tf_listener.waitForTransform(self.base_frame, msg.header.frame_id, msg.header.stamp, rospy.Duration(0.5))
        if not(self.tf_listener.canTransform(self.base_frame, msg.header.frame_id, msg.header.stamp)):
            # need to know how to transform the laser to the base frame
            # this will be given by either Gazebo or neato_node
            return

        if not(self.tf_listener.canTransform(self.base_frame, self.odom_frame, msg.header.stamp)):
            # need to know how to transform between base and odometric frames
            # this will eventually be published by either Gazebo or neato_node
            return

        # calculate pose of laser relative to the robot base
        p = PoseStamped(header=Header(stamp=rospy.Time(0),
                                      frame_id=msg.header.frame_id))
        self.laser_pose = self.tf_listener.transformPose(self.base_frame, p)

        # find out where the robot thinks it is based on its odometry
        p = PoseStamped(header=Header(stamp=msg.header.stamp,
                                      frame_id=self.base_frame),
                        pose=Pose())
        self.odom_pose = self.tf_listener.transformPose(self.odom_frame, p)
        # store the odometry pose in a more convenient format (x, y, theta)
        new_odom_xy_theta = self.transform_helper.convert_pose_to_xy_and_theta(self.odom_pose.pose)
        if not self.current_odom_xy_theta:
            self.current_odom_xy_theta = new_odom_xy_theta
            return

        if not(self.particle_cloud):
            # now that we have all of the necessary transforms we can update the particle cloud
            self.initialize_particle_cloud(msg.header.stamp)
        elif (math.fabs(new_odom_xy_theta[0] - self.current_odom_xy_theta[0]) > self.d_thresh or
              math.fabs(new_odom_xy_theta[1] - self.current_odom_xy_theta[1]) > self.d_thresh or
              math.fabs(new_odom_xy_theta[2] - self.current_odom_xy_theta[2]) > self.a_thresh):
            # we have moved far enough to do an update!
            self.update_particles_with_odom(msg)    # update based on odometry
            if self.last_projected_stable_scan:
                last_projected_scan_timeshift = deepcopy(self.last_projected_stable_scan)
                last_projected_scan_timeshift.header.stamp = msg.header.stamp
                self.scan_in_base_link = self.tf_listener.transformPointCloud("base_link", last_projected_scan_timeshift)

            self.update_particles_with_laser(msg)   # update based on laser scan
            self.update_robot_pose(msg.header.stamp)                # update robot's pose
            self.resample_particles()               # resample particles to focus on areas of high density
        # publish particles (so things like rviz can see them)
        self.publish_particles(msg)
Example 14
    def update(self, data, config):
        """
        @brief { function_description }

        @param      self The object
        @param      data data handled through the ros class
        @param      config parameters handled through dyn. recon.

        @return nothing
        """
        # protected region user update begin #
        # can be removed once filled
        
        # Deep Copy in -> out
        data.out_vel_out.linear.x = data.in_vel_in.linear.x
        data.out_vel_out.angular.z = data.in_vel_in.angular.z
        
        # Get Current Displacement
        if data.in_amcl_pose_updated:
            dx = data.in_amcl_pose.pose.pose.position.x - data.in_move_base_goal.goal.target_pose.pose.position.x
            dy = data.in_amcl_pose.pose.pose.position.y - data.in_move_base_goal.goal.target_pose.pose.position.y
            self.displacement = sqrt(dx * dx + dy * dy)

        if data.in_control_mode == 'NAVIGATION' and self.displacement < config.goal_dist_thres:
            # goal based velocity filter
            rospy.logdebug('Goal based Enhanced Filter Mode: %f < %f'% (self.displacement, config.goal_dist_thres)) 
            data.out_vel_out.linear.x = config.Kp * self.displacement * data.in_vel_in.linear.x
            rospy.logdebug('Reduce velocity from %f to %f' % (data.in_vel_in.linear.x, data.out_vel_out.linear.x))
            if data.out_vel_out.linear.x < config.auto_min_vel and config.auto_min_vel < data.in_vel_in.linear.x:
                rospy.logdebug('Bounded to %f', config.auto_min_vel)
                data.out_vel_out.linear.x = config.auto_min_vel
            # data.out_vel_out.angular.z = config.Kp * self.displacement * data.in_vel_in.angular.z
        else:
            rospy.loginfo_once('Acceleration Limit Velocity Filter Mode')
            # move_base has not started yet, or has started but is not active;
            # just apply the normal acceleration-limited velocity filter
            # 0.05 sec is from 20 Hz

        # Find request linear acceleration
        acc_lin = (data.out_vel_out.linear.x  - self.vel_prev.linear.x) / 0.05
        # Filter Linear Velocity
        if acc_lin > config.acc_lin_lim:
            rospy.logdebug('Filter + %f, %f', acc_lin, config.acc_lin_lim)
            data.out_vel_out.linear.x = self.vel_prev.linear.x + config.acc_lin_lim * 0.05
        elif acc_lin < -config.dec_lin_lim:
            rospy.logdebug('Filter - %f, %f', acc_lin, config.dec_lin_lim)
            data.out_vel_out.linear.x = self.vel_prev.linear.x - config.dec_lin_lim * 0.05

        # Find request angular acceleration
        acc_ang = (data.out_vel_out.angular.z  - self.vel_prev.angular.z) / 0.05
        # Filter Angular Velocity
        if acc_ang > config.acc_ang_lim:
            rospy.logdebug('Filter + %f, %f', acc_ang, config.acc_ang_lim)
            data.out_vel_out.angular.z = self.vel_prev.angular.z + config.acc_ang_lim * 0.05
        elif acc_ang < -config.dec_ang_lim:
            rospy.logdebug('Filter - %f, %f', acc_ang, config.dec_ang_lim)
            data.out_vel_out.angular.z = self.vel_prev.angular.z - config.dec_ang_lim * 0.05
            
        self.vel_prev = data.out_vel_out
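# Worked example of the acceleration limit above. The numbers are illustrative
# assumptions, not values from the original dynamic-reconfigure config.
dt = 0.05            # update period noted in the snippet (20 Hz)
acc_lin_lim = 0.5    # assumed linear acceleration limit, m/s^2
vel_prev = 0.0       # previously commanded linear velocity, m/s
vel_requested = 0.3  # newly requested linear velocity, m/s
acc_lin = (vel_requested - vel_prev) / dt        # 6.0 m/s^2, over the limit
if acc_lin > acc_lin_lim:
    vel_out = vel_prev + acc_lin_lim * dt        # ramp instead of jumping
else:
    vel_out = vel_requested
print(vel_out)  # 0.025 m/s on this cycle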
Example 15
    def mainTimer(self, event):

        rospy.loginfo_once('Main timer spinning')

        self.publisher_fov.publish(self.image_size)
        self.publisher_overlap.publish(self.overlap)
        self.publisher_width.publish(self.image_size*self.width)
        self.publisher_height.publish(self.image_size*self.height)
Example 16
    def run(self):
        while not rospy.is_shutdown():
            if self.goal_msg is None:
                rospy.loginfo_once("Waiting for goal...")
                # avoid busy-waiting until a goal arrives
                self.update_rate.sleep()
                continue

            cmd = self.computeTwistCmd()
            self.twist_pub.publish(cmd)
            self.update_rate.sleep()
    def callback(self, msg):

        # convert the ROS PointCloud2 message to an Open3D point cloud
        cloud = orh.rospc_to_o3dpc(msg, remove_nans=True)
        cloud = cloud.voxel_down_sample(voxel_size=self.voxel_size)
        cloud = orh.apply_pass_through_filter(cloud, self.ROI['x'],
                                              self.ROI['y'], self.ROI['z'])
        cloud = orh.o3dpc_to_rospc(cloud, frame_id=msg.header.frame_id)
        self.point_pub.publish(cloud)

        rospy.loginfo_once("Published the result as topic. check /filt_point2")
Example 18
    def run_publisher(self):
        '''
        Publisher thread main loop
        '''
        rospy.loginfo_once("Starting publisher Thread")
        try:
            while not rospy.is_shutdown() and self.connection_status:
                self.connection_status = self._robot.get_connection_status()
                self.publish()
                self._ros_rate.sleep()
        except KeyboardInterrupt:
            pass
Example 19
def talker():
    rospy.init_node('steering_sweeper', anonymous=True)
    rate = rospy.Rate(20) # 20hz
    steer_class = dbw_mkz_msgs.msg.SteeringCmd()
    steer_pub = rospy.Publisher('vehicle/steering_cmd', dbw_mkz_msgs.msg.SteeringCmd, queue_size=2)
    inputval = float(input("Please enter the desired steering angle in DEGREES: "))
    desSteerAngleRad = inputval * (math.pi / 180)  # convert from degrees to radians for the SteeringCmd
    steer_class.enable = True
    steer_class.steering_wheel_angle_cmd = desSteerAngleRad
    steer_class.steering_wheel_angle_velocity = 2  # hard-coded here
    while not rospy.is_shutdown():
        rospy.loginfo_once('Published Steering Command')
        steer_pub.publish(steer_class)
        rate.sleep()
Example 20
    def publish_occupancygrid(self, gridmap, stamp):
        # Convert gridmap to ROS supported data type : int8[]
        # http://docs.ros.org/en/melodic/api/nav_msgs/html/msg/OccupancyGrid.html
        # The map data, in row-major order, starting with (0,0).  Occupancy probabilities are in the range [0,100].  Unknown is -1.
        gridmap_p = l2p(gridmap)
        #unknown_mask = (gridmap_p == self.sensor_model_p_prior)  # for setting unknown cells to -1
        gridmap_int8 = (gridmap_p * 100).astype(dtype=np.int8)
        #gridmap_int8[unknown_mask] = -1  # for setting unknown cells to -1

        # Publish map
        self.map_msg.data = gridmap_int8
        self.map_msg.header.stamp = stamp
        self.map_pub.publish(self.map_msg)
        rospy.loginfo_once("Published map!")
def main():

    rospy.init_node('node_t5_qr_decode_service', anonymous=True)
    rospy.sleep(6)
    cam = Camera1()

    # wait for process to finish
    rospy.sleep(4)
    packages = str(cam._pc)
    print('Packages Detected = ' + str(len(cam._pc)))
    rospy.loginfo_once(str(cam._pc))
    s = rospy.Service('/2Dcamera_packages_type', camera_packages,
                      cam.callback_service_on_request)

    pkg = {
        'red': ['R', 'HP', 'Medicine', '450'],
        'green': ['G', 'LP', 'Clothes', '150'],
        'yellow': ['Y', 'MP', 'Food', '250']
    }
    count = 0
    for i in cam._pc.keys():
        count += 1
        pkg_location = i[-2:]
        colour = cam._pc[i]
        pkg_sku = pkg[colour][0] + pkg_location + '0121'
        pkg_item = pkg[colour][2]
        pkg_priority = pkg[colour][1]
        pkg_storage = 'R' + pkg_location[0] + ' C' + pkg_location[1]
        pkg_cost = pkg[colour][3]

        info = {
            'id': 'Inventory',
            'Team Id': 'VB#693',
            'Unique Id': 'RRCneYRC',
            'SKU': pkg_sku,
            'Item': pkg_item,
            'Priority': pkg_priority,
            'Storage Number': pkg_storage,
            'Cost': pkg_cost,
            'Quantity': '1'
        }
        message = str(info)
        goal_handle = cam.send_goal_to_mqtt_client("spreadsheet", "pub",
                                                   cam._config_mqtt_pub_topic,
                                                   message)
        cam._goal_handles['Inventory:' + str(count)] = goal_handle
        rospy.sleep(1)

    rospy.loginfo("Goal Sent")
    rospy.spin()
Example 22
    def connect(self):
        self.every_event_listener.subscribe()

        rate = rospy.Rate(1)  # 1hz
        while True:
            self.pub_state.publish("CONNECTING")
            connection = self.drone.connect()
            if getattr(connection, 'OK'):
                break
            if rospy.is_shutdown():
                exit()
            rate.sleep()

        # Connect to the SkyController
        if rospy.get_param("/skycontroller"):
            self.pub_state.publish("CONNECTED_SKYCONTROLLER")
            rospy.loginfo("Connection to SkyController: " +
                          getattr(connection, 'message'))
            self.switch_manual()

            # Connect to the drone
            while True:
                if self.drone(
                        connection_state(state="connected", _policy="check")):
                    break
                if rospy.is_shutdown():
                    exit()
                else:
                    self.pub_state.publish("SERCHING_DRONE")
                    rospy.loginfo_once(
                        "Connection to Anafi: " +
                        str(self.drone.get_state(connection_state)["state"]))
                rate.sleep()
            self.pub_state.publish("CONNECTED_DRONE")
            rospy.loginfo("Connection to Anafi: " +
                          str(self.drone.get_state(connection_state)["state"]))
        # Connect to the Anafi
        else:
            self.pub_state.publish("CONNECTED_DRONE")
            rospy.loginfo("Connection to Anafi: " +
                          getattr(connection, 'message'))
            self.switch_offboard()

        self.frame_queue = queue.Queue()
        self.flush_queue_lock = threading.Lock()

        # Setup the callback functions to do some live video processing
        self.drone.set_streaming_callbacks(raw_cb=self.yuv_frame_cb,
                                           flush_raw_cb=self.flush_cb)
        self.drone.start_video_streaming()
Example 23
    def _cb_localPos(self, msg):
        self.curPos = msg
        if self.first:
            self.first = False
            if self.uavState.armed and not self.isAirbourne:
                if self.curPos.pose.position.z > 1.0:
                    # if the node is started and the drone is already airborne,
                    # take control and loiter
                    rospy.loginfo_once(
                        "UAV is airborne, Companion Computer taking control")
                    self.isAirbourne = True
                    self.companion_has_control = True
                    self.messageHandlerPub.publish(self.companion_has_control)
                    self.setState('loiter')
Example 24
	def capture_handler(self):
		clear_neib = True
		temp_state_oppo_neigh = {k:v for k, v in self.state_oppo_neigh.items()}
		# print(temp_state_oppo_neigh)

		for i in temp_state_oppo_neigh:
			if self.is_capture(i):
				self.caplist.append(i)
				rospy.loginfo_once(str(self)+' reports: '+i+' is captured')
			else:
				clear_neib = False

		if clear_neib:
			rospy.loginfo_once(str(self)+' reports: not seeing any intruders')
Example 25
def main():
    rospy.init_node('rgb_tset')
    br = tf.TransformBroadcaster()
    rate = rospy.Rate(10.0)
    # compute the quaternion for roll = -90 deg, pitch = 0, yaw = -90 deg
    x, y, z, w = tf.transformations.quaternion_from_euler(-np.pi / 2, 0, -np.pi / 2)
    while not rospy.is_shutdown():
        rospy.loginfo_once("rgb_OK")
        # sendTransform(translation, quaternion, current time, name of the new tf frame, parent tf frame name)
        br.sendTransform((0, 0, 0),
                         (x, y, z, w),
                         rospy.Time.now(),
                         "tb3_1/rgb_test",
                         "tb3_1/cam_test")
        rate.sleep()
Example 26
    def show_search_camera_prop(self):
        if self._pubvideo is not None:
            while self._color_counter in self._color_not_accepted:
                rospy.loginfo_once("Search")
                self._color_counter += 1
            try:
                cv2.imshow(self._cv2WindowName, cv2.cvtColor(self._pubvideo, self._color_counter))
                # cv2.imshow(self._cv2WindowName, cv2.cvtColor(self._pubvideo, 2))
                rospy.loginfo(f'_color_counter = {self._color_counter}')

            except Exception as e:
                rospy.loginfo(f'_color_counter = {self._color_counter} not accepted: {e}')
            self._color_counter += 1
            if self._color_counter > self._color_counter_max:
                self._color_counter = self._color_counter_min
    def __init__(self):
        self.ready = False
        self.got_position = False
        self.airborne = False

        rospy.init_node('pixhawk_takeoff', anonymous=True)
        self.takeoff_height = rospy.get_param('~takeoff_height')

        self.subscriber_pos = rospy.Subscriber('~global_pos_in', NavSatFix, self.callback_global_pos)
        self.service_arming = rospy.ServiceProxy('~arming_out', CommandBool)
        self.service_set_mode = rospy.ServiceProxy('~mode_out', SetMode)
        self.service_push_wp = rospy.ServiceProxy('~waypoint_push', WaypointPush)
        
        self.ready = True

        while not self.got_position and not rospy.is_shutdown():
            rospy.sleep(0.1)
            rospy.loginfo_once('Waiting for vehicle position...')

        # #{ Prepare mission
        wp0 = Waypoint()
        wp0.frame = 3 # GLOBAL_REL_ALT
        wp0.command = 22 # TAKEOFF
        wp0.is_current = True
        wp0.x_lat = self.latitude
        wp0.y_long = self.longitude
        wp0.z_alt = self.takeoff_height
        wp0.autocontinue = True

        wp1 = Waypoint()
        wp1.frame = 3 # GLOBAL_REL_ALT
        wp1.command = 17 # NAV_LOITER_UNLIM
        wp1.is_current = False
        wp1.x_lat = self.latitude
        wp1.y_long = self.longitude
        wp1.z_alt = self.takeoff_height
        wp1.autocontinue = True
        # #}
        
        resp = self.service_push_wp.call(0, [wp0, wp1])
        print(resp.success)
        print(resp.wp_transfered)
        self.switch_to_mode('AUTO.MISSION')

        while not self.call_arming() and not rospy.is_shutdown():
            rospy.logerr_once('Arming failed. Retrying...')
            rospy.sleep(0.01)
        rospy.loginfo('Vehicle armed')
    def ocp_result_callback(self, data):
        rospy.loginfo_once("First message received.")
        if not self.initialized:
            self.dim_states = data.dim_states
            self.dim_controls = data.dim_controls

        # Read data (exception-safe locking via the context manager)
        with self.mutex:
            if self.plot_states:
                self.tx = np.array(data.time_states)
                self.x = np.matrix(data.states)
                self.x = np.reshape(self.x, (self.tx.size, int(self.dim_states)))
            self.tu = np.array(data.time_controls)
            self.u = np.matrix(data.controls)
            self.u = np.reshape(self.u, (self.tu.size, int(self.dim_controls)))
Example 29
    def connect(self):

        # publish a burst of zero-velocity commands before waiting for the connection
        vel = Twist()
        for i in range(100):
            self.cmd_vel_pub.publish(vel)

        while not self.state.connected:
            rospy.loginfo_once("Connecting to drone")
            rospy.sleep(0.01)

        rospy.loginfo("Connected to Drone")

        while self.state.mode != "GUIDED":
            rospy.loginfo_once("Waiting for mode to set to `GUIDED`")
            rospy.sleep(0.01)

        rospy.loginfo("Mode set to `GUIDED`")
Example 30
def sync_save(image1, image2):
    imgl = conv.imgmsg_to_cv2(image1)
    imgr = conv.imgmsg_to_cv2(image2)
    # both files share the left image's timestamp as their filename,
    # so the stereo pair lines up across image_0/ and image_1/
    namel = str(image1.header.stamp.secs) + '.' + str(image1.header.stamp.nsecs) + '.jpg'
    namer = str(image1.header.stamp.secs) + '.' + str(image1.header.stamp.nsecs) + '.jpg'
    dirl = "/media/E/All_codes/A_lab/A_grad/stero_slam/slam_catkin_ws/src/stereo_dso_ros/data/" + dataDir + "/image_0"
    dirr = "/media/E/All_codes/A_lab/A_grad/stero_slam/slam_catkin_ws/src/stereo_dso_ros/data/" + dataDir + "/image_1"
    if not os.path.exists(dirl):
        os.makedirs(dirl)
    if not os.path.exists(dirr):
        os.makedirs(dirr)
    
    cv.imwrite(dirl + '/' + namel, imgl)
    cv.imwrite(dirr + '/' + namer, imgr)

    rospy.loginfo_once("image saved to " + dirl + '/' + namel)
    rospy.loginfo_once("image saved to " + dirr + '/' + namer)