Code Example #1
File: sensors.py Project: IRLL/Inverted-Pendulum
    def sensor_callback(self, data):
        status = PendulumPose()
        status.header = Header()
        status.header.stamp = rospy.Time.now()

        now = rospy.get_time()
        delta = now - self.prev_time

        status.x = self.pos_filter.run(self.get_position(data.data[1]))

        if abs(status.x) > self.soft_edge:
            self.edge_hit = True

        status.edge = self.edge_hit



        if self.leftLim:
            status.x = -self.track_length/2
        if self.rightLim:
            status.x = self.track_length/2


        # the upper byte of data.data[2] carries the encoder status flags
        status_bits = data.data[2] >> 8

        md = status_bits & (1 << 5)  # magnet detected
        ml = status_bits & (1 << 4)  # magnet strength low
        mh = status_bits & (1 << 3)  # magnet strength high


        # override magnet detection
        md = 1

        if md:
            self.current_unrotated_theta = (data.data[0] & 0xFFFF) * self.theta_conv
            status.theta = self.rotate(self.current_unrotated_theta)

        else:
            status.theta = 0
            # rospy.logerr("magnet not detected!")

        if ml: rospy.loginfo("magnet strength low")
        if mh: rospy.loginfo("magnet strength high")


        try:
            status.thetaDot = (status.theta - self.prev_theta)/delta
            status.xDot = self.vel_filter.run((status.x - self.prev_x)/delta)
        except ZeroDivisionError:
            rospy.logwarn("delta is 0, no time passed?")


        self.sensor_pub.publish(status)

        self.prev_time = now
        self.prev_theta = status.theta
        self.prev_x = status.x
Code Example #2
File: client_cmd.py Project: hugoxia/code
    def on_channel_closed(self, channel, reply_code, reply_text):
        """Invoked by pika when RabbitMQ unexpectedly closes the channel.
        Channels are usually closed if you attempt to do something that
        violates the protocol, such as re-declare an exchange or queue with
        different parameters. In this case, we'll close the connection
        to shut down the object.

        :param pika.channel.Channel channel: The closed channel
        :param int reply_code: The numeric reason the channel was closed
        :param str reply_text: The text reason the channel was closed

        """
        rospy.logwarn('Channel %i was closed: (%s) %s',
                      channel, reply_code, reply_text)
        self._connection.close()
Code Example #3
File: client_cmd.py Project: hugoxia/code
    def on_connection_closed(self, connection, reply_code, reply_text):
        """This method is invoked by pika when the connection to RabbitMQ is
        closed unexpectedly. Since it is unexpected, we will reconnect to
        RabbitMQ if it disconnects.

        :param pika.connection.Connection connection: The closed connection obj
        :param int reply_code: The server provided reply_code if given
        :param str reply_text: The server provided reply_text if given

        """
        self._channel = None
        if self._closing:
            self._connection.ioloop.stop()
        else:
            rospy.logwarn('Connection closed, reopening in 5s: (%s) %s',
                          reply_code, reply_text)
            self._connection.add_timeout(5, self.reconnect)
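
The `reconnect` method scheduled above is not shown in this example. A minimal sketch of what it could look like, following the standard pika asynchronous-consumer pattern; it assumes the class also defines a `connect()` method that builds and returns a new `pika.SelectConnection`:

    def reconnect(self):
        """Invoked by add_timeout once the old connection has closed."""
        # Stop the IOLoop that belonged to the old, now-closed connection
        self._connection.ioloop.stop()

        if not self._closing:
            # connect() is assumed to return a fresh pika.SelectConnection
            self._connection = self.connect()

            # Run the IOLoop of the new connection
            self._connection.ioloop.start()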
Code Example #4
    def handleRequest(self, cmdName, request, stopMethod):
        rospy.logdebug("Received request: {}".format(cmdName))
        rospy.logdebug("Duration: {}".format(request.duration))
        rospy.logdebug("Magnitude: {}".format(request.magnitude))

        # Do not run with invalid parameters
        if request.duration == 0:
            rospy.logwarn("Received action with duration 0")
            return

        # Get reference to the service proxy
        # And initiate the action
        serviceFunction = SambucaCmdFunc[cmdName]
        serviceFunction(request.magnitude)

        # Run a timer callback to stop the action
        duration = rospy.Duration(request.duration)
        rospy.Timer(period=duration, callback=stopMethod, oneshot=True)
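
Note that `rospy.Timer` calls its callback with a `rospy.timer.TimerEvent` argument, so the `stopMethod` handed in above must accept it. A minimal sketch of a compatible stop callback; the `stopServiceFunction` proxy is a hypothetical placeholder, not part of the original project:

    def stopAction(self, event):
        # event is a rospy.timer.TimerEvent supplied by rospy.Timer
        rospy.logdebug("Stopping action at {}".format(event.current_real))
        # Hypothetical: invoke whatever service proxy halts the hardware action
        self.stopServiceFunction()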
Code Example #5
 def goal_callback(self, msg):
     success = False
     rate = rospy.Rate(1)
     for curTime in range(self.timeOut):
         if self.getOdomDataActionServer.is_preempt_requested():
             rospy.logwarn("action server pre-empted")
             success = False
             break
         self.odomData.append(self.odomReader.read())
         #print(self.odomData)
         outOfMaze = check_if_out_maze(self.distGoal, self.odomData)
         if outOfMaze:
             success = True
             break
         rate.sleep()
     if success:
         self.result.result_odom_array = self.odomData
         self.getOdomDataActionServer.set_succeeded(self.result)
     else:
         rospy.loginfo("spheroDistActionServer timed out")
         self.getOdomDataActionServer.set_aborted()  # mark the goal aborted on timeout
Code Example #6
 def callback(self, goal):
   rospy.loginfo("odomActionServer goal requested")
   success = False
   rate = rospy.Rate(1)
   for curTime in range(self.timeOut):
     if self.actionServer.is_preempt_requested():
       rospy.logwarn("action server pre-empted")
       success = False
       break
     self.odomData.append(self.odomReader.read())
     #print(self.odomData)
     outOfMaze = self.check_if_out_maze(self.distTarget, self.odomData)
     if outOfMaze:
       success = True
       break
     rate.sleep()
   self.result.result_odom_array = self.odomData
   if success:
     self.actionServer.set_succeeded(self.result)
   else:
     rospy.loginfo("odomActionServer timed out")
     self.actionServer.set_aborted()
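
For context, execute callbacks like the two above are normally registered on an actionlib `SimpleActionServer`. A minimal sketch of that wiring, where the node name, action name, and the `RecordOdomAction` type are illustrative assumptions rather than names from the original project:

#!/usr/bin/env python
import rospy
import actionlib
from my_sphero_actions.msg import RecordOdomAction  # assumed action definition

class OdomActionServer(object):
    def __init__(self):
        # execute_cb runs once per accepted goal, like callback() above
        self.actionServer = actionlib.SimpleActionServer(
            "record_odom", RecordOdomAction,
            execute_cb=self.callback, auto_start=False)
        self.actionServer.start()

    def callback(self, goal):
        pass  # see the examples above

if __name__ == "__main__":
    rospy.init_node("odom_action_server")
    OdomActionServer()
    rospy.spin()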
Code Example #7
File: playback.py Project: kewangtt/ShutTUM
is_calib = ros.get_param('~calibration', False)
distmodel = ros.get_param('~distortion_model', 'fov')

bridge = CvBridge()
sequence = ros.get_param('~sequence')
dataset = Sequence(sequence, calibration=is_calib)

msg = 'Playback started [%s]' % sequence
if loop: msg += ' [LOOP]'
if start: msg += ' [START: %s s]' % start
else: start = None
if end: msg += ' [END: %s s]' % end
else: end = None
if speed != 1: msg += ' [%.2fx]' % speed
if speed == 0:
    ros.logwarn('Speed is zero, results might not be as expected')
ros.loginfo(msg)

# Create Publishers
clock = ros.Publisher('/clock', Clock, queue_size=10)
tffer = tf.TransformBroadcaster()
imu = ros.Publisher('/imu', Imu, queue_size=10)
CamPub = namedtuple('CamPub', 'img cam exp')


def create_camera_publisher(prefix):
    return (prefix,
            CamPub(img=ros.Publisher(prefix + '/' + img_topic,
                                     Image,
                                     queue_size=1),
                   cam=ros.Publisher(prefix + '/camera_info',
Code Example #8
    def execute(self, goal):
        ######################################################################
        # helper variables
        # Override for debug:
        # self.f.write(goal)

        r = rospy.Rate(1000)
        success = True
        rospy.logdebug("-D- robot_action in execute_cb")
        # publish info to the console for the user
        rospy.loginfo('%s: Executing trajectory' % self._action_name)
        
        # start executing the action
        if self._as.is_preempt_requested():
            # rospy.loginfo('%s: Preempted' % self._action_name)
            self._as.set_preempted()
            success = False
        else:
            #self.command_msg.header.stamp = rospy.Time.now()
            #self.command_msg.name = goal.trajectory.joint_names
            #self._feedback.joint_names = goal.trajectory.joint_names
            
            self.set_jointstate_lookup(goal.trajectory.joint_names) 
            rospy.loginfo("robot_action looping over %d points" % len(goal.trajectory.points))
            start_time = rospy.Time.now()

            for i in range(len(goal.trajectory.points)):
                t = goal.trajectory.points[i].time_from_start
                rospy.loginfo("setting joints to %s, t: %s, time since start: %s" % ( str(goal.trajectory.points[i].positions), str(t), str(rospy.Time.now() - start_time)))
                

                ## check for empty arrays:
                # rospy.loginfo("lengths: command_msg.joints: %d robot_state_index: %d positions: %d" % (
                #            len( self.command_msg.joints ), len( self.robot_state_index), len( goal.trajectory.points[i].positions)))
                if len(self.robot_state_index) == 0:
                    rospy.logwarn("robot_state_index is empty")
                    rospy.sleep(0.001)
                    continue
                if len(self.command_msg.joints) == 0:
                    rospy.logwarn("command_msg is empty")
                    rospy.sleep(0.001)
                    continue

                for j in range(len(goal.trajectory.points[i].positions)):
                    self.command_msg.joints[ self.robot_state_index[j] ] = goal.trajectory.points[i].positions[j] 
                # rospy.logdebug("set command_msg.joints to %s" % str( self.command_msg.joints ))

                self.command_pub.publish(self.command_msg)
                # rospy.logdebug("robot_action waiting to reach goal #%d" % i)
                success = self.waitToReachGoal(self.command_msg)

                # wait for arrival time to make sure it's timed right
                while rospy.Time.now() - start_time < t:
                    rospy.sleep(0.025)

                if success:
                    rospy.loginfo("robot_action reached goal")
                    #self._feedback.actual.positions = self.robot_state
                    #self._as.publish_feedback(self._feedback)
                if not success:
                    rospy.logerr("%s: error: did not reach goal" % self._action_name)
                    #self._as.set_aborted(self._result)
          #         self._as.set_aborted()
                    return 
                # r.sleep()
            
        # publish the feedback
        #self._as.publish_feedback(self._feedback)
          
        if success:
            rospy.loginfo('%s: Succeeded' % self._action_name)
            #self._as.set_succeeded(self._result)
            self._as.set_succeeded()
Code Example #9
    def run(self):
        '''
        while not rospy.is_shutdown():
            ret, frame = self.cap.read()
            self.r.sleep()
        '''
        for frame in self.cam.capture_continuous(self.rawCapture,
                                                 format="bgr",
                                                 use_video_port=True):

            #--------- img pre-process----------------
            cv_image = frame.array
            self.grey = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
            self.grey = cv2.equalizeHist(self.grey)
            self.grey = cv2.GaussianBlur(self.grey, (7, 7), 0)
            self.rawCapture.truncate(0)  # clear the stream buffer before the next capture

            # for display purposes
            vis = self.grey.copy()

            #---------  Subtractor - Track every frame motion ------------
            if self.robot_view_motion is False:

                if self.Vp_MD_Subtr_UseMog:
                    cnts = self.subtrMog(self.grey)
                elif Vp_MD_Subtr_UseFrmDiff:
                    cnts = FrmDiff(self.prev_grey, self.grey)
                else:  # use avg
                    cnts = avgDiff(self.grey)

                # got diff. loop over the contours
                if len(cnts) > 0:

                    c_max = cnts[0]
                    for c in cnts:
                        # discard little change
                        if cv2.contourArea(c) < self.min_area:
                            continue
                        if cv2.contourArea(c_max) < cv2.contourArea(c):
                            c_max = c
                        if DBG_WINDOW_DETECT_BOX:
                            (x_, y_, w_, h_) = cv2.boundingRect(c)
                            cv2.rectangle(vis, (x_, y_), (x_ + w_, y_ + h_),
                                          (255, 0, 0), 2)

                    if cv2.contourArea(
                            c_max
                    ) > self.min_area:  # avoid many too small c in frame
                        (x, y, w, h) = cv2.boundingRect(c_max)
                        cv2.rectangle(vis, (x, y), (x + w, y + h), (0, 255, 0),
                                      4)
                        if DBG_WINDOW_DETECT_BOX:
                            cv2.imshow("[1]subtractor_boundingRect", vis)
                            key = cv2.waitKey(1)
                        self.detect_box = (x, y, w, h)
                        self.motionDetected = True
                        self.motionDetectedHold = True

                        print("~~~ Motion detected. motionDetected = True")

                    else:
                        print(
                            "//// No motion detected. motionDetected = Fasle")

                    if self.Vp_ME_UseLkTractor:  # if do Motion Estimation, get key points firstly
                        self.keypoints = self.get_keypoints(
                            self.grey, self.detect_box)
                        print("@@@@@@ self.frame_idx, pre get keypoints:",
                              self.frame_idx, self.keypoints)

                    # if have points, display them
                    if self.keypoints is not None and len(self.keypoints) > 0:
                        for x, y in self.keypoints:
                            # thickness=-1 draws a filled circle (replaces the old cv.CV_FILLED)
                            cv2.circle(vis, (x, y), self.feature_size,
                                       (255, 0, 0), -1)

                        if DBG_WINDOW_GET_KEYPOINTS:
                            cv2.imshow("[1]get_keypoints", vis)
                            key = cv2.waitKey(10)

                    # visualMsg.motion_detected,

                    #cv2.putText()

                else:  # len(cnts) <= 0
                    self.motionDetected = False
                    #self.detect_box = None
                    print("//// No motion detected. motionDetected = Fasle")

            else:
                rospy.logwarn(
                    "Err: motion detection while the robot's own view is moving is not implemented!"
                )

            # -------- if find motion, track it ----------
            # LK Tracker
            if self.Vp_ME_UseLkTractor:
                print("///// self.keypoints before lk track is:",
                      self.keypoints)
                if self.keypoints is not None and len(self.keypoints) > 0:

                    self.md_visualMsg.roi = self.lk_tracker.run(
                        self.prev_grey, self.grey, self.keypoints)

            #----- if interval , pub info -----
            if self.frame_idx % self.detect_interval == 0:

                self.md_visualMsg.motion_detected = self.motionDetectedHold
                print("$$$$$ md_visualMsg.motion_detected is:",
                      self.md_visualMsg.motion_detected)
                (self.md_visualMsg.roi.x, self.md_visualMsg.roi.y,
                 self.md_visualMsg.roi.height,
                 self.md_visualMsg.roi.width) = self.detect_box

                self.publish_motion()

                #clear motion detected hold flag after pub
                self.motionDetectedHold = False

                if DEBUG_L2:
                    print(
                        '~loop A interval. motion info Pubed. frame_idx is %d'
                        % self.frame_idx)

            self.frame_idx += 1
            self.prev_grey = self.grey  # keep the current frame for the next iteration's diff

            #clear the stream in preparation for next frame
            self.rawCapture.truncate(0)

            #if DEBUG_L2: print ('~loop once. frame_idx is %d' %self.frame_idx )

            key = cv2.waitKey(10) & 0xFF
            if key == 27:  # esc
                cv2.destroyAllWindows()
                break