Code example #1
    def handle_idle_status(self, msg):
        if msg.enabled:
            self.motionProxy.setBreathEnabled('Body', True)
            self.motionProxy.setBreathEnabled('Head', False)

            if self.enable_leaning_forward:
                self.motionProxy.setBreathEnabled('Legs', False)
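            # ALMotion breathing animation: about 12 breaths per minute at 80% amplitude.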
            self.motionProxy.setBreathConfig([['Bpm', 12], ['Amplitude', 0.8]])

            if self.enable_leaning_forward:
                rospy.sleep(0.6)

                cmd_msg = JointAnglesWithSpeed()
                cmd_msg.header.stamp = rospy.Time.now()
                cmd_msg.joint_names.append('LHipPitch')
                cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
                cmd_msg.joint_names.append('RHipPitch')
                cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
                cmd_msg.speed = 0.1
                cmd_msg.relative = 0

                self.pub_joint_cmd.publish(cmd_msg)
                rospy.sleep(0.2)

        else:
            self.postureProxy.goToPosture("Stand", 0.2)

            self.motionProxy.setBreathEnabled('Body', False)
            self.motionProxy.setBreathEnabled('Head', False)

            if self.enable_leaning_forward:
                self.motionProxy.setBreathEnabled('Legs', False)
Code example #2
File: nao_larm.py  Project: arnabGudu/nao_ws
def larm():
    names = ['LShoulderPitch', 'LShoulderRoll', 'LElbowYaw', 'LElbowRoll']
    pub = rospy.Publisher('/joint_angles', JointAnglesWithSpeed, queue_size=1)
    rospy.init_node('nao_larm', anonymous=True)
    rate = rospy.Rate(10)  # 10hz

    # NOTE: 'filename' is assumed to be defined elsewhere (e.g. a module-level CSV path).
    with open(filename, 'rU') as csvfile:
        csvreader = csv.reader(csvfile)
        for row in csvreader:
            angles = [float(row[i]) for i in range(2, 6)]

            print names, angles
            msg = JointAnglesWithSpeed()
            msg.joint_names = names
            msg.joint_angles = angles
            msg.speed = 1.0

            msg.header.frame_id = 'Fixed Frame'
            pub.publish(msg)
            rate.sleep()
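
A hedged sketch of the CSV layout this loop assumes: columns 2-5 of each row hold the four left-arm joint angles in radians, while the first two columns are unspecified in the snippet (a timestamp and a label are plausible guesses):

    # col0,  col1,  LShoulderPitch, LShoulderRoll, LElbowYaw, LElbowRoll
    # 0.00,  pose1, 1.40,           0.30,          -1.20,     -0.50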
Code example #3
    def set_joint_angles(self, joint_name, angle_value):

        joint_angles_to_set = JointAnglesWithSpeed()
        joint_angles_to_set.joint_names.append(joint_name) # each joint has a specific name, look into the joint_state topic or google
        joint_angles_to_set.joint_angles.append(angle_value) # the joint values have to be in the same order as the names!!
        joint_angles_to_set.relative = False # if true you can increment positions
        joint_angles_to_set.speed = 0.1 # keep this low if you can
        self.jointPub.publish(joint_angles_to_set)
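
The method above assumes a publisher named self.jointPub created elsewhere in the class. A minimal sketch of that setup, assuming the NAO default /joint_angles topic (Pepper setups in later examples publish to /pepper_robot/pose/joint_angles instead):

    import rospy
    from naoqi_bridge_msgs.msg import JointAnglesWithSpeed

    rospy.init_node('joint_angle_setter')
    # Topic name is an assumption; match it to your driver's joint-angle topic.
    jointPub = rospy.Publisher('/joint_angles', JointAnglesWithSpeed, queue_size=10)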
Code example #4
 def stand(self):
     print "STANDING"
     self.__call_service(
         "/naoqi_driver/robot_posture/go_to_posture", GoToPosture,
         GoToPostureRequest(GoToPostureRequest.STAND_INIT, 0.5))
     j = JointAnglesWithSpeed()
     j.joint_names = ['HeadYaw', 'HeadPitch']
     j.joint_angles = [0., -.5]
     j.speed = .05
     self.joints.publish(j)
Code example #5
 def Elbow_yaw_R_moveto(self, arm_angle):  # controls right elbow movement
     joint_angles_to_set = JointAnglesWithSpeed()
     joint_angles_to_set.joint_names.append(
         "RElbowYaw"
     )  # each joint has a specific name, look into the joint_state topic or google
     joint_angles_to_set.joint_angles.append(
         arm_angle
     )  # the joint values have to be in the same order as the names!!
     joint_angles_to_set.relative = False  # if true you can increment positions
     joint_angles_to_set.speed = 0.1  # keep this low if you can
     self.jointPub.publish(joint_angles_to_set)
Code example #6
 def Shoulder_roll_L_moveto(self, angle):  # controls left shoulder movement
     joint_angles_to_set = JointAnglesWithSpeed()
     joint_angles_to_set.joint_names.append(
         "LShoulderRoll"
     )  # each joint has a specific name, look into the joint_state topic or google
     joint_angles_to_set.joint_angles.append(
         angle
     )  # the joint values have to be in the same order as the names!!
     joint_angles_to_set.relative = False  # if true you can increment positions
     joint_angles_to_set.speed = 0.1  # keep this low if you can
     self.jointPub.publish(joint_angles_to_set)
Code example #7
    def handle_enable_leaning_forward(self, msg):
        self.enable_leaning_forward = msg.data
        if msg.data:
            self.motionProxy.setBreathEnabled('Legs', False)

            cmd_msg = JointAnglesWithSpeed()
            cmd_msg.header.stamp = rospy.Time.now()
            cmd_msg.joint_names.append('LHipPitch')
            cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
            cmd_msg.joint_names.append('RHipPitch')
            cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
            cmd_msg.speed = 0.06
            cmd_msg.relative = 0

            self.pub_joint_cmd.publish(cmd_msg)
        else:
            cmd_msg = JointAnglesWithSpeed()
            cmd_msg.header.stamp = rospy.Time.now()
            cmd_msg.joint_names.append('LHipPitch')
            cmd_msg.joint_angles.append(0.0 * math.pi / 180.0)
            cmd_msg.joint_names.append('RHipPitch')
            cmd_msg.joint_angles.append(0.0 * math.pi / 180.0)
            cmd_msg.speed = 0.06
            cmd_msg.relative = 0

            # Publish the upright pose, then re-enable leg breathing.
            self.pub_joint_cmd.publish(cmd_msg)
            self.motionProxy.setBreathEnabled('Legs', True)
Code example #8
    def set_head_angles(self, head_angle):  ## controls head movement

        joint_angles_to_set = JointAnglesWithSpeed()
        joint_angles_to_set.joint_names.append(
            "HeadYaw"
        )  # each joint has a specific name, look into the joint_state topic or google
        joint_angles_to_set.joint_angles.append(
            head_angle
        )  # the joint values have to be in the same order as the names!!
        joint_angles_to_set.relative = False  # if true you can increment positions
        joint_angles_to_set.speed = 0.1  # keep this low if you can
        self.jointPub.publish(joint_angles_to_set)
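        # The same angle value is then reused for HeadPitch below, and the
        # message (now holding both joints) is published a second time.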
        joint_angles_to_set.joint_names.append("HeadPitch")
        joint_angles_to_set.joint_angles.append(head_angle)
        self.jointPub.publish(joint_angles_to_set)
Code example #9
    def handle_gaze_controller(self, event):
        with self.lock:
            try:
                target_point = PointStamped()
                target_point.header.stamp = rospy.Time()
                target_point.header.frame_id = self.target.target_point.header.frame_id
                target_point.point.x = self.target.target_point.point.x
                target_point.point.y = self.target.target_point.point.y
                target_point.point.z = self.target.target_point.point.z
                point_transformed = self.tf_buf.transform(target_point, 'gaze')
            except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
                rospy.logdebug(e)
                rospy.logwarn("[%s] Can't transform target into the 'gaze' frame. [ %s - %s ]" % (rospy.get_name(), 'gaze', self.target.target_point.header.frame_id))
                return

        pan_angle = math.atan2(point_transformed.point.y, point_transformed.point.x)
        tilt_angle = math.atan2(point_transformed.point.z, point_transformed.point.x)

        delta_pan_angle = pan_angle
        delta_tilt_angle = -tilt_angle

        cmd_pan_angle = 0.0
        cmd_tilt_angle = 0.0

        if abs(delta_pan_angle) < (2.0 * math.pi / 180.0):
            cmd_pan_angle = 0.0
        else:
            cmd_pan_angle = 0.3 * delta_pan_angle

        if abs(delta_tilt_angle) < (2.0 * math.pi / 180.0):
            cmd_tilt_angle = 0.0
        else:
            cmd_tilt_angle = 0.3 * delta_tilt_angle


        cmd_msg = JointAnglesWithSpeed()
        cmd_msg.header.stamp = rospy.Time.now()
        cmd_msg.joint_names.append('HeadYaw')
        cmd_msg.joint_angles.append(cmd_pan_angle)
        cmd_msg.joint_names.append('HeadPitch')
        cmd_msg.joint_angles.append(cmd_tilt_angle)
        cmd_msg.speed = self.target.max_speed
        cmd_msg.relative = 1
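        # relative = 1 tells the driver to treat these angles as increments
        # from the current pose, so publishing 0.3 * delta each cycle acts as
        # a proportional controller with the 2-degree deadband above.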

        self.pub_gaze_cmd.publish(cmd_msg)
Code example #10
 def __init__(self):
     self.position = JointAnglesWithSpeed()
     self.position.joint_names = ['HeadPitch', 'HeadYaw']
     self.position.joint_angles = [0.0, 0.0]
     self.head_pub = rospy.Publisher('/pepper_robot/pose/joint_angles',
                                     JointAnglesWithSpeed,
                                     queue_size=1)
     rospy.loginfo("Connected to Head Control.")
Code example #11
    def __init__(self):
        #======================================================================#
        # ROS Setup
        #======================================================================#
        rospy.init_node("headset_control")

        #===== Parameters =====#
        self.frequency = 30
        self.rate = rospy.Rate(self.frequency)

        #----- Pepper Motion Parameters -----#
        self.fraction_max_head_speed = rospy.get_param('~speed_fraction', 0.1)
        self.yaw_offset = rospy.get_param('~yaw_offset', None)

        #----- Calibration Parameters -----#
        self.calibration_time = rospy.get_param('~calibration_time',
                                                3.0)  # sec
        self.calibration_start = 0.0
        self.calibration_average = 0.0

        #----- Joint Names and Angles -----#
        self.joint_names = ['HeadYaw', 'HeadPitch']
        self.angle_setpoints = {}
        for key in self.joint_names:
            self.angle_setpoints[key] = 0.0

        #===== Transform Listener/Broadcaster =====#
        self.tfListener = tf.TransformListener()
        self.tfBroadcaster = tf.TransformBroadcaster()

        #===== Publisher =====#
        self.joint_angles_msg = JointAnglesWithSpeed()
        self.joint_angles_msg.joint_names = self.joint_names
        self.joint_angles_msg.speed = self.fraction_max_head_speed
        self.joint_angles_pub = rospy.Publisher(
            '/pepper_interface/joint_angles',
            JointAnglesWithSpeed,
            queue_size=3)

        #===== Subscriber =====#
        self.calibration_sub = rospy.Subscriber('~calibrate',
                                                Empty,
                                                self.calibrate,
                                                queue_size=1)

        #===== Check for Initial Calibration =====#
        if (self.yaw_offset is None):
            # Perform initial calibration
            print("\n{0}\n{1}{2}\n{0}".format(60 * '=', 24 * ' ',
                                              'Calibration'))
            print("No yaw offset provided. Performing initial calibration.")
            print(
                "Stand facing the desired direction, press 'Enter', and hold for {0} seconds"
                .format(self.calibration_time))
            raw_input("Press Enter:")
            self.calibrate(Empty())
Code example #12
 def __init__(self):
     self.s = JointAnglesWithSpeed()
     self.s.joint_names = ['HeadPitch', 'HeadYaw']
     self.s.relative = 0
     self.s.speed = 0.2
     self.pub_pepper = rospy.Publisher('/pepper_robot/pose/joint_angles',
                                       JointAnglesWithSpeed,
                                       queue_size=0)
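     # Note: queue_size=0 creates an infinite outgoing queue in rospy, which
     # is generally discouraged; a small positive value is safer.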
     self.pub_node = rospy.Publisher('head_movement_done',
                                     Bool,
                                     queue_size=1)
     self.sub = rospy.Subscriber('head_movement_start', String,
                                 self.move_head_listener)
Code example #13
 def publish_joints(self, names, angles):
     m = JointAnglesWithSpeed()
     m.header.stamp = rospy.Time.now()
     m.joint_names = names
     m.joint_angles = angles
     m.relative = 0
     m.speed = 1
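     # speed = 1 commands the joints at maximum speed; other examples in this
     # collection keep this fraction low (e.g. 0.1) for safety.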
     self.__pub_joints.publish(m)
Code example #14
 def idle(self):
     j = JointAnglesWithSpeed()
     j.joint_names = ['HeadYaw', 'HeadPitch']
     j.joint_angles = [.5, -.5]
     j.speed = .05
     pub = rospy.Publisher("/joint_angles",
                           JointAnglesWithSpeed,
                           queue_size=10)
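      # A freshly created publisher needs a moment to connect to subscribers,
      # so the first message published in the loop below may be dropped.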
     while not rospy.is_shutdown() and self.is_idle:
         try:
             rospy.wait_for_message("/naoqi_driver_node/people_detected",
                                    PersonDetectedArray,
                                    timeout=5.)
         except (rospy.ROSException, rospy.ROSInterruptException):
             #                j.joint_angles[0] *= -1.
             #                j.joint_angles[1] = np.random.rand()-.5
             j.joint_angles = [
                 np.random.rand() - .5, -(np.random.rand() * .6)
             ]
             pub.publish(j)
Code example #15
    def __init__(self):
        #======================================================================#
        # ROS Setup
        #======================================================================#
        rospy.init_node("test_control")

        #===== Parameters =====#
        self.frequency = 30
        self.rate = rospy.Rate(self.frequency)
        """
        Control Types (default = trajectory):
          trajectory: use the dcm trajectory control topics, for this option
                      you need to be running the appropriate launch file for
                      trajectory control.
          position: use the dcm position control topics, for this option you
                    need to be running the appropriate launch file for
                    position control.
          almotion: use the ALMotion python library, for this option
                    you need to provide the robot's IP address and Port as
                    parameters to this node.
          joint_angles: use the /pepper_robot/pose/joint_angles topic.
        """
        self.control_types = [
            'trajectory', 'position', 'almotion', 'joint_angles'
        ]
        self.control_types_str = "["
        for i in range(len(self.control_types)):
            if i < (len(self.control_types) - 1):
                self.control_types_str += self.control_types[i] + ", "
            else:
                self.control_types_str += self.control_types[i] + "]"

        # Validate "control_type" parameter
        self.control_type = rospy.get_param('~control_type', 'almotion')
        if (self.control_type not in self.control_types):
            rospy.logwarn(
                "[" + rospy.get_name() +
                "]: The control_type parameter '%s' does not match any of the available control types:\n%s\nSetting to 'almotion'",
                self.control_type, self.control_types_str)
            self.control_type = 'almotion'
        rospy.loginfo("[" + rospy.get_name() + "]: using control type: " +
                      self.control_type)

        #----- Network connection -----#
        # only if using ALMotion
        self.robot_ip = rospy.get_param('~robot_ip', '127.0.0.1')
        self.robot_port = rospy.get_param('~port', 9559)

        #===== Publishers =====#
        # Set publishers based on control type
        # (trajectory is set as the "else" statement in order to make it the
        # default)
        if self.control_type == self.control_types[3]:
            #----- joint_angles control -----#
            self.joint_angles_pub = rospy.Publisher(
                '/pepper_robot/pose/joint_angles',
                JointAnglesWithSpeed,
                queue_size=3)
            self.joint_angles_msg = JointAnglesWithSpeed()
            self.joint_angles_msg.header.seq = 0
            self.joint_angles_msg.speed = 0.1
        elif self.control_type == self.control_types[2]:
            #----- almotion control -----#
            # Establish connection with robot
            self.motionProxy = ALProxy("ALMotion", self.robot_ip,
                                       self.robot_port)
            self.postureProxy = ALProxy("ALRobotPosture", self.robot_ip,
                                        self.robot_port)
            self.motionProxy.wakeUp()
            self.postureProxy.goToPosture('StandInit', 0.5)

            # Head
            self.HeadJoints = ['HeadYaw', 'HeadPitch']

            # Left Arm
            self.LeftArmJoints = [
                'LShoulderPitch', 'LShoulderRoll', 'LElbowYaw', 'LElbowRoll'
            ]

            # Right Arm
            self.RightArmJoints = [
                'RShoulderPitch', 'RShoulderRoll', 'RElbowYaw', 'RElbowRoll'
            ]
        elif self.control_type == self.control_types[1]:
            #----- position control -----#
            # Head
            self.HeadYaw_pub = rospy.Publisher(
                "/pepper_dcm/HeadYaw_position_controller/command",
                Float64,
                queue_size=10)
            self.HeadYaw_msg = Float64()
            self.HeadPitch_pub = rospy.Publisher(
                "/pepper_dcm/HeadPitch_position_controller/command",
                Float64,
                queue_size=10)
            self.HeadPitch_msg = Float64()

            # Left Arm
            self.LShoulderPitch_pub = rospy.Publisher(
                "/pepper_dcm/LShoulderPitch_position_controller/command",
                Float64,
                queue_size=10)
            self.LShoulderPitch_msg = Float64()
            self.LShoulderRoll_pub = rospy.Publisher(
                "/pepper_dcm/LShoulderRoll_position_controller/command",
                Float64,
                queue_size=10)
            self.LShoulderRoll_msg = Float64()
            self.LElbowYaw_pub = rospy.Publisher(
                "/pepper_dcm/LElbowYaw_position_controller/command",
                Float64,
                queue_size=10)
            self.LElbowYaw_msg = Float64()
            self.LElbowRoll_pub = rospy.Publisher(
                "/pepper_dcm/LElbowRoll_position_controller/command",
                Float64,
                queue_size=10)
            self.LElbowRoll_msg = Float64()

            # Right Arm
            self.RShoulderPitch_pub = rospy.Publisher(
                "/pepper_dcm/RShoulderPitch_position_controller/command",
                Float64,
                queue_size=10)
            self.RShoulderPitch_msg = Float64()
            self.RShoulderRoll_pub = rospy.Publisher(
                "/pepper_dcm/RShoulderRoll_position_controller/command",
                Float64,
                queue_size=10)
            self.RShoulderRoll_msg = Float64()
            self.RElbowYaw_pub = rospy.Publisher(
                "/pepper_dcm/RElbowYaw_position_controller/command",
                Float64,
                queue_size=10)
            self.RElbowYaw_msg = Float64()
            self.RElbowRoll_pub = rospy.Publisher(
                "/pepper_dcm/RElbowRoll_position_controller/command",
                Float64,
                queue_size=10)
            self.RElbowRoll_msg = Float64()
        else:
            #----- trajectory control -----#
            # Head
            self.Head_pub = rospy.Publisher(
                "/pepper_dcm/Head_controller/command",
                JointTrajectory,
                queue_size=10)
            self.Head_msg = JointTrajectory()
            self.Head_msg.joint_names = ['HeadYaw', 'HeadPitch']

            # Left Arm
            self.LeftArm_pub = rospy.Publisher(
                "/pepper_dcm/LeftArm_controller/command",
                JointTrajectory,
                queue_size=10)
            self.LeftArm_msg = JointTrajectory()
            self.LeftArm_msg.joint_names = [
                'LShoulderPitch', 'LShoulderRoll', 'LElbowYaw', 'LElbowRoll',
                'LWristYaw'
            ]

            # Right Arm
            self.RightArm_pub = rospy.Publisher(
                "/pepper_dcm/RightArm_controller/command",
                JointTrajectory,
                queue_size=10)
            self.RightArm_msg = JointTrajectory()
            self.RightArm_msg.joint_names = [
                'RShoulderPitch', 'RShoulderRoll', 'RElbowYaw', 'RElbowRoll',
                'RWristYaw'
            ]

            #===== Subscriber =====#
            # JointState feedback
            self.JointState_sub = rospy.Subscriber("/joint_states",
                                                   JointState,
                                                   self.jointStateCallback,
                                                   queue_size=10)

        #===== Set Desired Starting Pose =====#
        self.head_yaw = -2.0
        self.head_pitch = 0.0
        self.l_shoulder_pitch = 0.0
        self.l_shoulder_roll = 0.0
        self.l_elbow_yaw = 0.0
        self.l_elbow_roll = 0.0
        self.r_shoulder_pitch = 0.0
        self.r_shoulder_roll = 0.0
        self.r_elbow_yaw = 0.0
        self.r_elbow_roll = 0.0

        #===== Previous Angle Positions =====#
        self.head_yaw_current = 0.0
        self.head_pitch_current = 0.0
        self.l_shoulder_pitch_current = 0.0
        self.l_shoulder_roll_current = 0.0
        self.l_elbow_yaw_current = 0.0
        self.l_elbow_roll_current = 0.0
        self.r_shoulder_pitch_current = 0.0
        self.r_shoulder_roll_current = 0.0
        self.r_elbow_yaw_current = 0.0
        self.r_elbow_roll_current = 0.0

        #===== Start Dynamic Reconfigure Server =====#
        # This allows you to change the angles using a slider
        # Start the dynamic reconfigure GUI by using the command
        # $ rosrun rqt_gui rqt_gui -s reconfigure
        reconfig_srv = Server(TestControlConfig, self.dynamicReconfigCallback)
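
A hedged sketch of how the joint_angles branch above might later be used in this node's control loop (the loop and the angle sources are illustrative, not from the source):

        # Assumes control_type == 'joint_angles'
        while not rospy.is_shutdown():
            self.joint_angles_msg.header.stamp = rospy.Time.now()
            self.joint_angles_msg.joint_names = ['HeadYaw', 'HeadPitch']
            self.joint_angles_msg.joint_angles = [self.head_yaw, self.head_pitch]
            self.joint_angles_pub.publish(self.joint_angles_msg)
            self.rate.sleep()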
Code example #16
    def callback_waving(self, location):
        '''
        This function takes the information that waving has been detected and
        decides whether Pepper has to turn his head (when the waving is not in
        the center of view) or publishes the current head position.
        '''
        a, b = location.data.split(" ")
        self.counter2 = 0
        locationlist = [int(a), int(b)]
        if locationlist[0] < 0 and locationlist[1] < 0:
            newpos = abs(locationlist[0] - locationlist[1])
            if newpos > 10:
                newpos = 10
            msg = JointAnglesWithSpeed()
            msg.joint_names = ["HeadYaw"]
            msg.joint_angles = [newpos * almath.TO_RAD]
            msg.speed = 0.05
            msg.relative = 1
            self.joint_angles_pub.publish(msg)

        elif locationlist[0] > 0 and locationlist[1] > 0:
            newpos = abs(locationlist[0] - locationlist[1])
            if newpos > 10:
                newpos = 10
            msg = JointAnglesWithSpeed()
            msg.joint_names = ["HeadYaw"]
            msg.joint_angles = [-newpos * almath.TO_RAD]
            msg.speed = 0.05
            msg.relative = 1
            self.joint_angles_pub.publish(msg)

        else:
            self.counter = self.counter + 1

            # This branch reports that a waving person has been found in the
            # direction Pepper is already looking. The numbers at the end of
            # the sent string give the current angle of Pepper's head, not the
            # direction he is supposed to turn to. It uses a custom message
            # type that hasn't been defined yet.
            msg = PositionCommand()
            msg.command = "go"
            msg.location = "waving " + str(self.position)
            self.go_to_pub.publish(msg)
Code example #17
File: tracker.py  Project: rosen22004/rapps-nao
    def track_bounding_box(self, polygon):
        self.hunt_initiated = True

        # Set the timeout counter to 2 seconds
        self.lost_object_counter = 20

        # Velocities message initialization
        joint = JointAnglesWithSpeed()
        joint.joint_names.append("HeadYaw")
        joint.joint_names.append("HeadPitch")
        joint.speed = 0.1
        joint.relative = True

        # Get center of detected object and calculate the head turns
        target_x = polygon.points[0].x + 0.5 * polygon.points[1].x
        target_y = polygon.points[0].y + 0.5 * polygon.points[1].y

        sub_x = target_x - 320 / 2.0
        sub_y = target_y - 240 / 2.0

        var_x = (sub_x / 160.0)
        var_y = (sub_y / 120.0)

        joint.joint_angles.append(-var_x * 0.05)
        joint.joint_angles.append(var_y * 0.05)

        ans = self.rh.humanoid_motion.getJointAngles(['HeadYaw', 'HeadPitch'])
        head_pitch = ans['angles'][1]
        head_yaw = ans['angles'][0]

        # Get the sonar measurements
        sonars = self.rh.sensors.getSonarsMeasurements()

        # Check if NAO is close to an obstacle
        if sonars['sonars']['front_left'] <= 0.3 or sonars['sonars'][
                'front_right'] <= 0.3:
            self.lock_motion = True
            rospy.loginfo("Locked due to sonars")
        # Check if NAO's head looks too far down or up
        elif head_pitch >= 0.4 or head_pitch <= -0.4:
            self.lock_motion = True
            rospy.loginfo("Locked due to head pitch")
        # Else approach the object
        elif self.lock_motion is False:
            self.theta_vel = head_yaw * 0.1
            if -0.2 < head_yaw < 0.2:
                print "Approaching"
                self.x_vel = 0.5
                self.y_vel = 0.0
            else:
                self.x_vel = 0.0
                self.y_vel = 0.0
                print "Centering"
            self.joint_pub.publish(joint)

        # Check the battery levels
        batt = self.rh.sensors.getBatteryLevels()
        battery = batt['levels'][0]
        if battery < 25:
            self.rh.audio.setVolume(100)
            self.rh.audio.speak("My battery is low")
            self.predator_sub.unregister()
            self.rh.humanoid_motion.goToPosture("Sit", 0.7)
            self.rh.motion.disableMotors()
            sys.exit(1)
Code example #18
File: tracker.py  Project: rapp-project/rapps-nao
    def track_bounding_box(self, polygon):
        self.hunt_initiated = True

        # Set the timeout counter to 2 seconds
        self.lost_object_counter = 20
        
        # Velocities message initialization
        joint = JointAnglesWithSpeed()
        joint.joint_names.append("HeadYaw")
        joint.joint_names.append("HeadPitch")
        joint.speed = 0.1
        joint.relative = True

        # Get center of detected object and calculate the head turns
        target_x = polygon.points[0].x + 0.5 * polygon.points[1].x
        target_y = polygon.points[0].y + 0.5 * polygon.points[1].y

        sub_x = target_x - 320 / 2.0
        sub_y = target_y - 240 / 2.0

        var_x = (sub_x / 160.0)
        var_y = (sub_y / 120.0)

        joint.joint_angles.append(-var_x * 0.05)
        joint.joint_angles.append(var_y * 0.05)

        ans = self.rh.humanoid_motion.getJointAngles(['HeadYaw', 'HeadPitch'])
        head_pitch = ans['angles'][1]
        head_yaw = ans['angles'][0]

        # Get the sonar measurements
        sonars = self.rh.sensors.getSonarsMeasurements()

        # Check if NAO is close to an obstacle
        if sonars['sonars']['front_left'] <= 0.3 or sonars['sonars']['front_right'] <= 0.3:
            self.lock_motion = True
            rospy.loginfo("Locked due to sonars")
        # Check if NAO's head looks too far down or up
        elif head_pitch >= 0.4 or head_pitch <= -0.4:
            self.lock_motion = True
            rospy.loginfo("Locked due to head pitch")
        # Else approach the object
        elif self.lock_motion is False:
            self.theta_vel = head_yaw * 0.1
            if -0.2 < head_yaw < 0.2:
                print "Approaching"
                self.x_vel = 0.5
                self.y_vel = 0.0
            else:
                self.x_vel = 0.0
                self.y_vel = 0.0
                print "Centering"
            self.joint_pub.publish(joint)

        # Check the battery levels
        batt = self.rh.sensors.getBatteryLevels()
        battery = batt['levels'][0]
        if battery < 25:
            self.rh.audio.setVolume(100)
            self.rh.audio.speak("My battery is low")
            self.predator_sub.unregister()
            self.rh.humanoid_motion.goToPosture("Sit", 0.7)
            self.rh.motion.disableMotors()
            sys.exit(1)
Code example #19
    def callback_camera(self, ros_data):

        if self.tracking == 0:

            # When the ball is detected, save the desired frame and process the image to obtain the HSV values
            self.cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

        elif self.tracking == 1:

            # Use the bridge to convert the ROS Image message to OpenCV message
            cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

            # convert the cv_image to the HSV color space
            hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

            # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
            mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
            mask = cv2.erode(mask, None, iterations=2)
            mask = cv2.dilate(mask, None, iterations=2)

            # find contours in the mask and initialize the current (x, y) center of the ball
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
            center = None

            # only proceed if at least one contour was found
            if len(cnts) > 0:
                # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
                c = max(cnts, key=cv2.contourArea)
                ((x, y), radius) = cv2.minEnclosingCircle(c)
                M = cv2.moments(c)
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

                # only proceed if the radius meets a minimum size
                if radius > 10:
                    # draw the circle and centroid on the cv_image
                    cv2.circle(cv_image, (int(x), int(y)), int(radius),
                               (0, 255, 255), 2)
                    cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

            # Use the bridge to convert the OpenCV message to ROS Image message. Publish the modified image on a new topic
            ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
            self.publisher.publish(ros_image)

            # If no ball was detected in this frame, skip the control step
            # (otherwise 'center' is None and 'radius' is undefined below).
            if center is None:
                return

            # Work out the head movement required to keep the ball in the image center
            # Velocities message initialization
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("HeadYaw")
            joint.joint_names.append("HeadPitch")
            joint.speed = 0.1
            joint.relative = True

            # Get center of detected object and calculate the head turns
            target_x = center[0]  # centroid x coordinate
            target_y = center[1]  # centroid y coordinate

            # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2) = (320/2, 240/2)    (x, y) coordinates
            sub_x = target_x - cv_image.shape[1] / 2
            sub_y = target_y - cv_image.shape[0] / 2

            var_x = (sub_x / float(cv_image.shape[1] / 2))
            var_y = (sub_y / float(cv_image.shape[0] / 2))

            # Check the Head position before moving it to restrict its movement and prevent it from crashing with the shoulders

            # Predict the new relative Y position, which is restricted to avoid crashes
            new_position_y = self.positions[1] + var_y * 0.10

            if new_position_y <= -0.45 or new_position_y >= 0.3:
                var_y = 0

            joint.joint_angles.append(-var_x * 0.25)
            joint.joint_angles.append(var_y * 0.10)

            self.joint_pub.publish(joint)

            # Movement towards the ball (walk)
            walk = Twist()

            print radius
            print self.positions[0]  # HeadYaw <-->
            if radius < 50:  # Walk forward if the ball is far. When radius = 50, NAO is approx. 25 cm from the ball
                # Walk
                if self.positions[0] < (-0.3):
                    walk.linear.x = 0.3
                    walk.linear.y = -0.3

                elif (self.positions[0] >= -0.3) and (self.positions[0] < 0.3):
                    walk.linear.x = 0.3
                    walk.linear.y = 0.0

                elif self.positions[0] >= 0.3:
                    walk.linear.x = 0.3
                    walk.linear.y = 0.3

                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

            else:
                # Stop
                walk.linear.x = 0.0
                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

            self.walk_pub.publish(walk)
Code example #20
    def callback_tactile(self, data):
        rospy.loginfo("I heard %u %u" % (data.button, data.state))

        if data.button == 1 and data.state == 1:
            joint_pub = rospy.Publisher('/joint_angles',
                                        JointAnglesWithSpeed,
                                        queue_size=10)

            rospy.loginfo("Pulsado %u , pasamos" % (data.button))
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("LShoulderRoll")
            joint.joint_angles.append(-0.3)
            joint.joint_names.append("LShoulderPitch")
            joint.joint_angles.append(-0.2)
            joint.joint_names.append("LElbowYaw")
            joint.joint_angles.append(-1)
            joint.joint_names.append("LElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("LWristYaw")
            joint.joint_angles.append(-0.5)
            joint.joint_names.append("LHand")
            joint.joint_angles.append(0)

            joint.joint_names.append("RShoulderRoll")
            joint.joint_angles.append(0.3)
            joint.joint_names.append("RShoulderPitch")
            joint.joint_angles.append(-0.2)
            joint.joint_names.append("RElbowYaw")
            joint.joint_angles.append(1)
            joint.joint_names.append("RElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("RWristYaw")
            joint.joint_angles.append(0.5)
            joint.joint_names.append("RHand")
            joint.joint_angles.append(0)

            joint.joint_names.append("HeadYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("HeadPitch")
            joint.joint_angles.append(0)

            joint.speed = 0.1
            joint.relative = False

            joint_pub.publish(joint)

            # Deactivate tracking mode because the HSV range recognition is in progress
            self.tracking = 0

            print "Tracking deactivated"

        elif data.button == 2 and data.state == 1:
            # Save a frame from the camera to detect the ball colour
            output = self.cv_image.copy(
            )  # draw our detected circles without destroying the original image.
            gray = cv2.cvtColor(
                self.cv_image, cv2.COLOR_BGR2GRAY
            )  # the cv2.HoughCircles function requires an 8-bit, single channel image, so we'll go ahead and convert from the RGB color space to grayscale

            # detect circles in the image
            circles = cv2.HoughCircles(gray,
                                       cv2.cv.CV_HOUGH_GRADIENT,
                                       1.2,
                                       100,
                                       param2=80,
                                       minRadius=50,
                                       maxRadius=200)

            if circles is None:
                print 'No ball detected'
            else:
                print circles

            # ensure at least some circles were found
            if circles is not None:
                # convert the (x, y) coordinates and radius of the circles to integers allowing us to draw them on our output image
                circles = np.round(circles[0, :]).astype("int")

                # loop over the (x, y) coordinates and radius of the circles
                for (x, y, r) in circles:
                    # draw the circle in the output image, then draw a rectangle
                    # corresponding to the center of the circle
                    cv2.circle(output, (x, y), r, (0, 255, 0), 4)
                    cv2.rectangle(output, (x - 45, y - 45), (x + 45, y + 45),
                                  (0, 128, 255), -1)

                hsv = cv2.cvtColor(self.cv_image, cv2.COLOR_BGR2HSV)

                # Calculate the mean HSV values inside a rectangle near the ball center.
                s = [0, 0, 0]
                for i in range(y - 45, y + 46):
                    for j in range(x - 45, x + 46):
                        s = s + hsv[i, j]
                s = s / ((45 + 46) * (45 + 46))
                print s

                # Calculate the upper and lower limits from the mean HSV values
                if s[1] < 100:  # Saturation<100
                    if (s[0] - 10) < 0:
                        self.colourLower = (0, 40, 40)
                    else:
                        self.colourLower = (s[0] - 10, 40, 40)
                else:
                    if (s[0] - 10) < 0:
                        self.colourLower = (0, 100, 100)
                    else:
                        self.colourLower = (s[0] - 10, 100, 100)

                self.colourUpper = (s[0] + 10, 255, 255)

                print self.colourUpper, self.colourLower

        elif data.button == 3 and data.state == 1:
            joint_pub = rospy.Publisher('/joint_angles',
                                        JointAnglesWithSpeed,
                                        queue_size=10)

            rospy.loginfo("Pulsado %u , pasamos" % (data.button))
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("LShoulderRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("LShoulderPitch")
            joint.joint_angles.append(1.3)
            joint.joint_names.append("LElbowYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("LElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("LWristYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("LHand")
            joint.joint_angles.append(0)

            joint.joint_names.append("RShoulderRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("RShoulderPitch")
            joint.joint_angles.append(1.3)
            joint.joint_names.append("RElbowYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("RElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("RWristYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("RHand")
            joint.joint_angles.append(0)

            joint.speed = 0.1
            joint.relative = False

            joint_pub.publish(joint)

            # colour range detected. Activate tracking mode
            self.tracking = 1

            print "Tracking ON"
Code example #21
    def callback_camera(self, ros_data):

        # When the ball is detected, save the desired frame and process the image to obtain the HSV values
        if self.tracking == 0:

            self.cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

        elif self.tracking == 1:

            # Use the bridge to convert the ROS Image message to OpenCV message
            cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

            # convert the cv_image to the HSV color space
            hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

            # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
            mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
            mask = cv2.erode(mask, None, iterations=2)
            mask = cv2.dilate(mask, None, iterations=2)

            # find contours in the mask and initialize the current (x, y) center of the ball
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
            center = None

            # only proceed if at least one contour was found
            if len(cnts) > 0:
                # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
                c = max(cnts, key=cv2.contourArea)
                ((x, y), radius) = cv2.minEnclosingCircle(c)
                M = cv2.moments(c)
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

                # only proceed if the radius meets a minimum size
                if radius > 10:
                    # draw the circle and centroid on the cv_image
                    cv2.circle(cv_image, (int(x), int(y)), int(radius),
                               (0, 255, 255), 2)
                    cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

            # Use the bridge to convert the OpenCV message to ROS Image message. Publish the modified image on a new topic
            ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
            self.publisher.publish(ros_image)

            # If no ball was detected in this frame, skip the control step
            # (otherwise 'center' is None and 'radius' is undefined below)
            if center is None:
                return

            # Work out the head movement required to keep the ball in the image center
            # Velocities message initialization
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("HeadYaw")
            joint.joint_names.append("HeadPitch")
            joint.speed = 0.1
            joint.relative = True

            # Get center of detected object and calculate the head turns
            target_x = center[0]  # centroid x coordinate
            target_y = center[1]  # centroid y coordinate

            # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2) = (320/2, 240/2)    (x, y) coordinates
            sub_x = target_x - cv_image.shape[1] / 2
            sub_y = target_y - cv_image.shape[0] / 2

            var_x = (sub_x / float(cv_image.shape[1] / 2))
            var_y = (sub_y / float(cv_image.shape[0] / 2))

            # Check the head position before moving it to restrict its movement and prevent it from crashing into the shoulders

            # Predict the new relative Y position, which is restricted to avoid crashes
            new_position_y = self.positions[1] + var_y * 0.10

            if new_position_y <= -0.45 or new_position_y >= 0.3:
                var_y = 0

            joint.joint_angles.append(-var_x * 0.25)
            joint.joint_angles.append(var_y * 0.10)

            self.joint_pub.publish(joint)

            # Movement towards the ball (walk)
            walk = Twist()

            print " Radius is (px): ", radius
            print "HeadPitch: ", self.positions[1]
            print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
            if radius < 58:  # Walk forward if the ball is far. When radius = 50, NAO is approx. 25 cm from the ball

                # Walk
                if self.positions[0] < (-0.3):
                    walk.linear.x = 0.2
                    walk.angular.z = -0.25

                elif (self.positions[0] >= -0.3) and (self.positions[0] < 0.3):
                    walk.linear.x = 0.3
                    walk.angular.z = 0.0

                elif self.positions[0] >= 0.3:
                    walk.linear.x = 0.2
                    walk.angular.z = 0.25

                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

            elif radius > 68:  # If it is too close, walk backwards
                walk.linear.x = -0.2
                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

                self.walk_pub.publish(walk)
                print "Walk backwards"

            else:

                CabezaCentrada = False
                PelotaCentro = False

                # Check if the ball is in front of NAO; if not, NAO walks to the corresponding side
                # Check that the head is aligned with the body and not turned
                if self.positions[0] < (-0.3) or self.positions[0] > 0.3:  # Use a proportional controller to move with more precision
                    walk.linear.y = 0.0
                    e = 0 - self.positions[0]
                    walk.angular.z = e * (-0.5)

                elif self.positions[0] < (-0.05):  # HeadYaw <-->
                    walk.linear.y = -0.3
                    walk.angular.z = -0.0

                elif self.positions[0] > 0.05:
                    walk.linear.y = 0.3
                    walk.angular.z = 0.0

                else:  # Stop
                    CabezaCentrada = True

                    walk.linear.y = 0.0
                    walk.angular.z = 0.0
                    print " Head centered "

                    walk.linear.x = 0.0
                    walk.linear.z = 0.0

                    walk.angular.x = 0.0
                    walk.angular.y = 0.0

                    self.walk_pub.publish(walk)

                    rospy.sleep(0.3)  # Wait some time for the robot to become steady

                    if center[0] < 152 or center[0] > 168:  # Check if the ball is in the image center looking at its X coordinate
                        e = 160 - center[0]
                        walk.linear.y = e * (-0.005)
                        walk.angular.z = -0.0
                    else:  # Stop
                        PelotaCentro = True
                        walk.linear.y = 0.0
                        walk.angular.z = 0.0
                        print " Ball centered "

                walk.linear.x = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

                print " Center is: ", center
                print "PelotaCentro: ", PelotaCentro
                print "CabezaCentrada: ", CabezaCentrada

                if PelotaCentro and CabezaCentrada:

                    self.image_sub.unregister()

                    # Subscribe to another callback
                    self.image_sub = rospy.Subscriber(
                        "/nao_robot/camera/top/camera/image_raw", Image,
                        self.callback_camera2)
Code example #22
    def callback_camera2(self, ros_data):
        print "callback camera 2: centering the ball in the image"

        # Use the bridge to convert the ROS Image message to OpenCV message
        cv_image = self.bridge.imgmsg_to_cv2(ros_data, desired_encoding="passthrough")

        # convert the cv_image to the HSV color space
        hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

        # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
        mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
        mask = cv2.erode(mask, None, iterations=2)
        mask = cv2.dilate(mask, None, iterations=2)

        # find contours in the mask and initialize the current (x, y) center of the ball
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                cv2.CHAIN_APPROX_SIMPLE)[-2]
        center = None

        # only proceed if at least one contour was found
        if len(cnts) > 0:
            # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
            c = max(cnts, key=cv2.contourArea)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            M = cv2.moments(c)
            center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

            # only proceed if the radius meets a minimum size
            if radius > 10:
                # draw the circle and centroid on the cv_image
                cv2.circle(cv_image, (int(x), int(y)), int(radius), (0, 255, 255), 2)
                cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

        # Use the bridge to convert the OpenCV message to ROS Image message. Publish the modified image on a new topic
        ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
        self.publisher.publish(ros_image)

        # If no ball was detected in this frame, skip the centering step
        # (otherwise 'center' is None and 'radius' is undefined below)
        if center is None:
            return

        if center[0] < 155 or center[0] > 165:  # Check X coordinate
            self.contador = 0  # Initialize counter
            # Work out the head movement required to keep the ball in the image center
            # Velocities message initialization
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("HeadYaw")
            joint.joint_names.append("HeadPitch")
            joint.speed = 0.1
            joint.relative = True

            # Get center of detected object and calculate the head turns
            target_x = center[0]  # centroid x coordinate
            target_y = center[1]  # centroid y coordinate

            # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2) = (320/2, 240/2)    (x, y) coordinates
            sub_x = target_x - cv_image.shape[1] / 2
            sub_y = target_y - cv_image.shape[0] / 2

            var_x = (sub_x / float(cv_image.shape[1] / 2))
            var_y = (sub_y / float(cv_image.shape[0] / 2))

            # Check the head position before moving it to restrict its movement and prevent it from crashing into the shoulders

            # Predict the new relative Y position, which is restricted to avoid crashes
            new_position_y = self.positions[1] + var_y * 0.15

            if new_position_y <= -0.45 or new_position_y >= 0.3:
                var_y = 0

            joint.joint_angles.append(-var_x * 0.10)  # 0.25
            joint.joint_angles.append(var_y * 0.15)

            self.joint_pub.publish(joint)
        else:
            print "Centered with respect to X"
            if center[1] < 115 or center[1] > 125:  # Check Y coordinate
                self.contador = 0
                # Velocities message initialization
                joint = JointAnglesWithSpeed()
                # joint.joint_names.append("HeadYaw")
                joint.joint_names.append("HeadPitch")
                joint.speed = 0.1
                joint.relative = True

                if center[1] < 115:
                    var_y = -0.01
                elif center[1] > 125:
                    var_y = 0.01

                joint.joint_angles.append(var_y)

                self.joint_pub.publish(joint)
            else:
                # Counter to ensure the robot is centered and it was not a coincidence
                self.contador = self.contador + 1
                print "Centered with respect to Y"
                print "Counter: ", self.contador
                if self.contador > 10:
                    self.image_sub.unregister()
                    print "Centered, exiting"
                    print "HeadPitch: ", self.positions[1]
                    print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
                    print " Center is: ", center

                    # Obtain position
                    Dreal = 0.08  # Real ball diameter (meters)
                    pixelToMeter = Dreal / (radius * 2)

                    Yrobot = (cv_image.shape[1] / 2 - center[0]) * pixelToMeter

                    radiusToMeters = 0.25 * 55  # Distance: 0.25m -> radius = 55px
                    Xrobot = radiusToMeters / radius

                    # Calculate Z coordinate with the equation that connects it with HeadPitch
                    Zrobot = (-25.866 * self.positions[1] + 23.843) / 100  # Divide by 100 to convert centimeters to meters

                    # Create point
                    point = Point()
                    point.x = Xrobot
                    point.y = Yrobot
                    point.z = Zrobot

                    self.pointPub.publish(point)
                    print "Published point: ", point

        print " The radius is (px): ", radius
        print "HeadPitch: ", self.positions[1]
        print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
        print " Center is: ", center
Code example #23
#!/usr/bin/env python

import rospy
from std_msgs.msg import String
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Twist
from nav_msgs.msg import Odometry
from moveit_msgs.msg import MoveGroupAction, MoveGroupActionGoal
from moveit_msgs.msg import Constraints, JointConstraint, PositionConstraint, OrientationConstraint, BoundingVolume
from naoqi_bridge_msgs.msg import JointAnglesWithSpeed

MoveGroupActionGoal_msg = MoveGroupActionGoal()
JointAnglesWithSpeed_msg = JointAnglesWithSpeed()


def dataMoveGoal(data):  #MoveGroupActionGoal_msg.goal.request.goal_constraints
    try:
        #JointAnglesWithSpeed_msg.joint_names = data.goal.request.goal_constraints
        #JointAnglesWithSpeed_msg.joint_angles = data.goal.request.goal_constraints
        JointAnglesWithSpeed_msg.joint_names = ["HeadYaw", "HeadPitch"]
        JointAnglesWithSpeed_msg.joint_angles = [0.1, 0.1]
        JointAnglesWithSpeed_msg.speed = 0.3
        JointAnglesWithSpeed_msg.relative = 0

        # now we need to play with the strings here and build a small loop
        for i in data.goal.request.goal_constraints:
            rospy.loginfo("%s ", i)

        rospy.loginfo("%s ", data.goal.request.goal_constraints)
    except Exception:
        rospy.loginfo("error occurred in dataMoveGoal")
Code example #24
    def callback_camera(self, ros_data):

        if self.tracking == 0:

            self.cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

        elif self.tracking == 1:

            # Use the bridge to convert the ROS Image message to OpenCV message
            cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

            # convert the cv_image to the HSV color space
            hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

            # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
            mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
            mask = cv2.erode(mask, None, iterations=2)
            mask = cv2.dilate(mask, None, iterations=2)

            # find contours in the mask and initialize the current (x, y) center of the ball
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
            center = None

            # only proceed if at least one contour was found
            if len(cnts) > 0:
                # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
                c = max(cnts, key=cv2.contourArea)
                ((x, y), radius) = cv2.minEnclosingCircle(c)
                M = cv2.moments(c)
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

                # only proceed if the radius meets a minimum size
                if radius > 10:
                    # draw the circle and centroid on the cv_image
                    cv2.circle(cv_image, (int(x), int(y)), int(radius),
                               (0, 255, 255), 2)
                    cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

            # Conversion of the OpenCV format image to ROS format and publish it in a topic
            ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
            self.publisher.publish(ros_image)

            # If no ball was detected in this frame, skip the control step
            # (otherwise 'center' is None and 'radius' is undefined below)
            if center is None:
                return

            # Calculate the head movement necessary to keep the ball in the center of the image
            # Velocities message initialization
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("HeadYaw")
            joint.joint_names.append("HeadPitch")
            joint.speed = 0.1
            joint.relative = True

            # Get center of detected object and calculate the head turns
            target_x = center[0]  # centroid x coordinate
            target_y = center[1]  # centroid y coordinate

            # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2) = (320/2, 240/2)    (x, y) coordinates
            sub_x = target_x - cv_image.shape[1] / 2
            sub_y = target_y - cv_image.shape[0] / 2

            var_x = (sub_x / float(cv_image.shape[1] / 2))
            var_y = (sub_y / float(cv_image.shape[0] / 2))

            # Check the position of the head before moving it to restrict movement and prevent it from clashing with the shoulders

            # Subscribe to the topic /joint_states and save the first two values (HeadYaw and HeadPitch) in reference variables to compare before performing the move

            # Determine the new relative Y position, which is the one restricted to avoid collisions
            new_position_y = self.positions[1] + var_y * 0.10

            if new_position_y <= -0.45 or new_position_y >= 0.3:
                var_y = 0

            joint.joint_angles.append(-var_x * 0.25)
            joint.joint_angles.append(var_y * 0.10)

            self.joint_pub.publish(joint)

            # Movement towards the ball (walking)
            walk = Twist()

            print " El radio son (px): ", radius
            print "HeadPitch: ", self.positions[1]
            print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
            if radius < 58:  # Approach if far away. If I see radio 50 I am approx. 25cm

                # Walk
                if self.positions[0] < (-0.3):
                    walk.linear.x = 0.2
                    walk.angular.z = -0.25

                elif (self.positions[0] >= -0.3) and (self.positions[0] < 0.3):
                    walk.linear.x = 0.3
                    walk.angular.z = 0.0

                elif self.positions[0] >= 0.3:
                    walk.linear.x = 0.2
                    walk.angular.z = 0.25

                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

            elif radius > 68:  # When Nao is too close to the ball, it walks backwards
                walk.linear.x = -0.2
                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

                self.walk_pub.publish(walk)
                print " Desplazo atras"

            else:

                CabezaCentrada = False
                PelotaCentro = False
                # Check if the ball is in front of the robot, otherwise it moves to the corresponding side
                # Check that the head is aligned with the body and that it is not turned to any side
                if self.positions[0] < (-0.3) or self.positions[0] > 0.3:
                    walk.linear.y = 0.0
                    e = 0 - self.positions[0]
                    walk.angular.z = e * (-0.5)

                elif self.positions[0] < (-0.05):  # HeadYaw <-->
                    walk.linear.y = -0.3
                    walk.angular.z = -0.0

                elif self.positions[0] > 0.05:
                    walk.linear.y = 0.3
                    walk.angular.z = 0.0

                else:  # Stop walking
                    CabezaCentrada = True

                    walk.linear.y = 0.0
                    walk.angular.z = 0.0
                    print " Cabeza centrada"

                    walk.linear.x = 0.0
                    walk.linear.z = 0.0

                    walk.angular.x = 0.0
                    walk.angular.y = 0.0

                    self.walk_pub.publish(walk)

                    rospy.sleep(0.3)  # For the robot to stabilize

                    if center[0] < 150 or center[0] > 170:  # Verify that the ball is seen in the center of the image via its X coordinate
                        e = 160 - center[0]
                        walk.linear.y = e * (-0.005)

                        print "Regulador"
                        walk.angular.z = -0.0
                    else:  # Parar
                        PelotaCentro = True
                        walk.linear.y = 0.0
                        walk.angular.z = 0.0
                        print " Pelota en el centro"

                walk.linear.x = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

                print " Center is: ", center
                print "Pelota Centro: ", PelotaCentro
                print "Cabeza Centrada: ", CabezaCentrada

                if PelotaCentro and CabezaCentrada:

                    self.image_sub.unregister()

                    # Subscribe to other callback
                    self.image_sub = rospy.Subscriber(
                        "/nao_robot/camera/top/camera/image_raw", Image,
                        self.callback_camera2)
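
The snippet above is a proportional visual servo on the two head joints. The following is a minimal sketch of that step on its own, assuming a 320x240 image; the gains (0.25 for yaw, 0.10 for pitch) and the HeadPitch safety band (-0.45 to 0.3 rad) come from the example, while the helper name head_tracking_command is purely illustrative.

from naoqi_bridge_msgs.msg import JointAnglesWithSpeed

def head_tracking_command(center, image_shape, head_pitch, kp_yaw=0.25, kp_pitch=0.10):
    # Normalized pixel error in [-1, 1] with respect to the image center
    err_x = (center[0] - image_shape[1] / 2) / float(image_shape[1] / 2)
    err_y = (center[1] - image_shape[0] / 2) / float(image_shape[0] / 2)

    # Zero the pitch step if it would drive the head out of the safe band
    if not (-0.45 < head_pitch + err_y * kp_pitch < 0.3):
        err_y = 0.0

    joint = JointAnglesWithSpeed()
    joint.joint_names = ['HeadYaw', 'HeadPitch']
    joint.joint_angles = [-err_x * kp_yaw, err_y * kp_pitch]  # yaw sign flipped, as above
    joint.speed = 0.1
    joint.relative = True
    return joint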
コード例 #25
0
    def __init__(self):
        #======================================================================#
        # ROS Setup
        #======================================================================#
        rospy.init_node('kinect_control')

        #===== Parameters =====#
        # Publishing frequency
        self.frequency = 30
        self.rate = rospy.Rate(self.frequency)

        #----- Set Motion Parameters -----#
        # Relative joint velocity (compared to max)
        self.fraction_max_arm_speed = rospy.get_param('~speed_fraction', 0.1)

        # NOTE: This parameter corresponds to a currently UNUSED filter.
        # This is the maximum change allowed per loop in order to smooth out
        # large jumps in the setpoint.
        self.max_angle_change = rospy.get_param('~max_angle_change', np.pi/6)

        #----- Joint Names -----#
        self.joint_names = ['LShoulderPitch', 'LShoulderRoll', 'LElbowYaw', 'LElbowRoll', 'RShoulderPitch', 'RShoulderRoll', 'RElbowYaw', 'RElbowRoll']
        self.base_link_name = 'base_link_K'

        #===== Transform Listener/Broadcaster =====#
        self.tfListener = tf.TransformListener()
        self.tfBroadcaster = tf.TransformBroadcaster()

        #===== Publishers =====#
        self.joint_angles_msg = JointAnglesWithSpeed()
        self.joint_angles_msg.joint_names = self.joint_names
        self.joint_angles_msg.speed = self.fraction_max_arm_speed
        self.joint_angles_pub = rospy.Publisher('/pepper_interface/joint_angles', JointAnglesWithSpeed, queue_size=3)

        #===== Derivation Variables =====#
        # These are the member variables used to calculate the angle setpoints
        # given the joint transforms. See the documentation for how the angles
        # are derived from the joint frames.
        self.shoulder_base_link = {'L': [0, 0, 0], 'R': [0, 0, 0]}
        self.shoulder_rotation = {'L': [0, 0, 0, 1], 'R': [0, 0, 0, 1]}
        self.elbow_base_link = {'L': [0, 0, 0], 'R': [0, 0, 0]}
        self.elbow_shoulder_link = {'L': [0, 0, 0], 'R': [0, 0, 0]}
        self.elbow_rotated = {'L': [0, 0, 0], 'R': [0, 0, 0]}
        self.hand_base_link = {'L': [0, 0, 0], 'R': [0, 0, 0]}
        self.hand_shoulder_link = {'L': [0, 0, 0], 'R': [0, 0, 0]}
        self.hand_rotated = {'L': [0, 0, 0], 'R': [0, 0, 0]}
        self.theta = {'L': 0, 'R': 0}

        #===== Angle Setpoints =====#
        # Current
        self.angle_setpoints = {}
        for key in self.joint_names:
            self.angle_setpoints[key] = 0.0

        # Previous
        self.angle_setpoints_previous = {}
        for key in self.joint_names:
            self.angle_setpoints_previous[key] = self.angle_setpoints[key]
        self.current_time = rospy.get_time()

        # NOTE: This filter is currently UNUSED.
        #===== Filtering Parameters =====#
        # These variables are used for filtering the angle changes when the
        # shoulder and elbow are near singularities and for detecting and
        # smoothing out large jumps when the Kinect loses tracking.
        # EWMA = exponentially weighted moving average
        self.shoulder_pitch_EWMA = {'L': 0, 'R': 0}
        self.elbow_yaw_EWMA = {'L': 0, 'R': 0}
        self.shoulder_learning_rate = {'L': 1, 'R': 1}
        self.elbow_learning_rate = {'L': 1, 'R': 1}
        self.max_learning_rate = 1
        self.min_learning_rate = 0.1
        self.singularity = {
            'LShoulderRoll': np.pi/2,
            'RShoulderRoll': -np.pi/2,
            'LElbowRoll': 0,
            'RElbowRoll': 0
        }
        # The cutoff is the angle where the learning factor becomes 1 and all
        # new angles are used as the setpoint without any averaging.
        self.cutoff = {
            'LShoulderRoll': 3*np.pi/8,
            'RShoulderRoll': -3*np.pi/8,
            'LElbowRoll': -np.pi/8,
            'RElbowRoll': np.pi/8
        }

        # Calculate the linear regression parameters
        self.a = {}
        self.b = {}
        for key in self.singularity.keys():
            self.a[key] = (self.min_learning_rate - self.max_learning_rate) / (self.singularity[key] - self.cutoff[key])
            self.b[key] = self.max_learning_rate - self.a[key]*self.cutoff[key]
        #================================#

        # NOTE: This filter is currently UNUSED
        #===== Large jump detection parameters =====#
        # Angle change to be considered a large jump
        self.large_angle = 3*np.pi/4 # rad
        # Length of averaging window
        # (should be determined based on publishing frequency and how long you want the pose to be stable before going back to tracking)
        self.window_length = 40 # runs at 30 Hz, so 40 samples ~ 1.3 sec
        self.averaging_window = {}
        for key in self.joint_names:
            self.averaging_window[key] = self.window_length*[0]
        # Distance the current pose angle must be from the average before tracking again
        # (all joint angles must be within this threshold)
        self.angle_threshold = np.pi/10 # rad

        # Boolean indicating whether the system has experienced a large jump or if it should continue tracking
        self.large_jump = False
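
For reference, here is a minimal sketch of how the unused EWMA filter described above could be applied, assuming the linear learning-rate model lr = a*angle + b fitted between the cutoff angle (lr = 1) and the singularity (lr = 0.1); the function name and the clipping are illustrative, not the author's implementation.

def ewma_update(previous, sample, roll_angle, a, b, lr_min=0.1, lr_max=1.0):
    # Learning rate from the linear model, clipped to its bounds; near the
    # singularity lr approaches lr_min, so new samples are heavily averaged
    lr = min(max(a * roll_angle + b, lr_min), lr_max)
    # Exponentially weighted moving average of the joint angle
    return (1.0 - lr) * previous + lr * sample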
コード例 #26
0
    def main(self):

        ## Configuration parameters
        # Set the language of the speech recognition engine to Spanish:
        self.asrProxy.setLanguage("Spanish")
        # set the language of the synthesis engine to Spanish:
        self.ttsProxy.setLanguage("Spanish")

        # Adds the vocabulary
        vocabulary = [
            "si", "no", "hola", "Nao", "rojo", "azul", "amarillo", "violeta"
        ]
        self.asrProxy.setVocabulary(vocabulary, False)
        # Or, if you want to enable word spotting:
        #self.asrProxy.setVocabulary(vocabulary, True)

        # Wake up robot
        self.motionProxy.wakeUp()

        fractionMaxSpeed = 0.5
        # Go to posture stand
        self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)

        # Introducing the game
        self.ttsProxy.say("Hola! Soy Nao.")

        # Ask the kid to choose one ball (blue, red, yellow)
        self.ttsProxy.say(
            "Elige una de las pelotas y cuando estés listo llámame.")

        # Start the sound tracker while the kid is not ready. When the kid says something or claps, Nao turns toward the sound source.
        targetName = "Sound"
        distance = 1
        confidence = 0.4
        parameters = [distance, confidence]
        self.trackerProxy.registerTarget(targetName, parameters)

        mode = "Move"
        self.trackerProxy.setMode(mode)
        # Start tracker.
        self.trackerProxy.track(targetName)

        while True:
            time.sleep(1)
            TargetPosition = self.trackerProxy.getTargetPosition(2)  # Frame: 0-Torso, 1-World, 2-Robot
            print "Target Position: ", TargetPosition
            print len(TargetPosition)
            if len(TargetPosition) > 0:
                # Exit the loop when the kid's voice comes from directly in front of the robot
                if TargetPosition[0] > 0 and abs(TargetPosition[1]) < 0.1:
                    break

        self.trackerProxy.stopTracker()
        self.trackerProxy.unregisterAllTargets()
        self.ttsProxy.say("¡Al fin te encuentro!")

        # Go to posture stand
        self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)

        self.ttsProxy.say("¿Me dejas ver la pelota?")

        repeate_recognition = 1
        count = 0
        while (repeate_recognition == 1):
            self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)
            # Robot posture with hands opened to catch the ball
            #joint_pub = rospy.Publisher('/joint_angles', JointAnglesWithSpeed, queue_size=10)
            rospy.loginfo("Putting the hands into the ball-recognition position")
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("LShoulderRoll")
            joint.joint_angles.append(-0.35)
            joint.joint_names.append("LShoulderPitch")
            joint.joint_angles.append(-0.30)
            joint.joint_names.append("LElbowYaw")
            joint.joint_angles.append(-1.00)
            joint.joint_names.append("LElbowRoll")
            joint.joint_angles.append(0.00)
            joint.joint_names.append("LWristYaw")
            joint.joint_angles.append(-1.50)
            joint.joint_names.append("LHand")
            joint.joint_angles.append(1)

            joint.joint_names.append("RShoulderRoll")
            joint.joint_angles.append(0.35)
            joint.joint_names.append("RShoulderPitch")
            joint.joint_angles.append(-0.30)
            joint.joint_names.append("RElbowYaw")
            joint.joint_angles.append(1.00)
            joint.joint_names.append("RElbowRoll")
            joint.joint_angles.append(0.00)
            joint.joint_names.append("RWristYaw")
            joint.joint_angles.append(1.50)
            joint.joint_names.append("RHand")
            joint.joint_angles.append(1)

            joint.joint_names.append("HeadYaw")
            joint.joint_angles.append(0.00)
            joint.joint_names.append("HeadPitch")
            joint.joint_angles.append(0.00)

            joint.speed = 0.1
            joint.relative = False

            self.joint_pub.publish(joint)

            # Wait some time for the kid to put the ball in Nao's hands
            time.sleep(10)  # 10 seconds

            # Take a frame with the ball to recognize the colour
            ball_detected = 0
            while (ball_detected == 0):
                output = self.cv_image.copy()  # Draw the detected circles on a copy, preserving the original image
                # cv2.HoughCircles requires an 8-bit, single-channel image, so convert from BGR to grayscale
                gray = cv2.cvtColor(self.cv_image, cv2.COLOR_BGR2GRAY)

                # Detect circles in the image
                circles = cv2.HoughCircles(gray,
                                           cv2.cv.CV_HOUGH_GRADIENT,
                                           1.2,
                                           100,
                                           param2=80,
                                           minRadius=50,
                                           maxRadius=200)

                if circles is None:
                    print 'Ball not detected'
                    self.ttsProxy.say("No soy capaz de reconocer la pelota")
                    time.sleep(5)
                else:
                    print 'Ball detected: ', circles
                    ball_detected = 1
                    # Ensure at least some circles were found

                if circles is not None:
                    # Convert the (x, y) coordinates and radius of the circles to integers allowing us to draw them on our output image
                    circles = np.round(circles[0, :]).astype("int")

                    # Loop over the (x, y) coordinates and radius of the circles
                    for (x, y, r) in circles:
                        # Draw the circle in the output image, then draw a rectangle
                        # Corresponding to the center of the circle
                        cv2.circle(output, (x, y), r, (0, 255, 0), 4)
                        cv2.rectangle(output, (x - 45, y - 45),
                                      (x + 45, y + 45), (0, 128, 255), -1)
                        ###

                    hsv = cv2.cvtColor(self.cv_image, cv2.COLOR_BGR2HSV)

                    # Average the HSV values over a 91x91 pixel window centered on the detected ball
                    s = [0, 0, 0]
                    for i in range(y - 45, y + 46):
                        for j in range(x - 45, x + 46):
                            s = s + hsv[i, j]
                            #print j, i, hsv[i, j]
                    s = s / ((45 + 46) * (45 + 46))  # 91 * 91 pixels
                    print s

                    # Derive the upper and lower HSV limits from the mean
                    if s[1] < 100:  # Saturation < 100
                        if (s[0] - 10) < 0:
                            self.colourLower = (0, 40, 40)
                        else:
                            self.colourLower = (s[0] - 10, 40, 40)
                    else:
                        if (s[0] - 10) < 0:
                            self.colourLower = (0, 100, 100)
                        else:
                            self.colourLower = (s[0] - 10, 100, 100)

                    self.colourUpper = (s[0] + 10, 255, 255)

                    print "Rango de colores. Limites superior e inferior: ", self.colourUpper, self.colourLower
                    # Compare the colour range with the ones in the data base and say to the kid what colour is it
                    if s[0] < 20:  # Red
                        colour = "rojo"
                    elif s[0] > 90 and s[0] < 110:  # Blue
                        colour = "azul"
                    elif s[0] >= 20 and s[0] <= 40:  # Yellow
                        colour = "amarillo"
                    elif s[0] >= 130 and s[0] <= 150:  # Violet
                        colour = "violeta"
                    else:  # The colour is not in the database
                        colour = "no"

                    self.ttsProxy.say("Ya la puedes coger")

                    # Wait
                    time.sleep(5)  # 5 seconds

                    # Go to posture stand
                    self.postureProxy.goToPosture("StandInit",
                                                  fractionMaxSpeed)

                    if colour == "no":
                        self.ttsProxy.say("No he visto bien el color")
                        self.ttsProxy.say("¿Me dejas ver de nuevo la pelota?")
                        repeate_recognition = 1
                    else:

                        self.ttsProxy.say(
                            "¿Quieres que siga una pelota de color " + colour +
                            "? ¿Es eso cierto?")

                        respuesta = 0
                        while (respuesta == 0):
                            # Start the speech recognition engine with user Test_ASR
                            self.asrProxy.subscribe("Test_ASR")
                            self.subscribed = True
                            print 'Speech recognition engine started'
                            time.sleep(5)
                            WordHeard = self.Memory.getData(
                                "WordRecognized")[0]
                            # Stop the speech recognition engine with user Test_ASR
                            self.asrProxy.unsubscribe("Test_ASR")
                            self.subscribed = False
                            print "contador: ", count
                            if WordHeard == "si":
                                respuesta = 1
                                self.ttsProxy.say(
                                    "¡Genial! Comencemos a jugar")
                                repeate_recognition = 0
                            elif WordHeard == "no":
                                respuesta = 1

                                count += 1
                                if count > 2:
                                    self.ttsProxy.say(
                                        "¡Ai ai ai! ¿De qué color es entonces?"
                                    )
                                    repeate_recognition = 0

                                    respuesta2 = 0
                                    while (respuesta2 == 0):
                                        repeate_recognition = 0
                                        # Start the speech recognition engine with user Test_ASR
                                        self.asrProxy.subscribe("Test_ASR")
                                        self.subscribed = True
                                        print 'Speech recognition engine started'
                                        time.sleep(5)
                                        WordHeard = self.Memory.getData(
                                            "WordRecognized")[0]
                                        # Stop the speech recognition engine with user Test_ASR
                                        self.asrProxy.unsubscribe("Test_ASR")
                                        self.subscribed = False
                                        if WordHeard == "rojo":
                                            respuesta2 = 1
                                            colour = "rojo"
                                            self.colourLower = (0, 100, 100)
                                            self.colourUpper = (20, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)
                                        elif WordHeard == "azul":
                                            respuesta2 = 1
                                            colour = "azul"
                                            self.colourLower = (90, 100, 100)
                                            self.colourUpper = (110, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)
                                        elif WordHeard == "violeta":
                                            respuesta2 = 1
                                            colour = "violeta"
                                            self.colourLower = (135, 40, 40)
                                            self.colourUpper = (155, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)
                                        elif WordHeard == "amarillo":
                                            respuesta2 = 1
                                            colour = "amarillo"
                                            self.colourLower = (20, 100, 100)
                                            self.colourUpper = (40, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)
                                        else:
                                            self.ttsProxy.say(
                                                "Disculpa, tendrás que volver a repetírmelo"
                                            )

                                else:
                                    self.ttsProxy.say(
                                        "¡Ups! Entonces tendré que volver a ver la pelota"
                                    )
                                    repeate_recognition = 1

                            else:
                                self.ttsProxy.say(
                                    "Perdona, no te he entendido bien. ¿Puedes repetirme tu respuesta?"
                                )

        self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)

        print "Tracker activado"
        self.ttsProxy.say(
            "Ahora muévete y enséñame la pelota, que ya voy a por ella")

        self.ttsProxy.say("Cuando quieras que la coja párate")
        self.tracking = 1

        while self.end is not True:
            time.sleep(1)

        self.postureProxy.goToPosture("Stand", fractionMaxSpeed)
        self.ttsProxy.say("¿Quieres volver a jugar?")

        respuesta3 = 0
        while (respuesta3 == 0):
            # Start the speech recognition engine with user Test_ASR
            self.asrProxy.subscribe("Test_ASR")
            self.subscribed = True
            print 'Speech recognition engine started'
            time.sleep(5)
            WordHeard = self.Memory.getData("WordRecognized")[0]
            # Stop the speech recognition engine with user Test_ASR
            self.asrProxy.unsubscribe("Test_ASR")
            self.subscribed = False
            if WordHeard == "si":
                respuesta3 = 1
                self.ttsProxy.say(
                    "¡Genial! Volvamos a jugar con una pelota de otro color")
                self.play_again = True
            elif WordHeard == "no":
                respuesta3 = 1
                self.ttsProxy.say("Joo, ¡Qué pena!")
                self.play_again = False
                raise KeyboardInterrupt
            else:
                self.ttsProxy.say(
                    "Perdona, no te he entendido bien. ¿Puedes repetirme tu respuesta?"
                )
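
The colour decision above reduces to a lookup on the mean hue (OpenCV hue range 0-179). A compact sketch with the same hard-coded bands as the example; the function name is hypothetical.

def classify_ball_colour(mean_hue):
    # Hue bands taken from the example; anything else is reported as "no" (unknown)
    if mean_hue < 20:
        return "rojo"       # red
    elif 20 <= mean_hue <= 40:
        return "amarillo"   # yellow
    elif 90 < mean_hue < 110:
        return "azul"       # blue
    elif 130 <= mean_hue <= 150:
        return "violeta"    # violet
    return "no"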
コード例 #27
0
ファイル: head_shoulder.py プロジェクト: MiyabiTane/kikai_B
def talker6(msg):
    global message_arrived
    message_arrived = msg
def headtouch_cb(msg):
    global head_touch_message_arrived
    head_touch_message_arrived=msg

if __name__=='__main__':
    try:
        message_arrived=False
        rospy.init_node('talker6')
        rospy.Subscriber('/camshift/track_box',RotatedRectStamped,talker6)
        rospy.Subscriber('/pepper_robot/head_touch',HeadTouch,headtouch_cb)
        pub=rospy.Publisher('/pepper_robot/pose/joint_angles',JointAnglesWithSpeed,queue_size=1)
        rospy.sleep(1)  # give the publisher a moment to connect
        while not rospy.is_shutdown():
            hello = JointAnglesWithSpeed()
            #if message_arrived:
            if head_touch_message_arrived and head_touch_message_arrived.button == 1 and head_touch_message_arrived.state == 1:
                print("message_arrived")
                if True:  # (message_arrived.rect.size.width * message_arrived.rect.size.height > 4000):
                    hello.joint_names.append("LShoulderPitch")
                    hello.joint_names.append("RShoulderPitch")

                    hello.speed = 0.05
                    #hello.relative = 0
                    hello.joint_angles = [-0.05, -0.05]

                    print(message_arrived)

            message_arrived = False
            if hello.joint_names:  # avoid publishing an empty joint command
                pub.publish(hello)
            rospy.sleep(0.1)  # throttle the loop
コード例 #28
0
    def __init__(self):
        #======================================================================#
        # Create Pepper Model for Inverse Kinematics
        #======================================================================#
        # Set Joints
        self.joint_names = [
            'LShoulderPitch', 'LShoulderRoll', 'LElbowYaw', 'LElbowRoll',
            'LWristYaw', 'RShoulderPitch', 'RShoulderRoll', 'RElbowYaw',
            'RElbowRoll', 'RWristYaw'
        ]
        self.joint_names_side = {
            'L': self.joint_names[0:5],
            'R': self.joint_names[5:10]
        }
        self.angle_setpoints = {}
        for key in self.joint_names:
            self.angle_setpoints[key] = 0.0

        # Set bounds for optimization
        self.bounds_arm = {
            'L': [(-2.0857, 2.0857), (0.0087, 1.5620), (-2.0857, 2.0857),
                  (-1.5620, -0.0087)],
            'R': [(-2.0857, 2.0857), (-1.5620, -0.0087), (-2.0857, 2.0857),
                  (0.0087, 1.5620)]
        }
        self.bounds_wrist = {
            'L': [(-1.8239, 1.8239)],
            'R': [(-1.8239, 1.8239)]
        }

        # These are the standard poses used for starting the optimization
        self.standard_poses = np.array(
            [[
                -self.bounds_arm['L'][0][0], 0.0, -np.pi / 2.0, -np.pi / 4.0,
                0.0, -self.bounds_arm['R'][0][0], 0.0, np.pi / 2.0,
                np.pi / 4.0, 0.0
            ],
             [
                 np.pi / 2.0, np.pi / 2.0, -np.pi / 2.0, -0.3, 0.0,
                 np.pi / 2.0, -np.pi / 2.0, np.pi / 2.0, 0.3, 0.0
             ], [0.2, 0.2, -1.0, -0.4, 0.0, 0.2, -0.2, 1.0, 0.4, 0.0]])

        # Pepper Model Object
        self.pepper_model = PepperModel()

        #======================================================================#
        # ROS Setup
        #======================================================================#
        #===== Start Node =====#
        rospy.init_node('vr_hand_controllers')

        #===== Parameters =====#
        # Loop rate
        self.frequency = 30
        self.rate = rospy.Rate(self.frequency)
        # Ratio of Pepper's arm to human arm
        self.arm_ratio = rospy.get_param('~arm_ratio', 1.0)
        rospy.loginfo('[{0}]: Arm ratio: {1}'.format(rospy.get_name(),
                                                     self.arm_ratio))
        self.velocity_linear_max = rospy.get_param('~velocity_linear_max', 0.3)
        self.velocity_angular_max = rospy.get_param('~velocity_angular_max',
                                                    0.4)
        rospy.loginfo(
            '[{0}]: Max linear velocity: {1}, Max angular velocity: {2}'.
            format(rospy.get_name(), self.velocity_linear_max,
                   self.velocity_angular_max))
        self.joystick_deadband = rospy.get_param('~joystick_deadband', 0.2)
        # Name of left controller frame from vive_ros
        self.left_name = rospy.get_param('~left_name',
                                         'controller_LHR_FFF73D47')
        # Name of right controller frame from vive_ros
        self.right_name = rospy.get_param('~right_name',
                                          'controller_LHR_FFFAFF45')
        # The user must calibrate the yaw offset so that the X axis points in
        # the forward direction. This is the fixed frame to which the
        # 'world_rotated' frame will be attached, but with the X axis pointing
        # forward.
        # NOTE: It is assumed this frame is level with the ground.
        self.fixed_frame = rospy.get_param('~fixed_frame', 'world')
        # Sets the joint speed of the arms
        self.fraction_max_arm_speed = rospy.get_param('~speed_fraction', 0.1)
        self.calibration_time = rospy.get_param('~calibration_time', 1.0)
        self.yaw_offset = rospy.get_param('/headset_control/yaw_offset', None)
        if self.yaw_offset is None:
            rospy.loginfo(
                '[{0}]: No yaw_offset parameter found at "/headset_control/yaw_offset". Using value set at "{0}/yaw_offset"'
                .format(rospy.get_name()))
            self.yaw_offset = rospy.get_param('~yaw_offset', None)
        else:
            # The headset_control yaw_offset is in the opposite direction from
            # the one desired here, so the value must be flipped.
            self.yaw_offset *= -1
        # Error weights used in the optimization
        self.position_weight = rospy.get_param('~position_weight', 100.0)
        self.orientation_weight = rospy.get_param('~orientation_weight', 1.0)

        #===== TF Listener and Broadcaster =====#
        self.tfListener = tf.TransformListener()
        self.tfBroadcaster = tf.TransformBroadcaster()

        #===== Publishers =====#
        self.joint_angles_msg = JointAnglesWithSpeed()
        self.joint_angles_msg.joint_names = self.joint_names
        self.joint_angles_msg.speed = self.fraction_max_arm_speed
        self.joint_angles_pub = rospy.Publisher(
            '/pepper_interface/joint_angles',
            JointAnglesWithSpeed,
            queue_size=3)
        self.cmd_vel_msg = Twist()
        self.cmd_vel_pub = rospy.Publisher('/pepper_interface/cmd_vel',
                                           Twist,
                                           queue_size=3)
        self.left_hand_grasp_msg = Float64()
        self.left_hand_grasp_pub = rospy.Publisher(
            '/pepper_interface/grasp/left', Float64, queue_size=3)
        self.right_hand_grasp_msg = Float64()
        self.right_hand_grasp_pub = rospy.Publisher(
            '/pepper_interface/grasp/right', Float64, queue_size=3)

        #===== Subscribers =====#
        self.left_controller_sub = rospy.Subscriber('/vive/' + self.left_name +
                                                    '/joy',
                                                    Joy,
                                                    self.leftCallback,
                                                    queue_size=1)
        self.right_controller_sub = rospy.Subscriber('/vive/' +
                                                     self.right_name + '/joy',
                                                     Joy,
                                                     self.rightCallback,
                                                     queue_size=1)

        #======================================================================#
        # Controller Setup and Calibration
        #======================================================================#
        #===== Controller Transforms =====#
        self.controller = {
            'L':
            Transform([0., 0., 0.], [0., 0., 0., 1.], 'world_rotated',
                      self.left_name),
            'R':
            Transform([0., 0., 0.], [0., 0., 0., 1.], 'world_rotated',
                      self.right_name)
        }
        self.controller_rotated = {
            'L':
            Transform([0., 0., 0.], [0., 0., 0., 1.], 'world_rotated',
                      'LHand_C'),
            'R':
            Transform([0., 0., 0.], [0., 0., 0., 1.], 'world_rotated',
                      'RHand_C')
        }
        # Multiply the controller rotation by this rotation to adjust it to the
        # correct frame.
        # The initial controller orientation is x: right, y: up, z: backwards;
        # the needed orientation is x: forward, y: left, z: up.
        self.controller_rotation = tf.transformations.quaternion_from_euler(
            0., math.pi / 2, -math.pi / 2, 'rzyx')

        #===== Yaw_offset calibration =====#
        # This rotation is applied to the controllers to rotate them about the
        # world frame to be oriented such that the operator's forward position
        # is directly along the X axis of the world. This new frame is published
        # as 'world_rotated'
        self.world_rotated_position = [0., 0., 0.]
        self.world_rotated = Transform(self.world_rotated_position,
                                       [0., 0., 0., 1.], self.fixed_frame,
                                       'world_rotated')

        self.running_calibration = False

        # If the 'headset_control' node has not set the yaw_offset parameter and
        # it is not provided as a parameter for this node, then it must be
        # calibrated here.
        if self.yaw_offset is None:
            self.yaw_offset = 0
            rospy.loginfo(
                '[{0}]: No yaw_offset parameter found at {0}/yaw_offset. You will need to perform a calibration.'
                .format(rospy.get_name()))
            print('\n{0}\n{1}{2}\n{0}'.format(60 * '=', 16 * ' ',
                                              'Orientation Calibration'))
            print(
                'Point both controllers towards the front direction. Press the side button on the LEFT controller and hold for {0} seconds.'
                .format(self.calibration_time))

            # Wait for user to press the side button and for calibration to
            # complete
            self.orientation_calibration = False
            while not self.orientation_calibration:
                self.rate.sleep()

        # Publish the new 'world_rotated' frame
        self.world_rotated.broadcast(self.tfBroadcaster)

        #===== Hand Origin Calibration =====#
        # Controller Origin
        self.controller_origin = {'L': [0., 0., 0.], 'R': [0., 0., 0.]}
        print('\n{0}\n{1}{2}\n{0}'.format(60 * '=', 16 * ' ',
                                          'Position Calibration'))
        print(
            'Point arms straight towards the ground. Press the side button on the RIGHT controller and hold position for {0} seconds.'
            .format(self.calibration_time))

        # Wait for user to press the side button and for calibration to complete
        self.position_calibration = False
        while not self.position_calibration:
            self.rate.sleep()

        #======================================================================#
        # Debugging Frames
        #======================================================================#
        # Transform to link Pepper's base_link to the world frame
        self.base_link = Transform([0., 0., 0.82], [0., 0., 0., 1.],
                                   'world_rotated', 'base_link_V')

        # DEBUG: Test pose for controllers
        self.test_pose = {
            'L':
            Transform([0.7, 0.3, 1.2],
                      [-0.7427013, 0.3067963, 0.5663613, 0.1830457],
                      'world_rotated', 'LHand_C'),
            'R':
            Transform([0.8, -0.3, 1.5],
                      [0.771698, 0.3556943, -0.5155794, 0.1101889],
                      'world_rotated', 'RHand_C')
        }

        # Display the origin and setpoint for human and pepper
        self.pepper_origin = {
            'L':
            Transform(self.pepper_model.hand_origin['L'], [0., 0., 0., 1.],
                      'base_link_V', 'LOrigin_P'),
            'R':
            Transform(self.pepper_model.hand_origin['R'], [0., 0., 0., 1.],
                      'base_link_V', 'ROrigin_P')
        }
        self.pepper_setpoint = {
            'L':
            Transform([0., 0., 0.], [0., 0., 0., 1.], 'base_link_V',
                      'LSetpoint_P'),
            'R':
            Transform([0., 0., 0.], [0., 0., 0., 1.], 'base_link_V',
                      'RSetpoint_P')
        }
        self.human_origin = {
            'L':
            Transform(self.controller_origin['L'], [0., 0., 0., 1.],
                      'world_rotated', 'LOrigin_H'),
            'R':
            Transform(self.controller_origin['R'], [0., 0., 0., 1.],
                      'world_rotated', 'ROrigin_H')
        }
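
As a minimal sketch of the 'world_rotated' idea above: the calibrated yaw offset is a single rotation about the vertical axis of the fixed frame, so the frame's orientation can be built directly. The helper below assumes tf and a yaw offset in radians, and is illustrative rather than the node's actual calibration code.

import tf

def world_rotated_quaternion(yaw_offset):
    # Rotate about Z so the operator's forward direction becomes the +X axis
    # of the 'world_rotated' frame
    return tf.transformations.quaternion_from_euler(0.0, 0.0, yaw_offset)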
コード例 #29
0
    def __init__(self):
        self.position = JointAnglesWithSpeed()
        self.arm_pub = rospy.Publisher('/pepper_robot/pose/joint_angles',
                                       JointAnglesWithSpeed,
                                       queue_size=1)
        rospy.loginfo("Connected to Arm Control.")
コード例 #30
0
	def track_bounding_box(self, polygon):
		self.hunt_initiated = True
		self.lost_object_counter = 20
		
		joint = JointAnglesWithSpeed()
	
		joint.joint_names.append("HeadYaw")
		joint.joint_names.append("HeadPitch")

		#~ print polygon
		joint.speed = 0.1
		joint.relative = True

		target_x = polygon.points[0].x + 0.5 * polygon.points[1].x
		target_y = polygon.points[0].y + 0.5 * polygon.points[1].y
		
		sub_x = target_x - 320 / 2.0
		sub_y = target_y - 240 / 2.0
 
		var_x = (sub_x / 160.0)
		var_y = (sub_y / 120.0)
		
		joint.joint_angles.append(-var_x * 0.05)
		joint.joint_angles.append(var_y * 0.05)
				
		print self.rh.sensors.getSonarsMeasurements()
		
		ans = self.rh.humanoid_motion.getJointAngles(['HeadYaw', 'HeadPitch'])['angles']
		head_yaw = ans[0]
		head_pitch = ans[1]
		#~ print 'HeadPitch' + str(head_pitch)
		#~ print 'HeadYaw' + str(head_yaw)
		
		
		sonars = self.rh.sensors.getSonarsMeasurements()['sonars']
		
		if (sonars['front_left'] <= self.sonar_value or sonars['front_right'] <= self.sonar_value) and self.lock_motion == False:
			self.lock_motion = True
			rospy.loginfo("Locked due to sonars")
			self.find_distance_with_sonars = True
			
		elif (head_pitch >= self.head_pitch_value or head_pitch <= -self.head_pitch_value) and self.lock_motion == False:
			self.lock_motion = True
			rospy.loginfo("Locked due to head pitch")
			
		print "self.lock_motion:", self.lock_motion
		
		if self.lock_motion is False:
			self.theta_vel = head_yaw * 0.1
			if -self.head_yaw_value < head_yaw < self.head_yaw_value:
				self.x_vel = 0.3
			self.pub.publish(joint)
		else:
			self.x_vel = 0
			self.y_vel = 0
			self.theta_vel = 0
			
		if self.lock_motion is True:
			#~ self.disableObjectTracking()
			
			print "sonars:", sonars['front_left'], sonars['front_right']
			print "Head_pitch:",head_pitch
			print "Head_yaw:",head_yaw
			print "Sub_x:", sub_x
			print "Sub_y:", sub_y
			
			dx = 0
			sy = 0
			if self.find_distance_with_sonars is True and\
				(sonars['front_left'] <= self.sonar_value or sonars['front_right'] <= self.sonar_value):				
				if (sonars['front_left'] <= sonars['front_right']):
					dx = sonars['front_left']
					sy = +1
				else:
					dx = sonars['front_right']
					sy = -1
			else:
				x = (sub_y * 47.6 * 3.14159 / 180) / 240.0  # 47.6 deg: camera vertical field of view
				print "x=", x
				total_x = head_pitch + x + 0.021
				print "total_x=",total_x
				dx = 0.53 / math.tan(total_x)
				sy = -1
				
			print "dx= ",dx
			y = (sub_x * 60.9 * 3.14159 / 180) / 320.0  # 60.9 deg: camera horizontal field of view
			print "y=", y
			total_y = head_yaw + sy * y
			print "total_y=",total_y
			dy = dx * math.tan(total_y)
			print "dy= " ,dy
			
			self.disableObjectTracking()
			
		battery = self.rh.sensors.getBatteryLevels()['levels'][0]
		
		if battery < 25:
			self.rh.audio.setVolume(100)
			self.rh.audio.speak("My battery is low")
			self.sub.unregister()
			self.rh.humanoid_motion.goToPosture("Sit", 0.7)
			self.rh.motion.disableMotors()
			sys.exit(1)
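
The distance estimate in this snippet is plain trigonometry: the vertical pixel offset is converted to an angle using the camera's vertical field of view (47.6 degrees over 240 pixels), added to the head pitch, and the resulting ray is intersected with the floor. A minimal sketch, assuming the same 0.53 m camera height and 0.021 rad pitch offset used above:

import math

def estimate_forward_distance(sub_y, head_pitch, camera_height=0.53):
    # Angle below the optical axis corresponding to the vertical pixel offset
    pixel_angle = (sub_y * 47.6 * math.pi / 180.0) / 240.0
    total_pitch = head_pitch + pixel_angle + 0.021  # small fixed camera pitch offset
    # The ray from the camera meets the floor at distance h / tan(pitch)
    return camera_height / math.tan(total_pitch)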
コード例 #31
0
    def facecallback(self, event):
        # Timer callback that processes the latest image from the top camera of the nao_robot

        # Use the bridge to convert the ROS Image message to an OpenCV image

        if self.preparado is not True:
            return

        cv_image = self.bridge.imgmsg_to_cv2(self.ros_data,
                                             desired_encoding="passthrough")

        face_cascade = cv2.CascadeClassifier(
            'haarcascade_frontalface_default.xml')

        gray = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)

        faces = face_cascade.detectMultiScale(gray,
                                              scaleFactor=1.2,
                                              minNeighbors=5,
                                              minSize=(30, 30),
                                              flags=cv2.cv.CV_HAAR_SCALE_IMAGE)

        # Draw a rectangle around the detected faces
        for (x, y, w, h) in faces:
            cv2.rectangle(cv_image, (x, y), (x + w, y + h), (255, 0, 0),
                          2)  # The center of the rectangle is: (x+w/2,y+h/2)

        # Look for the biggest face, the one to track
        if len(faces) > 0:
            c = faces[0]
            for i in faces:
                if i[2] > c[2]:
                    c = i

            cv2.rectangle(cv_image, (c[0], c[1]), (c[0] + c[2], c[1] + c[3]),
                          (255, 255, 0), 2)

            center = (c[0] + c[2] / 2, c[1] + c[3] / 2)  # Center of the face
            cv2.circle(cv_image, center, 3, (0, 0, 255), -1)

        else:
            center = (cv_image.shape[1] / 2, cv_image.shape[0] / 2)  # Image center
            print "No faces found"

        # Convert the image format from OpenCV to ROS and publish the modified image on a new topic
        ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
        self.publisher.publish(ros_image)

        ### Work out the head movement required to keep the detected face in the center of the image
        # Joint command message initialization
        joint = JointAnglesWithSpeed()
        joint.joint_names.append("HeadYaw")
        joint.joint_names.append("HeadPitch")
        joint.speed = 0.1
        joint.relative = True

        # Get the center of the detected object and calculate the head turns
        target_x = center[0]  # centroid x coordinate
        target_y = center[1]  # centroid y coordinate

        # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2) = (320/2, 240/2)    (x, y) coordinates
        sub_x = target_x - cv_image.shape[1] / 2
        sub_y = target_y - cv_image.shape[0] / 2

        var_x = (sub_x / float(cv_image.shape[1] / 2))
        var_y = (sub_y / float(cv_image.shape[0] / 2))

        # Check the head position before moving it, to restrict its movement and prevent it from colliding with the shoulders

        # Predict the new relative Y position, the one restricted to avoid collisions
        new_position_y = self.positions[1] + var_y * 0.10

        if (new_position_y <= -0.45):  # Recalculate the maximum variation
            var_y = 0
        elif (new_position_y >= 0.3):
            var_y = 0

        joint.joint_angles.append(-var_x * 0.35)
        joint.joint_angles.append(var_y * 0.15)

        self.joint_pub.publish(joint)
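
The same HeadPitch restriction appears in both head-tracking callbacks, so it could be factored into a helper. A minimal sketch, with the safe band (-0.45 to 0.3 rad) taken from the examples and an illustrative function name:

def clamp_pitch_step(current_pitch, step, lower=-0.45, upper=0.3):
    # Drop the relative step entirely if it would leave the safe band,
    # mirroring the var_y = 0 logic in the callbacks above
    if not (lower < current_pitch + step < upper):
        return 0.0
    return step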