def handle_enable_leaning_forward(self, msg):
        self.enable_leaning_forward = msg.data
        if msg.data:
            self.motionProxy.setBreathEnabled('Legs', False)

            cmd_msg = JointAnglesWithSpeed()
            cmd_msg.header.stamp = rospy.Time.now()
            cmd_msg.joint_names.append('LHipPitch')
            cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
            cmd_msg.joint_names.append('RHipPitch')
            cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
            cmd_msg.speed = 0.06
            cmd_msg.relative = 0

            self.pub_joint_cmd.publish(cmd_msg)
        else:
            cmd_msg = JointAnglesWithSpeed()
            cmd_msg.header.stamp = rospy.Time.now()
            cmd_msg.joint_names.append('LHipPitch')
            cmd_msg.joint_angles.append(0.0)
            cmd_msg.joint_names.append('RHipPitch')
            cmd_msg.joint_angles.append(0.0)
            cmd_msg.speed = 0.06
            cmd_msg.relative = 0

            # publish the return-to-neutral command before handing the legs
            # back to the breathing controller
            self.pub_joint_cmd.publish(cmd_msg)
            self.motionProxy.setBreathEnabled('Legs', True)

    def handle_idle_status(self, msg):
        if msg.enabled:
            self.motionProxy.setBreathEnabled('Body', True)
            self.motionProxy.setBreathEnabled('Head', False)

            if self.enable_leaning_forward:
                self.motionProxy.setBreathEnabled('Legs', False)
            self.motionProxy.setBreathConfig([['Bpm', 12], ['Amplitude', 0.8]])

            if self.enable_leaning_forward:
                rospy.sleep(0.6)

                cmd_msg = JointAnglesWithSpeed()
                cmd_msg.header.stamp = rospy.Time.now()
                cmd_msg.joint_names.append('LHipPitch')
                cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
                cmd_msg.joint_names.append('RHipPitch')
                cmd_msg.joint_angles.append(-9.0 * math.pi / 180.0)
                cmd_msg.speed = 0.1
                cmd_msg.relative = 0

                self.pub_joint_cmd.publish(cmd_msg)
                rospy.sleep(0.2)

        else:
            self.postureProxy.goToPosture("Stand", 0.2)

            self.motionProxy.setBreathEnabled('Body', False)
            self.motionProxy.setBreathEnabled('Head', False)

            if self.enable_leaning_forward:
                self.motionProxy.setBreathEnabled('Legs', False)
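For context, a minimal sketch of how these handlers might be wired up. Only self.pub_joint_cmd, self.motionProxy and self.postureProxy are implied by the snippet above; the topic names, the Bool message type, and the constructor are assumptions (handle_idle_status uses a custom message with an 'enabled' field that is not shown, so it is not wired here):

import rospy
from naoqi import ALProxy
from std_msgs.msg import Bool
from naoqi_bridge_msgs.msg import JointAnglesWithSpeed

class LeaningForwardNode(object):
    def __init__(self, robot_ip, robot_port=9559):
        self.enable_leaning_forward = False
        # NAOqi proxies used by the handlers above
        self.motionProxy = ALProxy("ALMotion", robot_ip, robot_port)
        self.postureProxy = ALProxy("ALRobotPosture", robot_ip, robot_port)
        # publisher consumed by the naoqi_driver joint-angle interface
        self.pub_joint_cmd = rospy.Publisher('/joint_angles', JointAnglesWithSpeed, queue_size=10)
        # hypothetical topic name; the real one is not shown in the snippet
        rospy.Subscriber('/enable_leaning_forward', Bool, self.handle_enable_leaning_forward)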
Example #3
def larm(filename):
    # filename: path to a CSV whose columns 2-5 (0-based) hold the four
    # left-arm joint targets in radians
    names = ['LShoulderPitch', 'LShoulderRoll', 'LElbowYaw', 'LElbowRoll']
    pub = rospy.Publisher('/joint_angles', JointAnglesWithSpeed, queue_size=1)
    rospy.init_node('nao_larm', anonymous=True)
    rate = rospy.Rate(10)  # 10hz

    with open(filename, 'rU') as csvfile:
        csvreader = csv.reader(csvfile)
        for row in csvreader:
            angles = [float(v) for v in row[2:6]]

            print names, angles
            msg = JointAnglesWithSpeed()
            msg.joint_names = names
            msg.joint_angles = angles
            msg.speed = 1.0

            msg.header.frame_id = 'Fixed Frame'
            pub.publish(msg)
            rate.sleep()
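The CSV layout is not shown in the source; larm() only assumes that columns 2-5 (0-based) hold the four joint targets in radians. A hypothetical row illustrating that assumption:

# time,frame,LShoulderPitch,LShoulderRoll,LElbowYaw,LElbowRoll
# 0.0,0,1.40,0.30,-1.20,-0.50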
Example #4
 def publish_joints(self, names, angles):
     m = JointAnglesWithSpeed()
     m.header.stamp = rospy.Time.now()
     m.joint_names = names
     m.joint_angles = angles
     m.relative = 0
     m.speed = 1
     self.__pub_joints.publish(m)
Example #5
    def callback_waving(self, location):
        '''
        Takes the information that waving has been detected and decides whether
        Pepper has to turn his head (when the waving is not in the center of
        view) or whether the current head position can be kept and published.
        '''
        a, b = location.data.split(" ")
        self.counter2 = 0
        locationlist = [int(a), int(b)]
        if locationlist[0] < 0 and locationlist[1] < 0:
            newpos = abs(locationlist[0] - locationlist[1])
            if newpos > 10:
                newpos = 10
            msg = JointAnglesWithSpeed()
            msg.joint_names = ["HeadYaw"]
            msg.joint_angles = [newpos * almath.TO_RAD]
            msg.speed = 0.05
            msg.relative = 1
            self.joint_angles_pub.publish(msg)

        elif locationlist[0] > 0 and locationlist[1] > 0:
            newpos = abs(locationlist[0] - locationlist[1])
            if newpos > 10:
                newpos = 10
            msg = JointAnglesWithSpeed()
            msg.joint_names = ["HeadYaw"]
            msg.joint_angles = [-newpos * almath.TO_RAD]
            msg.speed = 0.05
            msg.relative = 1
            self.joint_angles_pub.publish(msg)

        else:
            self.counter = self.counter + 1

            # Report that a waving person has been found in the direction Pepper
            # is looking. The numbers at the end of the sent string give the
            # current angle of Pepper's head, not the direction to turn to.
            # This uses a custom message type that has not been defined yet.

            msg = PositionCommand()
            msg.command = "go"
            msg.location = "waving " + str(self.position)
            self.go_to_pub.publish(msg)
Example #6
    def set_joint_angles(self, joint_name, angle_value):

        joint_angles_to_set = JointAnglesWithSpeed()
        joint_angles_to_set.joint_names.append(joint_name) # each joint has a specific name, look into the joint_state topic or google
        joint_angles_to_set.joint_angles.append(angle_value) # the joint values have to be in the same order as the names!!
        joint_angles_to_set.relative = False # if true you can increment positions
        joint_angles_to_set.speed = 0.1 # keep this low if you can
        self.jointPub.publish(joint_angles_to_set)
Example #7
 def stand(self):
     print "STANDING"
     self.__call_service(
         "/naoqi_driver/robot_posture/go_to_posture", GoToPosture,
         GoToPostureRequest(GoToPostureRequest.STAND_INIT, 0.5))
     j = JointAnglesWithSpeed()
     j.joint_names = ['HeadYaw', 'HeadPitch']
     j.joint_angles = [0., -.5]
     j.speed = .05
     self.joints.publish(j)
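stand() relies on a private __call_service helper that is not shown. A plausible sketch under that assumption, using only standard rospy service calls:

 def __call_service(self, name, service_type, request):
     # assumed helper: wait for the service to appear, then call it once
     rospy.wait_for_service(name)
     proxy = rospy.ServiceProxy(name, service_type)
     return proxy(request)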
Example #8
 def Elbow_yaw_R_moveto(self, arm_angle):  # controls right elbow movement
     joint_angles_to_set = JointAnglesWithSpeed()
     joint_angles_to_set.joint_names.append(
         "RElbowYaw"
     )  # each joint has a specific name, look into the joint_state topic or google
     joint_angles_to_set.joint_angles.append(
         arm_angle
     )  # the joint values have to be in the same order as the names!!
     joint_angles_to_set.relative = False  # if true you can increment positions
     joint_angles_to_set.speed = 0.1  # keep this low if you can
     self.jointPub.publish(joint_angles_to_set)
Example #9
 def Shoulder_roll_L_moveto(self, angle):  # controls left shoulder movement
     joint_angles_to_set = JointAnglesWithSpeed()
     joint_angles_to_set.joint_names.append(
         "LShoulderRoll"
     )  # each joint has a specific name, look into the joint_state topic or google
     joint_angles_to_set.joint_angles.append(
         angle
     )  # the joint values have to be in the same order as the names!!
     joint_angles_to_set.relative = False  # if true you can increment positions
     joint_angles_to_set.speed = 0.1  # keep this low if you can
     self.jointPub.publish(joint_angles_to_set)
Example #10
    def set_head_angles(self, head_angle):  # controls head movement

        joint_angles_to_set = JointAnglesWithSpeed()
        joint_angles_to_set.joint_names.append(
            "HeadYaw"
        )  # each joint has a specific name, look into the joint_state topic or google
        joint_angles_to_set.joint_angles.append(
            head_angle
        )  # the joint values have to be in the same order as the names!!
        joint_angles_to_set.joint_names.append("HeadPitch")
        joint_angles_to_set.joint_angles.append(head_angle)
        joint_angles_to_set.relative = False  # if true you can increment positions
        joint_angles_to_set.speed = 0.1  # keep this low if you can
        self.jointPub.publish(joint_angles_to_set)  # one message commands both joints
Example #11
    def handle_gaze_controller(self, event):
        with self.lock:
            try:
                target_pt = PointStamped()
                target_pt.header.stamp = rospy.Time()
                target_pt.header.frame_id = self.target.target_point.header.frame_id
                target_pt.point.x = self.target.target_point.point.x
                target_pt.point.y = self.target.target_point.point.y
                target_pt.point.z = self.target.target_point.point.z
                point_transformed = self.tf_buf.transform(target_pt, 'gaze')
            except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
                rospy.logdebug(e)
                rospy.logwarn("[%s] Can't transform from gaze to target. [ %s - %s ]" % (rospy.get_name(), 'gaze', self.target.target_point.header.frame_id))
                return

        pan_angle = math.atan2(point_transformed.point.y, point_transformed.point.x)
        tilt_angle = math.atan2(point_transformed.point.z, point_transformed.point.x)

        delta_pan_angle = pan_angle
        delta_tilt_angle = -tilt_angle

        cmd_pan_angle = 0.0
        cmd_tilt_angle = 0.0

        if abs(delta_pan_angle) < (2.0 * math.pi / 180.0):
            cmd_pan_angle = 0.0
        else:
            cmd_pan_angle = 0.3 * delta_pan_angle

        if abs(delta_tilt_angle) < (2.0 * math.pi / 180.0):
            cmd_tilt_angle = 0.0
        else:
            cmd_tilt_angle = 0.3 * delta_tilt_angle


        cmd_msg = JointAnglesWithSpeed()
        cmd_msg.header.stamp = rospy.Time.now()
        cmd_msg.joint_names.append('HeadYaw')
        cmd_msg.joint_angles.append(cmd_pan_angle)
        cmd_msg.joint_names.append('HeadPitch')
        cmd_msg.joint_angles.append(cmd_tilt_angle)
        cmd_msg.speed = self.target.max_speed
        cmd_msg.relative = 1

        self.pub_gaze_cmd.publish(cmd_msg)
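The handler above boils down to a deadband plus a proportional term on the pan/tilt error. A distilled, self-contained sketch of that logic (the 2-degree deadband and 0.3 gain are taken from the code above; the function name is ours):

import math

DEADBAND = 2.0 * math.pi / 180.0  # ignore errors under 2 degrees
GAIN = 0.3                        # proportional gain used above

def relative_head_command(x, y, z):
    # pan/tilt error of a target point expressed in the 'gaze' frame
    pan = math.atan2(y, x)
    tilt = -math.atan2(z, x)
    yaw = 0.0 if abs(pan) < DEADBAND else GAIN * pan
    pitch = 0.0 if abs(tilt) < DEADBAND else GAIN * tilt
    return yaw, pitch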
Example #12
 def idle(self):
     j = JointAnglesWithSpeed()
     j.joint_names = ['HeadYaw', 'HeadPitch']
     j.joint_angles = [.5, -.5]
     j.speed = .05
     pub = rospy.Publisher("/joint_angles",
                           JointAnglesWithSpeed,
                           queue_size=10)
     while not rospy.is_shutdown() and self.is_idle:
         try:
             rospy.wait_for_message("/naoqi_driver_node/people_detected",
                                    PersonDetectedArray,
                                    timeout=5.)
         except (rospy.ROSException, rospy.ROSInterruptException):
             # nobody seen for 5 s: glance somewhere new at random
             j.joint_angles = [
                 np.random.rand() - .5, -(np.random.rand() * .6)
             ]
             pub.publish(j)
Example #13
    def track_bounding_box(self, polygon):
        self.hunt_initiated = True

        # Set the timeout counter to 2 seconds
        self.lost_object_counter = 20
        
        # Velocities message initialization
        joint = JointAnglesWithSpeed()
        joint.joint_names.append("HeadYaw")
        joint.joint_names.append("HeadPitch")
        joint.speed = 0.1
        joint.relative = True

        # Get center of detected object and calculate the head turns
        target_x = polygon.points[0].x + 0.5 * polygon.points[1].x
        target_y = polygon.points[0].y + 0.5 * polygon.points[1].y

        sub_x = target_x - 320 / 2.0
        sub_y = target_y - 240 / 2.0

        var_x = (sub_x / 160.0)
        var_y = (sub_y / 120.0)

        joint.joint_angles.append(-var_x * 0.05)
        joint.joint_angles.append(var_y * 0.05)

        ans = self.rh.humanoid_motion.getJointAngles(['HeadYaw', 'HeadPitch'])
        head_pitch = ans['angles'][1]
        head_yaw = ans['angles'][0]

        # Get the sonar measurements
        sonars = self.rh.sensors.getSonarsMeasurements()

        # Check if NAO is close to an obstacle
        if sonars['sonars']['front_left'] <= 0.3 or sonars['sonars']['front_right'] <= 0.3:
            self.lock_motion = True
            rospy.loginfo("Locked due to sonars")
        # Check if NAO's head is pitched too far up or down
        elif head_pitch >= 0.4 or head_pitch <= -0.4:
            self.lock_motion = True
            rospy.loginfo("Locked due to head pitch")
        # Else approach the object
        elif self.lock_motion is False:
            self.theta_vel = head_yaw * 0.1
            if -0.2 < head_yaw < 0.2:
                print "Approaching"
                self.x_vel = 0.5
                self.y_vel = 0.0
            else:
                self.x_vel = 0.0
                self.y_vel = 0.0
                print "Centering"
            self.joint_pub.publish(joint)

        # Check the battery levels
        batt = self.rh.sensors.getBatteryLevels()
        battery = batt['levels'][0]
        if battery < 25:
            self.rh.audio.setVolume(100)
            self.rh.audio.speak("My battery is low")
            self.predator_sub.unregister()
            self.rh.humanoid_motion.goToPosture("Sit", 0.7)
            self.rh.motion.disableMotors()
            sys.exit(1)
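The pixel-to-angle mapping above normalizes the error to the 320x240 image and scales it into small relative head steps. A distilled sketch of just that mapping (the function name and defaults are ours, matching the constants above):

def pixel_error_to_relative_angles(cx, cy, width=320, height=240, gain=0.05):
    # normalized image-plane error in [-1, 1]
    var_x = (cx - width / 2.0) / (width / 2.0)
    var_y = (cy - height / 2.0) / (height / 2.0)
    # yaw opposes the pixel x error; pitch follows the pixel y error
    return -var_x * gain, var_y * gain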
Example #14
    def callback_camera(self, ros_data):

        if self.tracking == 0:

            # When the ball is detected, save the desired frame and process the image to obtain the HSV values
            self.cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

        elif self.tracking == 1:

            # Use the bridge to convert the ROS Image message to OpenCV message
            cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

            # convert the cv_image to the HSV color space
            hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

            # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
            mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
            mask = cv2.erode(mask, None, iterations=2)
            mask = cv2.dilate(mask, None, iterations=2)

            # find contours in the mask and initialize the current (x, y) center of the ball
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
            center = None

            # only proceed if at least one contour was found
            if len(cnts) > 0:
                # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
                c = max(cnts, key=cv2.contourArea)
                ((x, y), radius) = cv2.minEnclosingCircle(c)
                M = cv2.moments(c)
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

                # only proceed if the radius meets a minimum size
                if radius > 10:
                    # draw the circle and centroid on the cv_image
                    cv2.circle(cv_image, (int(x), int(y)), int(radius),
                               (0, 255, 255), 2)
                    cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

            # Use the bridge to convert the OpenCV message to ROS Image message. Publish the modified image on a new topic
            ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
            self.publisher.publish(ros_image)

            # Work out the head movement required to keep the ball in the image center
            # Velocities message initialization
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("HeadYaw")
            joint.joint_names.append("HeadPitch")
            joint.speed = 0.1
            joint.relative = True  #True

            # Get center of detected object and calculate the head turns
            target_x = center[0]  # centroid x coordinate
            target_y = center[1]  # centroid y coordinate

            # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2)=(320/2,240/2)    (x,y) coordinates
            sub_x = target_x - cv_image.shape[1] / 2
            sub_y = target_y - cv_image.shape[0] / 2

            var_x = (sub_x / float(cv_image.shape[1] / 2))
            var_y = (sub_y / float(cv_image.shape[0] / 2))

            # Check the Head position before moving it to restrict its movement and prevent it from crashing with the shoulders

            # Predict the new relative Y position, the one restricted to avoid crashes
            new_position_y = self.positions[1] + var_y * 0.10

            if (new_position_y <= -0.45):
                var_y = 0
            elif (new_position_y >= 0.3):
                var_y = 0

            joint.joint_angles.append(-var_x * 0.25)
            joint.joint_angles.append(var_y * 0.10)

            self.joint_pub.publish(joint)

            # Movement towards the ball (walk)
            walk = Twist()

            print radius
            print self.positions[0]  # HeadYaw <-->
            if radius < 50:  # Walk forward if the ball is far. When radius = 50, Nao is approx. 25 cm from the ball
                # Walk
                if self.positions[0] < (-0.3):
                    walk.linear.x = 0.3
                    walk.linear.y = -0.3

                elif (self.positions[0] >= -0.3) and (self.positions[0] < 0.3):
                    walk.linear.x = 0.3
                    walk.linear.y = 0.0

                elif self.positions[0] >= 0.3:
                    walk.linear.x = 0.3
                    walk.linear.y = 0.3

                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

            else:
                # Stop
                walk.linear.x = 0.0
                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

            self.walk_pub.publish(walk)
Example #15
    def callback_tactile(self, data):
        rospy.loginfo("I heard %u %u" % (data.button, data.state))

        if data.button == 1 and data.state == 1:
            joint_pub = rospy.Publisher('/joint_angles',
                                        JointAnglesWithSpeed,
                                        queue_size=10)

            rospy.loginfo("Pulsado %u , pasamos" % (data.button))
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("LShoulderRoll")
            joint.joint_angles.append(-0.3)
            joint.joint_names.append("LShoulderPitch")
            joint.joint_angles.append(-0.2)
            joint.joint_names.append("LElbowYaw")
            joint.joint_angles.append(-1)
            joint.joint_names.append("LElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("LWristYaw")
            joint.joint_angles.append(-0.5)
            joint.joint_names.append("LHand")
            joint.joint_angles.append(0)

            joint.joint_names.append("RShoulderRoll")
            joint.joint_angles.append(0.3)
            joint.joint_names.append("RShoulderPitch")
            joint.joint_angles.append(-0.2)
            joint.joint_names.append("RElbowYaw")
            joint.joint_angles.append(1)
            joint.joint_names.append("RElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("RWristYaw")
            joint.joint_angles.append(0.5)
            joint.joint_names.append("RHand")
            joint.joint_angles.append(0)

            joint.joint_names.append("HeadYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("HeadPitch")
            joint.joint_angles.append(0)

            joint.speed = 0.1
            joint.relative = False

            joint_pub.publish(joint)

            # Deactivate tracking mode because HSV range recognition is in progress
            self.tracking = 0

            print "Tracking deactivated"

        elif data.button == 2 and data.state == 1:
            # Save a frame from the camera to detect the ball colour
            output = self.cv_image.copy(
            )  # draw our detected circles without destroying the original image.
            gray = cv2.cvtColor(
                self.cv_image, cv2.COLOR_BGR2GRAY
            )  # the cv2.HoughCircles function requires an 8-bit, single channel image, so we'll go ahead and convert from the RGB color space to grayscale

            # detect circles in the image
            circles = cv2.HoughCircles(gray,
                                       cv2.cv.CV_HOUGH_GRADIENT,
                                       1.2,
                                       100,
                                       param2=80,
                                       minRadius=50,
                                       maxRadius=200)

            if circles is None:
                print 'No ball detected'
            else:
                print circles
                # ensure at least some circles were found

            if circles is not None:
                # convert the (x, y) coordinates and radius of the circles to integers allowing us to draw them on our output image
                circles = np.round(circles[0, :]).astype("int")

                # loop over the (x, y) coordinates and radius of the circles
                for (x, y, r) in circles:
                    # draw the circle in the output image, then draw a rectangle
                    # corresponding to the center of the circle
                    cv2.circle(output, (x, y), r, (0, 255, 0), 4)
                    cv2.rectangle(output, (x - 45, y - 45), (x + 45, y + 45),
                                  (0, 128, 255), -1)

                hsv = cv2.cvtColor(self.cv_image, cv2.COLOR_BGR2HSV)

                # Calculate the mean HSV values inside a rectangle around the ball center.
                s = [0, 0, 0]
                for i in range(y - 45, y + 46):
                    for j in range(x - 45, x + 46):
                        s = s + hsv[i, j]
                s = s / ((45 + 46) * (45 + 46))
                print s

                # Calculate the upper and lower limits from the HSV medium values
                if s[1] < 100:  # Saturation<100
                    if (s[0] - 10) < 0:
                        self.colourLower = (0, 40, 40)
                    else:
                        self.colourLower = (s[0] - 10, 40, 40)
                else:
                    if (s[0] - 10) < 0:
                        self.colourLower = (0, 100, 100)
                    else:
                        self.colourLower = (s[0] - 10, 100, 100)

                self.colourUpper = (s[0] + 10, 255, 255)

                print self.colourUpper, self.colourLower

        elif data.button == 3 and data.state == 1:
            joint_pub = rospy.Publisher('/joint_angles',
                                        JointAnglesWithSpeed,
                                        queue_size=10)

            rospy.loginfo("Pulsado %u , pasamos" % (data.button))
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("LShoulderRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("LShoulderPitch")
            joint.joint_angles.append(1.3)
            joint.joint_names.append("LElbowYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("LElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("LWristYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("LHand")
            joint.joint_angles.append(0)

            joint.joint_names.append("RShoulderRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("RShoulderPitch")
            joint.joint_angles.append(1.3)
            joint.joint_names.append("RElbowYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("RElbowRoll")
            joint.joint_angles.append(0)
            joint.joint_names.append("RWristYaw")
            joint.joint_angles.append(0)
            joint.joint_names.append("RHand")
            joint.joint_angles.append(0)

            joint.speed = 0.1
            joint.relative = False

            joint_pub.publish(joint)

            # colour range detected. Activate tracking mode
            self.tracking = 1

            print "Tracking ON"
Example #16
    def callback_camera(self, ros_data):

        # When the ball is detected, save the desired frame and process the image to obtain the HSV values
        if self.tracking == 0:

            self.cv_image = self.bridge.imgmsg_to_cv2(ros_data, desired_encoding="passthrough")

        elif self.tracking == 1:

            # Use the bridge to convert the ROS Image message to OpenCV message
            cv_image = self.bridge.imgmsg_to_cv2(ros_data, desired_encoding="passthrough")

            # convert the cv_image to the HSV color space
            hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

            # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
            mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
            mask = cv2.erode(mask, None, iterations=2)
            mask = cv2.dilate(mask, None, iterations=2)

            # find contours in the mask and initialize the current (x, y) center of the ball
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
            center = None

            # only proceed if at least one contour was found
            if len(cnts) > 0:
                # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
                c = max(cnts, key=cv2.contourArea)
                ((x, y), radius) = cv2.minEnclosingCircle(c)
                M = cv2.moments(c)
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

                # only proceed if the radius meets a minimum size
                if radius > 10:
                    # draw the circle and centroid on the cv_image
                    cv2.circle(cv_image, (int(x), int(y)), int(radius), (0, 255, 255), 2)
                    cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

            # Use the bridge to convert the OpenCV message to ROS Image message. Publish the modified image on a new topic
            ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
            self.publisher.publish(ros_image)

            # Work out the head movement required to keep the ball in the image center
            # Velocities message initialization
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("HeadYaw")
            joint.joint_names.append("HeadPitch")
            joint.speed = 0.1
            joint.relative = True

            # Get center of detected object and calculate the head turns
            target_x = center[0]  # centroid x coordinate
            target_y = center[1]  # centroid y coordinate

            # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2)=(320/2,240/2)    (x,y) coordinates
            sub_x = target_x - cv_image.shape[1] / 2
            sub_y = target_y - cv_image.shape[0] / 2

            var_x = (sub_x / float(cv_image.shape[1] / 2))
            var_y = (sub_y / float(cv_image.shape[0] / 2))

            # Check the head position before moving it to restrict its movement and prevent it from crashing into the shoulders

            # Predict the new relative Y position, the one restricted to avoid crashes
            new_position_y = self.positions[1] + var_y * 0.10

            if (new_position_y <= -0.45):
                var_y = 0
            elif (new_position_y >= 0.3):
                var_y = 0

            joint.joint_angles.append(-var_x * 0.25)
            joint.joint_angles.append(var_y * 0.10)

            self.joint_pub.publish(joint)

            # Movement towards the ball (walk)
            walk = Twist()

            print " Radius is (px): ", radius
            print "HeadPitch: ", self.positions[1]
            print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
            if radius < 58:  # Walk forward if the ball is far. When radius = 50, Nao is approx. 25 cm from the ball

                # Walk
                if self.positions[0] < (-0.3):
                    walk.linear.x = 0.2
                    walk.angular.z = -0.25

                elif (self.positions[0] >= -0.3) and (self.positions[0] < 0.3):
                    walk.linear.x = 0.3
                    walk.angular.z = 0.0

                elif self.positions[0] >= 0.3:
                    walk.linear.x = 0.2
                    walk.angular.z = 0.25

                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

            elif radius > 68:  # If it is too close, walk backwards
                walk.linear.x = -0.2
                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

                self.walk_pub.publish(walk)
                print "Walk backwards"

            else:

                CabezaCentrada = False
                PelotaCentro = False

                # Check if the ball is in front of Nao; if not, Nao walks to the corresponding side
                # Check that the head is aligned with the body and not turned
                if self.positions[0] < (-0.3) or self.positions[0] > 0.3:  # Use a proportional controller to move with more precision
                    walk.linear.y = 0.0
                    e = 0 - self.positions[0]
                    walk.angular.z = e * (-0.5)

                elif self.positions[0] < (-0.05):  # HeadYaw <-->
                    walk.linear.y = -0.3
                    walk.angular.z = -0.0

                elif self.positions[0] > 0.05:
                    walk.linear.y = 0.3
                    walk.angular.z = 0.0

                else:  # Stop
                    CabezaCentrada = True

                    walk.linear.y = 0.0
                    walk.angular.z = 0.0
                    print " Head centered "

                    walk.linear.x = 0.0
                    walk.linear.z = 0.0

                    walk.angular.x = 0.0
                    walk.angular.y = 0.0

                    self.walk_pub.publish(walk)

                    rospy.sleep(0.3)  # Wait some time for the robot to become steady

                    if center[0] < 152 or center[0] > 168:  # Check if the ball is in the image center looking at its X coordinate
                        e = 160 - center[0]
                        walk.linear.y = e * (-0.005)
                        walk.angular.z = -0.0
                    else:  # Stop
                        PelotaCentro = True
                        walk.linear.y = 0.0
                        walk.angular.z = 0.0
                        print " Ball centered "

                walk.linear.x = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

                print " Center is: ", center
                print "Pelota Centro: ", PelotaCentro
                print "Cabeza Centrada: ", CabezaCentrada

                if PelotaCentro and CabezaCentrada:

                    self.image_sub.unregister()

                    # Subscribe to another callback
                    self.image_sub = rospy.Subscriber("/nao_robot/camera/top/camera/image_raw", Image, self.callback_camera2)
Example #17
  def callback_camera2(self, ros_data):
    print "callback camera 2: centering the ball in the image"

    # Use the bridge to convert the ROS Image message to OpenCV message
    cv_image = self.bridge.imgmsg_to_cv2(ros_data, desired_encoding="passthrough")

    # convert the cv_image to the HSV color space
    hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

    # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
    mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
    mask = cv2.erode(mask, None, iterations=2)
    mask = cv2.dilate(mask, None, iterations=2)

    # find contours in the mask and initialize the current (x, y) center of the ball
    cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                            cv2.CHAIN_APPROX_SIMPLE)[-2]
    center = None

    # only proceed if at least one contour was found
    if len(cnts) > 0:
      # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
      c = max(cnts, key=cv2.contourArea)
      ((x, y), radius) = cv2.minEnclosingCircle(c)
      M = cv2.moments(c)
      center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

      # only proceed if the radius meets a minimum size
      if radius > 10:
        # draw the circle and centroid on the cv_image
        cv2.circle(cv_image, (int(x), int(y)), int(radius), (0, 255, 255), 2)
        cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

    # Use the bridge to convert the OpenCV message to ROS Image message. Publish the modified image on a new topic
    ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
    self.publisher.publish(ros_image)

    if center[0] < 155 or center[0] > 165:  # Check X coordinate
      self.contador = 0  # Initialize counter
      # Work out the head movement required to keep the ball in the image center
      # Velocities message initialization
      joint = JointAnglesWithSpeed()
      joint.joint_names.append("HeadYaw")
      joint.joint_names.append("HeadPitch")
      joint.speed = 0.1
      joint.relative = True

      # Get center of detected object and calculate the head turns
      target_x = center[0]  # centroid x coordinate
      target_y = center[1]  # centroid y coordinate

      # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2)=(320/2,240/2)    (x,y) coordinates
      sub_x = target_x - cv_image.shape[1] / 2
      sub_y = target_y - cv_image.shape[0] / 2

      var_x = (sub_x / float(cv_image.shape[1] / 2))
      var_y = (sub_y / float(cv_image.shape[0] / 2))

      # Check the head position before moving it to restrict its movement and prevent it from crashing into the shoulders

      # Predict the new relative Y position, the one restricted to avoid crashes
      new_position_y = self.positions[1] + var_y * 0.15

      if (new_position_y <= -0.45):
        var_y = 0
      elif (new_position_y >= 0.3):
        var_y = 0

      joint.joint_angles.append(-var_x * 0.10)  # 0.25
      joint.joint_angles.append(var_y * 0.15)

      self.joint_pub.publish(joint)
    else:
      print "Centered with respect to X"
      if center[1] < 115 or center[1] > 125:  # Check Y coordinate
        self.contador = 0
        # Velocities message initialization
        joint = JointAnglesWithSpeed()
        joint.joint_names.append("HeadPitch")
        joint.speed = 0.1
        joint.relative = True

        if center[1] < 115:
          var_y = -0.01
        elif center[1] > 125:
          var_y = 0.01

        joint.joint_angles.append(var_y)

        self.joint_pub.publish(joint)
      else:
        # Counter to ensure the robot is centered and it was not a coincidence
        self.contador = self.contador + 1
        print "Centered with respect to Y"
        print "Counter: ", self.contador
        if self.contador > 10:
          self.image_sub.unregister()
          print "Centered, finishing"
          print "HeadPitch: ", self.positions[1]  # HeadPitch
          print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
          print " Center is: ", center

          # Obtain position
          Dreal = 0.08  # Real ball diameter (meters)
          pixelToMeter = Dreal / (radius * 2)

          Yrobot = (cv_image.shape[1] / 2 - center[0]) * pixelToMeter

          radiusToMeters = 0.25 * 55  # Distance: 0.25 m -> radius = 55 px
          Xrobot = radiusToMeters / radius

          # Calculate the Z coordinate with the equation that relates it to HeadPitch
          Zrobot = (-25.866 * self.positions[1] + 23.843) / 100  # Divide by 100 to convert centimeters to meters

          # Create point
          point = Point()
          point.x = Xrobot
          point.y = Yrobot
          point.z = Zrobot

          self.pointPub.publish(point)
          print "Published point: ", point

    print " Radius (px): ", radius
    print "HeadPitch: ", self.positions[1]
    print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
    print " Center is: ", center
Example #18
message_arrived = False
head_touch_message_arrived = None

# minimal stub callbacks (not shown in the original fragment): store the latest message
def talker6(msg):
    global message_arrived
    message_arrived = msg

def headtouch_cb(msg):
    global head_touch_message_arrived
    head_touch_message_arrived = msg

if __name__ == '__main__':
    try:
        rospy.init_node('talker6')
        rospy.Subscriber('/camshift/track_box', RotatedRectStamped, talker6)
        rospy.Subscriber('/pepper_robot/head_touch', HeadTouch, headtouch_cb)
        pub = rospy.Publisher('/pepper_robot/pose/joint_angles', JointAnglesWithSpeed, queue_size=1)
        rospy.sleep(1)  # give the publisher time to connect
        while not rospy.is_shutdown():
            hello = JointAnglesWithSpeed()
            if head_touch_message_arrived and head_touch_message_arrived.button == 1 and head_touch_message_arrived.state == 1:
                print("message_arrived")
                if True:  # (message_arrived.rect.size.width * message_arrived.rect.size.height > 4000):
                    hello.joint_names.append("LShoulderPitch")
                    hello.joint_names.append("RShoulderPitch")

                    hello.speed = 0.05
                    hello.joint_angles = [-0.05, -0.05]

                    print(message_arrived)

            message_arrived = False
            pub.publish(hello)

            rospy.sleep(1)

    except rospy.ROSInterruptException:
        pass
Example #19
    def track_bounding_box(self, polygon):
        self.hunt_initiated = True

        # Set the timeout counter to 2 seconds
        self.lost_object_counter = 20

        # Velocities message initialization
        joint = JointAnglesWithSpeed()
        joint.joint_names.append("HeadYaw")
        joint.joint_names.append("HeadPitch")
        joint.speed = 0.1
        joint.relative = True

        # Get center of detected object and calculate the head turns
        target_x = polygon.points[0].x + 0.5 * polygon.points[1].x
        target_y = polygon.points[0].y + 0.5 * polygon.points[1].y

        sub_x = target_x - 320 / 2.0
        sub_y = target_y - 240 / 2.0

        var_x = (sub_x / 160.0)
        var_y = (sub_y / 120.0)

        joint.joint_angles.append(-var_x * 0.05)
        joint.joint_angles.append(var_y * 0.05)

        ans = self.rh.humanoid_motion.getJointAngles(['HeadYaw', 'HeadPitch'])
        head_pitch = ans['angles'][1]
        head_yaw = ans['angles'][0]

        # Get the sonar measurements
        sonars = self.rh.sensors.getSonarsMeasurements()

        # Check if NAO is close to an obstacle
        if sonars['sonars']['front_left'] <= 0.3 or sonars['sonars'][
                'front_right'] <= 0.3:
            self.lock_motion = True
            rospy.loginfo("Locked due to sonars")
        # Check if NAO's head is pitched too far up or down
        elif head_pitch >= 0.4 or head_pitch <= -0.4:
            self.lock_motion = True
            rospy.loginfo("Locked due to head pitch")
        # Else approach the object
        elif self.lock_motion is False:
            self.theta_vel = head_yaw * 0.1
            if -0.2 < head_yaw < 0.2:
                print "Approaching"
                self.x_vel = 0.5
                self.y_vel = 0.0
            else:
                self.x_vel = 0.0
                self.y_vel = 0.0
                print "Centering"
            self.joint_pub.publish(joint)

        # Check the battery levels
        batt = self.rh.sensors.getBatteryLevels()
        battery = batt['levels'][0]
        if battery < 25:
            self.rh.audio.setVolume(100)
            self.rh.audio.speak("My battery is low")
            self.predator_sub.unregister()
            self.rh.humanoid_motion.goToPosture("Sit", 0.7)
            self.rh.motion.disableMotors()
            sys.exit(1)
Example #20
    def callback_camera(self, ros_data):

        if self.tracking == 0:

            self.cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

        elif self.tracking == 1:

            # Use the bridge to convert the ROS Image message to OpenCV message
            cv_image = self.bridge.imgmsg_to_cv2(
                ros_data, desired_encoding="passthrough")

            # convert the cv_image to the HSV color space
            hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)

            # construct a mask for the color "colour", then perform a series of dilations and erosions to remove any small blobs left in the mask
            mask = cv2.inRange(hsv, self.colourLower, self.colourUpper)
            mask = cv2.erode(mask, None, iterations=2)
            mask = cv2.dilate(mask, None, iterations=2)

            # find contours in the mask and initialize the current (x, y) center of the ball
            cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)[-2]
            center = None

            # only proceed if at least one contour was found
            if len(cnts) > 0:
                # find the largest contour in the mask, then use it to compute the minimum enclosing circle and centroid
                c = max(cnts, key=cv2.contourArea)
                ((x, y), radius) = cv2.minEnclosingCircle(c)
                M = cv2.moments(c)
                center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))

                # only proceed if the radius meets a minimum size
                if radius > 10:
                    # draw the circle and centroid on the cv_image
                    cv2.circle(cv_image, (int(x), int(y)), int(radius),
                               (0, 255, 255), 2)
                    cv2.circle(cv_image, center, 5, (0, 0, 255), -1)

            # Conversion of the OpenCV format image to ROS format and publish it in a topic
            ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
            self.publisher.publish(ros_image)

            # It calculates the movement of the head necessary to keep the ball in the center of the image
            # Velocities message initialization
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("HeadYaw")
            joint.joint_names.append("HeadPitch")
            joint.speed = 0.1
            joint.relative = True  #True

            # Get center of detected object and calculate the head turns
            target_x = center[0]  # centroid x coordinate
            target_y = center[1]  # centroid y coordinate

            # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2)=(320/2,240/2)    (x,y) coordinates
            sub_x = target_x - cv_image.shape[1] / 2
            sub_y = target_y - cv_image.shape[0] / 2

            var_x = (sub_x / float(cv_image.shape[1] / 2))
            var_y = (sub_y / float(cv_image.shape[0] / 2))

            # Check the position of the head before moving it to restrict movement and prevent it from clashing with the shoulders

            # The node subscribes to /joint_states and stores the first two values (HeadYaw and HeadPitch) in self.positions for comparison before each move

            # Determine the new relative Y position, which is the one restricted to avoid collisions
            new_position_y = self.positions[1] + var_y * 0.10

            if (new_position_y <= -0.45):
                var_y = 0
            elif (new_position_y >= 0.3):
                var_y = 0

            joint.joint_angles.append(-var_x * 0.25)
            joint.joint_angles.append(var_y * 0.10)

            self.joint_pub.publish(joint)

            # Movement towards the ball (walking)
            walk = Twist()

            print " El radio son (px): ", radius
            print "HeadPitch: ", self.positions[1]
            print "HeadYaw: ", self.positions[0]  # HeadYaw <-->
            if radius < 58:  # Approach if far away. If I see radio 50 I am approx. 25cm

                # Walk
                if self.positions[0] < (-0.3):
                    walk.linear.x = 0.2
                    walk.angular.z = -0.25

                elif (self.positions[0] >= -0.3) and (self.positions[0] < 0.3):
                    walk.linear.x = 0.3
                    walk.angular.z = 0.0

                elif self.positions[0] >= 0.3:
                    walk.linear.x = 0.2
                    walk.angular.z = 0.25

                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

            elif radius > 68:  # When Nao is too close to the ball, it walks backwards
                walk.linear.x = -0.2
                walk.linear.y = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0
                walk.angular.z = 0.0

                self.walk_pub.publish(walk)
                print " Desplazo atras"

            else:

                CabezaCentrada = False
                PelotaCentro = False
                # Check if the ball is in front of the robot, otherwise it moves to the corresponding side
                # Check that the head is aligned with the body and that it is not turned to any side
                if self.positions[0] < (-0.3) or self.positions[0] > 0.3:
                    walk.linear.y = 0.0
                    e = 0 - self.positions[0]
                    walk.angular.z = e * (-0.5)

                elif self.positions[0] < (-0.05):  # HeadYaw <-->
                    walk.linear.y = -0.3
                    walk.angular.z = -0.0

                elif self.positions[0] > 0.05:
                    walk.linear.y = 0.3
                    walk.angular.z = 0.0

                else:  # Stop walking
                    CabezaCentrada = True

                    walk.linear.y = 0.0
                    walk.angular.z = 0.0
                    print " Cabeza centrada"

                    walk.linear.x = 0.0
                    walk.linear.z = 0.0

                    walk.angular.x = 0.0
                    walk.angular.y = 0.0

                    self.walk_pub.publish(walk)

                    rospy.sleep(0.3)  # For the robot to stabilize

                    if center[0] < 150 or center[0] > 170:  # Verify that the ball is in the center of the image via its X coordinate
                        e = 160 - center[0]
                        walk.linear.y = e * (-0.005)

                        print "Controller"
                        walk.angular.z = -0.0
                    else:  # Stop
                        PelotaCentro = True
                        walk.linear.y = 0.0
                        walk.angular.z = 0.0
                        print " Ball centered"

                walk.linear.x = 0.0
                walk.linear.z = 0.0

                walk.angular.x = 0.0
                walk.angular.y = 0.0

                self.walk_pub.publish(walk)

                print " Center is: ", center
                print "Pelota Centro: ", PelotaCentro
                print "Cabeza Centrada: ", CabezaCentrada

                if PelotaCentro and CabezaCentrada:

                    self.image_sub.unregister()

                    # Subscribe to other callback
                    self.image_sub = rospy.Subscriber(
                        "/nao_robot/camera/top/camera/image_raw", Image,
                        self.callback_camera2)
Example #21
    def main(self):

        ## Configuration parameters
        # Set the language of the speech recognition engine to Spanish:
        self.asrProxy.setLanguage("Spanish")
        # set the language of the synthesis engine to Spanish:
        self.ttsProxy.setLanguage("Spanish")

        # Adds the vocabulary
        vocabulary = [
            "si", "no", "hola", "Nao", "rojo", "azul", "amarillo", "violeta"
        ]
        self.asrProxy.setVocabulary(vocabulary, False)
        # Or, if you want to enable word spotting:
        #self.asrProxy.setVocabulary(vocabulary, True)

        # Wake up robot
        self.motionProxy.wakeUp()

        fractionMaxSpeed = 0.5
        # Go to posture stand
        self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)

        # Introducing the game
        self.ttsProxy.say("Hola! Soy Nao.")

        # Ask the kid to choose one ball (blue, red, yellow)
        self.ttsProxy.say(
            "Elige una de las pelotas y cuando estés listo llámame.")

        # Start the sound tracker while the kid is not ready. When the kid says something or claps his hands, Nao turns toward the sound location.
        targetName = "Sound"
        distance = 1
        confidence = 0.4
        parameters = [distance, confidence]
        self.trackerProxy.registerTarget(targetName, parameters)

        mode = "Move"
        self.trackerProxy.setMode(mode)
        # Start tracker.
        self.trackerProxy.track(targetName)

        while True:
            time.sleep(1)
            TargetPosition = self.trackerProxy.getTargetPosition(
                2)  # Frame: 0-Torso, 1-World, 2-Robot
            print "Target Position: ", TargetPosition
            print len(TargetPosition)
            if len(TargetPosition) > 0:
                if TargetPosition[0] > 0 and abs(
                        TargetPosition[1]
                ) < 0.1:  # Exit the loop when the kid's voice is directly in front of the robot
                    break

        self.trackerProxy.stopTracker()
        self.trackerProxy.unregisterAllTargets()
        self.ttsProxy.say("¡Al fin te encuentro!")

        # Go to posture stand
        self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)

        self.ttsProxy.say("¿Me dejas ver la pelota?")

        repeate_recognition = 1
        count = 0
        while (repeate_recognition == 1):
            self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)
            # Robot posture with hands opened to catch the ball
            #joint_pub = rospy.Publisher('/joint_angles', JointAnglesWithSpeed, queue_size=10)
            rospy.loginfo("Poniendo manos en posición de reconocimiento")
            joint = JointAnglesWithSpeed()
            joint.joint_names.append("LShoulderRoll")
            joint.joint_angles.append(-0.35)
            joint.joint_names.append("LShoulderPitch")
            joint.joint_angles.append(-0.30)
            joint.joint_names.append("LElbowYaw")
            joint.joint_angles.append(-1.00)
            joint.joint_names.append("LElbowRoll")
            joint.joint_angles.append(0.00)
            joint.joint_names.append("LWristYaw")
            joint.joint_angles.append(-1.50)
            joint.joint_names.append("LHand")
            joint.joint_angles.append(1)

            joint.joint_names.append("RShoulderRoll")
            joint.joint_angles.append(0.35)
            joint.joint_names.append("RShoulderPitch")
            joint.joint_angles.append(-0.30)
            joint.joint_names.append("RElbowYaw")
            joint.joint_angles.append(1.00)
            joint.joint_names.append("RElbowRoll")
            joint.joint_angles.append(0.00)
            joint.joint_names.append("RWristYaw")
            joint.joint_angles.append(1.50)
            joint.joint_names.append("RHand")
            joint.joint_angles.append(1)

            joint.joint_names.append("HeadYaw")
            joint.joint_angles.append(0.00)
            joint.joint_names.append("HeadPitch")
            joint.joint_angles.append(0.00)

            joint.speed = 0.1
            joint.relative = False

            self.joint_pub.publish(joint)

            # Wait some time for the kid to put the ball in Nao's hands
            time.sleep(10)  # 10 seconds

            # Take a frame with the ball to recognize the colour
            ball_detected = 0
            while (ball_detected == 0):
                output = self.cv_image.copy(
                )  # Draw the detected circles without destroying the original image.
                gray = cv2.cvtColor(
                    self.cv_image, cv2.COLOR_BGR2GRAY
                )  # The cv2.HoughCircles function requires an 8-bit, single channel image, so we'll go ahead and convert from the RGB color space to grayscale

                # Detect circles in the image
                circles = cv2.HoughCircles(gray,
                                           cv2.cv.CV_HOUGH_GRADIENT,
                                           1.2,
                                           100,
                                           param2=80,
                                           minRadius=50,
                                           maxRadius=200)

                if circles is None:
                    print 'No ball detected'
                    self.ttsProxy.say("No soy capaz de reconocer la pelota")
                    time.sleep(5)
                else:
                    print 'Ball detected: ', circles
                    ball_detected = 1
                    # Ensure at least some circles were found

                if circles is not None:
                    # Convert the (x, y) coordinates and radius of the circles to integers allowing us to draw them on our output image
                    circles = np.round(circles[0, :]).astype("int")

                    # Loop over the (x, y) coordinates and radius of the circles
                    for (x, y, r) in circles:
                        # Draw the circle in the output image, then draw a rectangle
                        # Corresponding to the center of the circle
                        cv2.circle(output, (x, y), r, (0, 255, 0), 4)
                        cv2.rectangle(output, (x - 45, y - 45),
                                      (x + 45, y + 45), (0, 128, 255), -1)
                        ###

                    hsv = cv2.cvtColor(self.cv_image, cv2.COLOR_BGR2HSV)

                    # Calculate the mean of the HSV values within a rectangle (side 45) close to the center of the detected ball
                    s = [0, 0, 0]
                    for i in range(y - 45, y + 46):
                        for j in range(x - 45, x + 46):
                            s = s + hsv[i, j]
                            #print j,i, hsv[i,j]
                    s = s / ((45 + 46) * (45 + 46))
                    print s

                    # Calculate the upper and lower limits from the mean HSV
                    if s[1] < 100:  # Saturation < 100
                        if (s[0] - 10) < 0:
                            self.colourLower = (0, 40, 40)
                        else:
                            self.colourLower = (s[0] - 10, 40, 40)
                    else:
                        if (s[0] - 10) < 0:
                            self.colourLower = (0, 100, 100)
                        else:
                            self.colourLower = (s[0] - 10, 100, 100)

                    self.colourUpper = (s[0] + 10, 255, 255)

                    print "Rango de colores. Limites superior e inferior: ", self.colourUpper, self.colourLower
                    # Compare the colour range with the ones in the data base and say to the kid what colour is it
                    if s[0] < 20:  # Red
                        colour = "rojo"
                    elif s[0] > 90 and s[0] < 110:  # Blue
                        colour = "azul"
                    elif s[0] >= 20 and s[0] <= 40:  # Yellow
                        colour = "amarillo"
                    elif s[0] >= 130 and s[0] <= 150:  # Violet
                        colour = "violeta"
                    else:  # The colour is not in the database
                        colour = "no"

                    self.ttsProxy.say("Ya la puedes coger")

                    # Wait
                    time.sleep(5)  # 5 seconds

                    # Go to posture stand
                    self.postureProxy.goToPosture("StandInit",
                                                  fractionMaxSpeed)

                    if colour == "no":
                        self.ttsProxy.say("No he visto bien el color")
                        self.ttsProxy.say("¿Me dejas ver de nuevo la pelota?")
                        repeate_recognition = 1
                    else:

                        self.ttsProxy.say(
                            "¿Quieres que siga una pelota de color" + colour +
                            "¿Es eso cierto?")

                        respuesta = 0
                        while (respuesta == 0):
                            # Start the speech recognition engine with user Test_ASR
                            self.asrProxy.subscribe("Test_ASR")
                            self.subscribed = True
                            print 'Speech recognition engine started'
                            time.sleep(5)
                            WordHeard = self.Memory.getData(
                                "WordRecognized")[0]
                            # Stop the speech recognition engine with user Test_ASR
                            self.asrProxy.unsubscribe("Test_ASR")
                            self.subscribed = False
                            print "contador: ", count
                            if WordHeard == "si":
                                respuesta = 1
                                self.ttsProxy.say(
                                    "¡Genial! Comencemos a jugar")
                                repeate_recognition = 0
                            elif WordHeard == "no":
                                respuesta = 1

                                count += 1
                                if count > 2:
                                    self.ttsProxy.say(
                                        "¡Ai ai ai! ¿De qué color es entonces?"
                                    )
                                    repeate_recognition = 0

                                    respuesta2 = 0
                                    while (respuesta2 == 0):
                                        repeate_recognition = 0
                                        # Start the speech recognition engine with user Test_ASR
                                        self.asrProxy.subscribe("Test_ASR")
                                        self.subscribed = True
                                        print 'Speech recognition engine started'
                                        time.sleep(5)
                                        WordHeard = self.Memory.getData(
                                            "WordRecognized")[0]
                                        # Stop the speech recognition engine with user Test_ASR
                                        self.asrProxy.unsubscribe("Test_ASR")
                                        self.subscribed = False
                                        if WordHeard == "rojo":
                                            respuesta2 = 1
                                            colour = "rojo"
                                            self.colourLower = (0, 100, 100)
                                            self.colourUpper = (20, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)  # "Very well, I'll follow a <colour> ball"
                                        elif WordHeard == "azul":
                                            respuesta2 = 1
                                            colour = "azul"
                                            self.colourLower = (90, 100, 100)
                                            self.colourUpper = (110, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)
                                        elif WordHeard == "violeta":
                                            respuesta2 = 1
                                            colour = "violeta"
                                            self.colourLower = (135, 40, 40)
                                            self.colourUpper = (155, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)
                                        elif WordHeard == "amarillo":
                                            respuesta2 = 1
                                            colour = "amarillo"
                                            self.colourLower = (20, 100, 100)
                                            self.colourUpper = (40, 255, 255)
                                            self.ttsProxy.say(
                                                "Muy bien, seguiré una pelota de color "
                                                + colour)
                                        else:
                                            self.ttsProxy.say(
                                                "Disculpa, tendrás que volver a repetírmelo"
                                            )  # "Sorry, you'll have to repeat that for me"

                                else:
                                    self.ttsProxy.say(
                                        "¡Ups! Entonces tendré que volver a ver la pelota"
                                    )  # "Oops! Then I'll have to look at the ball again"
                                    repeate_recognition = 1

                            else:
                                self.ttsProxy.say(
                                    "Perdona, no te he entendido bien. ¿Puedes repetirme tu respuesta?"
                                )  # "Sorry, I didn't quite understand. Can you repeat your answer?"

        self.postureProxy.goToPosture("StandInit", fractionMaxSpeed)

        print "Tracker activado"
        self.ttsProxy.say(
            "Ahora muévete y enséñame la pelota, que ya voy a por ella")

        self.ttsProxy.say("Cuando quieras que la coja párate")
        self.tracking = 1

        # Wait until the tracking code sets self.end once the kid stops
        while self.end is not True:
            time.sleep(1)

        self.postureProxy.goToPosture("Stand", fractionMaxSpeed)
        self.ttsProxy.say("¿Quieres volver a jugar?")

        respuesta3 = 0
        while (respuesta3 == 0):
            # Start the speech recognition engine with user Test_ASR
            self.asrProxy.subscribe("Test_ASR")
            self.subscribed = True
            print 'Speech recognition engine started'
            time.sleep(5)
            WordHeard = self.Memory.getData("WordRecognized")[0]
            # Stop the speech recognition engine with user Test_ASR
            self.asrProxy.unsubscribe("Test_ASR")
            self.subscribed = False
            if WordHeard == "si":
                respuesta3 = 1
                self.ttsProxy.say(
                    "¡Genial! Volvamos a jugar con una pelota de otro color")
                self.play_again = True
            elif WordHeard == "no":
                respuesta3 = 1
                self.ttsProxy.say("Joo, ¡Qué pena!")
                self.play_again = False
                raise KeyboardInterrupt
            else:
                self.ttsProxy.say(
                    "Perdona, no te he entendido bien. ¿Puedes repetirme tu respuesta?"
                )  # "Sorry, I didn't quite understand. Can you repeat your answer?"
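
# The masking step that consumes self.colourLower and self.colourUpper is not
# shown in this excerpt. A minimal sketch of how such HSV bounds are typically
# applied with OpenCV (the helper name and the frame source are assumptions,
# not part of the original example):

import cv2

def segment_ball(bgr_frame, colour_lower, colour_upper):
    # Convert to HSV and keep only the pixels inside the learned colour range
    hsv = cv2.cvtColor(bgr_frame, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(hsv, colour_lower, colour_upper)
    # Erode/dilate to remove speckle noise before searching for the ball contour
    mask = cv2.erode(mask, None, iterations=2)
    mask = cv2.dilate(mask, None, iterations=2)
    return mask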
	def track_bounding_box(self, polygon):
		self.hunt_initiated = True
		self.lost_object_counter = 20
		
		joint = JointAnglesWithSpeed()
	
		joint.joint_names.append("HeadYaw")
		joint.joint_names.append("HeadPitch")

		joint.speed = 0.1
		joint.relative = True

		# polygon.points[0] appears to hold the box's top-left corner and
		# points[1] its width/height, so the target is the bounding-box center
		target_x = polygon.points[0].x + 0.5 * polygon.points[1].x
		target_y = polygon.points[0].y + 0.5 * polygon.points[1].y
		
		sub_x = target_x - 320 / 2.0
		sub_y = target_y - 240 / 2.0
 
		var_x = (sub_x / 160.0)
		var_y = (sub_y / 120.0)
		
		# Proportional correction: gain 0.05 on the error normalized to [-1, 1];
		# yaw is negated because image x grows rightwards while positive
		# HeadYaw turns the head left
		joint.joint_angles.append(-var_x * 0.05)
		joint.joint_angles.append(var_y * 0.05)
				
		
		ans = self.rh.humanoid_motion.getJointAngles(['HeadYaw', 'HeadPitch'])['angles']
		head_yaw = ans[0]
		head_pitch = ans[1]
		
		sonars = self.rh.sensors.getSonarsMeasurements()['sonars']
		
		if (sonars['front_left'] <= self.sonar_value or sonars['front_right'] <= self.sonar_value) and not self.lock_motion:
			self.lock_motion = True
			rospy.loginfo("Locked due to sonars")
			self.find_distance_with_sonars = True

		elif abs(head_pitch) >= self.head_pitch_value and not self.lock_motion:
			self.lock_motion = True
			rospy.loginfo("Locked due to head pitch")
			
		print "self.lock_motion:", self.lock_motion
		
		if self.lock_motion is False:
			# Steer the base: rotate in proportion to the head yaw and walk
			# forward while the target stays roughly centered
			self.theta_vel = head_yaw * 0.1
			if -self.head_yaw_value < head_yaw < self.head_yaw_value:
				self.x_vel = 0.3
			self.pub.publish(joint)
		else:
			self.x_vel = 0
			self.y_vel = 0
			self.theta_vel = 0
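			# Note: self.x_vel / self.y_vel / self.theta_vel are presumably
			# consumed elsewhere (e.g. a loop that publishes base velocities);
			# that code is not part of this excerpt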
			
		if self.lock_motion is True:
			
			print "sonars:", sonars['front_left'], sonars['front_right']
			print "Head_pitch:",head_pitch
			print "Head_yaw:",head_yaw
			print "Sub_x:", sub_x
			print "Sub_y:", sub_y
			
			dx = 0  # forward distance to the target
			sy = 0  # sign applied to the lateral angle below
			if self.find_distance_with_sonars is True and\
				(sonars['front_left'] <= self.sonar_value or sonars['front_right'] <= self.sonar_value):
				# A sonar fired: take the closer reading as the distance
				if (sonars['front_left'] <= sonars['front_right']):
					dx = sonars['front_left']
					sy = +1
				else:
					dx = sonars['front_right']
					sy = -1
			else:
				# Estimate the distance from the head pitch instead: 47.6 deg
				# is the camera's vertical field of view over the 240 px height
				x = (sub_y * 47.6 * math.pi / 180) / 240.0
				print "x=", x
				total_x = head_pitch + x + 0.021
				print "total_x=", total_x
				# 0.53 m is roughly the camera height above the floor
				dx = 0.53 / math.tan(total_x)
				sy = -1
				
			print "dx= ",dx
			y = (sub_x * 60.9 * 3.14159 / 180) / 320.0
			print "y=", y
			total_y = head_yaw + sy * y
			print "total_y=",total_y
			dy = dx * math.tan(total_y)
			print "dy= " ,dy
			
			self.disableObjectTracking()
			
		battery = self.rh.sensors.getBatteryLevels()['levels'][0]
		
		if battery < 25:
			self.rh.audio.setVolume(100)
			self.rh.audio.speak("My battery is low")
			self.sub.unregister()
			self.rh.humanoid_motion.goToPosture("Sit", 0.7)
			self.rh.motion.disableMotors()
			sys.exit(1)
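
# For reference, the ground-plane geometry above can be factored into a small
# helper. This is a sketch under the same assumptions (NAO top camera with a
# ~60.9 x 47.6 deg field of view over a 320x240 image, lens about 0.53 m above
# the target plane, 0.021 rad pitch calibration offset); the function name and
# defaults are illustrative, not part of the original code:

import math

def estimate_target_offset(sub_x, sub_y, head_yaw, head_pitch,
                           cam_height=0.53, pitch_offset=0.021,
                           img_w=320, img_h=240,
                           hfov_deg=60.9, vfov_deg=47.6):
    # Angle below the horizontal at which the camera sees the target
    ang_down = head_pitch + (sub_y * math.radians(vfov_deg)) / img_h + pitch_offset
    dx = cam_height / math.tan(ang_down)   # forward distance
    # Lateral angle: pixel offset converted to an angle, combined with the yaw
    ang_side = head_yaw - (sub_x * math.radians(hfov_deg)) / img_w
    dy = dx * math.tan(ang_side)           # lateral offset
    return dx, dy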
Example #23
0
    def facecallback(self, event):
        # Periodic callback that processes the latest top-camera image (the
        # image subscriber is assumed to store it in self.ros_data)

        # Use the bridge to convert the ROS Image message to an OpenCV image

        if self.preparado is not True:  # "preparado" = ready flag
            return

        cv_image = self.bridge.imgmsg_to_cv2(self.ros_data,
                                             desired_encoding="passthrough")

        face_cascade = cv2.CascadeClassifier(
            'haarcascade_frontalface_default.xml')  # resolved relative to the working directory

        gray = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)

        faces = face_cascade.detectMultiScale(gray,
                                              scaleFactor=1.2,
                                              minNeighbors=5,
                                              minSize=(30, 30),
                                              flags=cv2.cv.CV_HAAR_SCALE_IMAGE)

        # Draw a rectangle around the detected faces
        for (x, y, w, h) in faces:
            cv2.rectangle(cv_image, (x, y), (x + w, y + h), (255, 0, 0),
                          2)  # The center of the rectangle is: (x+w/2,y+h/2)

        # Look for the biggest face, the one to track
        if len(faces) > 0:
            c = faces[0]
            for i in faces:
                if i[2] > c[2]:
                    c = i

            cv2.rectangle(cv_image, (c[0], c[1]), (c[0] + c[2], c[1] + c[3]),
                          (255, 255, 0), 2)

            center = (c[0] + c[2] / 2, c[1] + c[3] / 2)  # Center of the face
            cv2.circle(cv_image, center, 3, (0, 0, 255), -1)

        else:
            center = (cv_image.shape[1] / 2, cv_image.shape[0] / 2)  # Image center
            print "No faces found"

        # Convert the image format from OpenCV to ROS and publish the modified image on a new topic
        ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8")
        self.publisher.publish(ros_image)

        ### Work out the head movement required to keep the face centered in the image
        # Joint command message initialization
        joint = JointAnglesWithSpeed()
        joint.joint_names.append("HeadYaw")
        joint.joint_names.append("HeadPitch")
        joint.speed = 0.1
        joint.relative = True

        # Get the center of the detected face and calculate the head turns
        target_x = center[0]  # centroid x coordinate
        target_y = center[1]  # centroid y coordinate

        # Image center -> (cv_image.shape[1]/2, cv_image.shape[0]/2) = (320/2, 240/2)   (x, y) coordinates
        sub_x = target_x - cv_image.shape[1] / 2
        sub_y = target_y - cv_image.shape[0] / 2

        var_x = (sub_x / float(cv_image.shape[1] / 2))
        var_y = (sub_y / float(cv_image.shape[0] / 2))

        # Check the head position before moving it, restricting the movement so it cannot crash into the shoulders

        # Predict the new pitch position, the one that must be kept within limits
        new_position_y = self.positions[1] + var_y * 0.10

        if (new_position_y <= -0.45) or (new_position_y >= 0.3):
            var_y = 0  # Outside the safe pitch range: cancel the vertical correction

        joint.joint_angles.append(-var_x * 0.35)  # yaw correction (negated: image x grows rightwards)
        joint.joint_angles.append(var_y * 0.15)   # pitch correction

        self.joint_pub.publish(joint)
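
# facecallback reads self.positions[1] as the current HeadPitch, but the code
# that fills self.positions is not part of this excerpt. One plausible way to
# keep it updated, assuming a standard sensor_msgs/JointState stream (the
# topic and method names here are illustrative):

import rospy
from sensor_msgs.msg import JointState

def joint_states_callback(self, msg):
    # Keep self.positions = [HeadYaw, HeadPitch] in sync with the robot
    yaw = msg.position[msg.name.index('HeadYaw')]
    pitch = msg.position[msg.name.index('HeadPitch')]
    self.positions = [yaw, pitch]

# Registered once at start-up, e.g. in __init__:
#   self.joint_sub = rospy.Subscriber('/joint_states', JointState,
#                                     self.joint_states_callback)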