Example #1
    def take_tool(self, arm_index):
        '''Robot's response to TAKE_TOOL'''
        self._is_busy = True
        if self._demo_state == DemoState.READY_TO_TAKE:
            ## Robot closes the hand
            Arms.set_gripper_state(arm_index, GripperState.CLOSED, wait=True)
            
            ## Robot moves the hand near the camera to take a look at the tool
            self._move_to_arm_pose('look', arm_index, wait=True)
            self.tool_id = self.world.get_tool_id()

            if self.tool_id is None:
                ## Robot moves the arm back so the person can take the tool
                self._move_to_arm_pose('take', 0, wait=True)
                Response.say(RobotSpeech.ERROR_TOOL_NOT_RECOGNIZED)
                Response.perform_gaze_action(GazeGoal.SHAKE)
                self._demo_state = DemoState.NO_TOOL_NO_SURFACE
            else:
                self.session.new_action(self.tool_id)
                Response.say(RobotSpeech.RECOGNIZED_TOOL + str(self.tool_id))

                self._demo_state = DemoState.HAS_TOOL_NO_SURFACE
                self.detect_surface()
        else:
            Response.say(RobotSpeech.ERROR_NOT_IN_TAKE_STATE)

        rospy.loginfo('Current state: ' + self._demo_state)
        self._is_busy = False
Example #2
    def stop_execution(self, dummy=None):
        '''Stops ongoing execution'''
        if (self._demo_state == DemoState.PLAYING_DEMO and
            self.arms.is_executing()):
            self.arms.stop_execution()

            Response.say(RobotSpeech.STOPPING_EXECUTION)
            Response.perform_gaze_action(GazeGoal.NOD)
        else:
            Response.perform_gaze_action(GazeGoal.SHAKE)
            Response.say(RobotSpeech.ERROR_IS_NOT_PLAYING)
Example #3
    def stop_recording(self, dummy=None):
        '''Stops recording continuous motion'''
        self.busy = True

        if (self._demo_state == DemoState.RECORDING_DEMO):
            
            traj_step = ActionStep()
            traj_step.type = ActionStep.ARM_TRAJECTORY

            waited_time = Interaction._arm_trajectory.timing[0]

            for i in range(len(Interaction._arm_trajectory.timing)):
                Interaction._arm_trajectory.timing[i] -= waited_time
                Interaction._arm_trajectory.timing[i] += rospy.Duration(0.1)
            
            # If motion was relative, record transformed pose
            traj_step.armTrajectory = ArmTrajectory(
                Interaction._arm_trajectory.rArm[:],
                Interaction._arm_trajectory.lArm[:],
                Interaction._arm_trajectory.timing[:],
                Interaction._arm_trajectory.rRefFrame,
                Interaction._arm_trajectory.lRefFrame,
                Interaction._arm_trajectory.rRefFrameObject,
                Interaction._arm_trajectory.lRefFrameObject)
            
            traj_step.gripperAction = GripperAction(
                                        self.arms.get_gripper_state(0),
                                        self.arms.get_gripper_state(1))
                                        
            self.session.add_step_to_action(traj_step,
                                        self.world.get_frame_list())
            

            Interaction._arm_trajectory = None
            Interaction._trajectory_start_time = None
            self.session.save_current_action()
            self.freeze_arm(0)

            self._demo_state = DemoState.HAS_RECORDED_DEMO

            Response.say(RobotSpeech.STOPPED_RECORDING)
            Response.perform_gaze_action(GazeGoal.NOD)

        else:

            Response.say(RobotSpeech.ERROR_NOT_RECORDING)
            Response.perform_gaze_action(GazeGoal.SHAKE)

        rospy.loginfo('Current state: ' + self._demo_state)
        self.busy = False
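None of the snippets above shows how Interaction._arm_trajectory is filled while the state is RECORDING_DEMO. The following is a minimal sketch of such a recording step: the method name and the get_arm_state() accessor are hypothetical, while the rArm, lArm and timing fields are the ones stop_recording() packages above. This is not the original recording code.

    def _save_arm_to_trajectory(self):
        '''Sketch: appends the current arm states to the recording buffer'''
        if Interaction._arm_trajectory is not None:
            # get_arm_state() is a hypothetical accessor returning an arm state
            Interaction._arm_trajectory.rArm.append(self.arms.get_arm_state(0))
            Interaction._arm_trajectory.lArm.append(self.arms.get_arm_state(1))
            # Timing entries are Durations relative to the recording start,
            # matching how stop_recording() re-bases them above.
            Interaction._arm_trajectory.timing.append(
                rospy.Time.now() - Interaction._trajectory_start_time)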
Example #4
    def replay_demonstration(self, dummy=None):
        '''Starts the execution of the current demonstration'''
        self.busy = True
        execution_z_offset = 0.00
        if (self._demo_state == DemoState.HAS_RECORDED_DEMO):
            self.session.save_current_action()
            action = self.session.get_current_action()
            self.arms.start_execution(action, execution_z_offset)
            Response.say(RobotSpeech.STARTED_REPLAY)
            self._demo_state = DemoState.PLAYING_DEMO
        else:
            Response.say(RobotSpeech.ERROR_CANNOT_REPLAY)
            Response.perform_gaze_action(GazeGoal.SHAKE)

        rospy.loginfo('Current state: ' + self._demo_state)
        self.busy = False
Example #5
    def release_tool(self, arm_index):
        '''Robot's response to RELEASE_TOOL'''
        self.busy = True
        if (self._demo_state != DemoState.READY_TO_TAKE and 
            self._demo_state != DemoState.PLAYING_DEMO and
            self._demo_state != DemoState.RECORDING_DEMO):
            self._move_to_arm_pose('take', 0, wait=True)
            self.arms.set_gripper_state(arm_index, GripperState.OPEN, wait=True)
            Response.say(RobotSpeech.TOOL_RELEASED)
            Response.perform_gaze_action(GazeGoal.GLANCE_RIGHT_EE)
            self._demo_state = DemoState.READY_TO_TAKE
        else:
            Response.perform_gaze_action(GazeGoal.SHAKE)
            Response.say(RobotSpeech.ERROR_NOT_IN_RELEASE_STATE)

        rospy.loginfo('Current state: ' + self._demo_state)
        self.busy = False
Example #6
    def default_response(self, responses):
        '''Default response to speech commands'''
        self.busy = True
        speech_resp = responses[0]
        gaze_resp = responses[1]

        # Speech response
        if (speech_resp is not None):
            Response.say(speech_resp)
            Response.respond_with_sound(speech_resp)
        
        # Gaze response
        if (gaze_resp is not None):
            Response.perform_gaze_action(gaze_resp)

        rospy.loginfo('Current state: ' + self._demo_state)
        self.busy = False
Example #7
    def detect_surface(self, dummy=None):
        '''Robot detects the working surface and moves to the ready pose'''
        self._is_busy = True

        if self._demo_state == DemoState.HAS_TOOL_NO_SURFACE:
            ## Robot moves the arm away and looks at the surface
            self._move_to_arm_pose('away', 0, wait=True)
            self.surface = self.world.get_surface()

            if self.surface is None:
                Response.say(RobotSpeech.ERROR_NO_SURFACE)
                Response.perform_gaze_action(GazeGoal.SHAKE)
            else:
                Response.say(RobotSpeech.SURFACE_DETECTED)
                self._move_to_arm_pose('ready', 0, wait=True)
                Response.say(RobotSpeech.READY_FOR_DEMO)
                self._demo_state = DemoState.READY_FOR_DEMO

        self._is_busy = False
Example #8
    def start_recording(self, dummy=None):
        '''Starts recording continuous motion'''
        self.busy = True
        if (self._demo_state == DemoState.READY_FOR_DEMO or
            self._demo_state == DemoState.HAS_RECORDED_DEMO):

            Interaction._arm_trajectory = ArmTrajectory()
            Interaction._trajectory_start_time = rospy.Time.now()

            if self.session.n_frames() > 0:
                self.session.clear_current_action()

            self.relax_arm(0)
            self._demo_state = DemoState.RECORDING_DEMO
            Response.say(RobotSpeech.STARTED_RECORDING)
            Response.perform_gaze_action(GazeGoal.NOD)
        elif (self._demo_state == DemoState.RECORDING_DEMO):
            Response.perform_gaze_action(GazeGoal.SHAKE)
            Response.say(RobotSpeech.ERROR_ALREADY_RECORDING)
        else:
            Response.perform_gaze_action(GazeGoal.SHAKE)
            Response.say(RobotSpeech.ERROR_NOT_READY_TO_RECORD)

        rospy.loginfo('Current state: ' + self._demo_state)
        self.busy = False
Example #9
    def _end_replay(self):
        '''Responses for when the action execution ends'''
        if (self.arms.status == ExecutionStatus.SUCCEEDED):
            if self._demo_state == DemoState.PLAYING_DEMO:
                Response.say(RobotSpeech.ENDED_REPLAY)
                Response.perform_gaze_action(GazeGoal.NOD)
                self._demo_state = DemoState.HAS_RECORDED_DEMO
            else:
                rospy.loginfo('Non-replay motion successful.')
        elif (self.arms.status == ExecutionStatus.PREEMPTED):
            if self._demo_state == DemoState.PLAYING_DEMO:
                Response.say(RobotSpeech.ERROR_PREEMPTED_REPLAY)
                Response.perform_gaze_action(GazeGoal.SHAKE)
                self._demo_state = DemoState.HAS_RECORDED_DEMO
            else:
                rospy.loginfo('Non-replay motion pre-empted.')
        else:
            if self._demo_state == DemoState.PLAYING_DEMO:
                Response.say(RobotSpeech.ERROR_REPLAY_NOT_POSSIBLE)
                Response.perform_gaze_action(GazeGoal.SHAKE)
                self._demo_state = DemoState.HAS_RECORDED_DEMO
            else:
                rospy.loginfo('Non-replay motion has no IK.')

        self.arms.status = ExecutionStatus.NOT_EXECUTING
Example #10
    def __init__(self):
        self.arms = Arms()
        self.world = World()
        self.session = Session(object_list=self.world.get_frame_list(),
                               is_debug=True)
        self._viz_publisher = rospy.Publisher('visualization_marker_array', MarkerArray)
        self._demo_state = None
        self._is_busy = True

        rospy.Subscriber('recognized_command', Command, self.speech_command_cb)
        rospy.Subscriber('gui_command', GuiCommand, self.gui_command_cb)

        self.responses = {
            Command.TEST_MICROPHONE: Response(self.default_response,
                                [RobotSpeech.TEST_RESPONSE, GazeGoal.NOD]),
            Command.TAKE_TOOL: Response(self.take_tool, 0),
            Command.RELEASE_TOOL: Response(self.release_tool, 0),
            Command.START_RECORDING: Response(self.start_recording, None),
            Command.STOP_RECORDING: Response(self.stop_recording, None),
            Command.REPLAY_DEMONSTRATION: Response(self.replay_demonstration, None),
            Command.DETECT_SURFACE: Response(self.detect_surface, None)
        }

        rospy.loginfo('Will wait until arms ready to respond.')
        while ((self.arms.get_ee_state(0) is None) or
            (self.arms.get_ee_state(1) is None)):
            time.sleep(0.1)

        rospy.loginfo('Starting to move to the initial pose.')
        # TODO: Make it possible to take with either hand
        self._move_to_arm_pose('take', 0)
        self._move_to_arm_pose('away', 1)
        self._wait_for_arms()
        self._demo_state = DemoState.READY_TO_TAKE
        Response.say(RobotSpeech.HAND_TOOL_REQUEST)
        Response.perform_gaze_action(GazeGoal.GLANCE_RIGHT_EE)
        self._is_busy = False
        rospy.loginfo('Interaction initialized.')
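__init__ above subscribes 'recognized_command' to self.speech_command_cb, which does not appear in this listing. Below is a minimal sketch of that dispatch, assuming Response exposes a respond() method that invokes the stored function with its stored parameter and that _is_busy is the guard flag; both assumptions follow from how self.responses and _is_busy are used above, and this is not the original callback.

    def speech_command_cb(self, command):
        '''Sketch: dispatches a recognized speech command (assumed behavior)'''
        if command.command in self.responses and not self._is_busy:
            rospy.loginfo('Responding to speech command: ' + str(command.command))
            # respond() is assumed to call the stored function with the
            # stored parameter, e.g. self.take_tool(0) for TAKE_TOOL.
            self.responses[command.command].respond()
        elif self._is_busy:
            rospy.logwarn('Ignoring speech command, robot is busy.')
        else:
            rospy.logwarn('Unknown speech command: ' + str(command.command))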
Example #11
    def _end_execution(self):
        '''Responses for when the action execution ends'''
        if (self.arms.status == ExecutionStatus.SUCCEEDED):
            Response.say(RobotSpeech.EXECUTION_ENDED)
            Response.perform_gaze_action(GazeGoal.NOD)
        elif (self.arms.status == ExecutionStatus.PREEMPTED):
            Response.say(RobotSpeech.EXECUTION_PREEMPTED)
            Response.perform_gaze_action(GazeGoal.SHAKE)
        else:
            Response.say(RobotSpeech.EXECUTION_ERROR_NOIK)
            Response.perform_gaze_action(GazeGoal.SHAKE)

        self.arms.status = ExecutionStatus.NOT_EXECUTING
Example #12
    def update(self):
        '''Periodic update for the two arms'''
        Arms.arms[0].update(self.is_executing())
        Arms.arms[1].update(self.is_executing())

        moving_arm = Arms._get_most_moving_arm()
        if (moving_arm != self.attended_arm and not self.is_executing()):
            if (moving_arm == -1):
                Response.perform_gaze_action(GazeGoal.LOOK_FORWARD)
            elif (moving_arm == 0):
                Response.perform_gaze_action(GazeGoal.FOLLOW_RIGHT_EE)
            else:
                Response.perform_gaze_action(GazeGoal.FOLLOW_LEFT_EE)
            self.attended_arm = moving_arm
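update() above has to be ticked periodically by the owning node. A minimal sketch of such a driving loop, assuming a plain fixed-rate rospy loop; the node name and the 10 Hz rate are assumptions.

# Minimal driving loop (sketch); node name and rate are assumptions.
if __name__ == '__main__':
    rospy.init_node('arms_update_node')
    arms = Arms()
    rate = rospy.Rate(10)  # assumed 10 Hz periodic update
    while not rospy.is_shutdown():
        arms.update()
        rate.sleep()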
Example #13
    def _execute_action_step(self, action_step):
        '''Executes the motion part of an action step'''
        # For each step check step type
        # If arm target action
        if (action_step.type == ActionStep.ARM_TARGET):
            rospy.loginfo('Will perform arm target action step.')

            if (not self.move_to_joints(action_step.armTarget.rArm,
                                        action_step.armTarget.lArm)):
                self.status = ExecutionStatus.OBSTRUCTED
                return False

        # If arm trajectory action
        elif (action_step.type == ActionStep.ARM_TRAJECTORY):

            rospy.loginfo('Will perform arm trajectory action step.')

            # First move to the start frame
            if (not self.move_to_joints(action_step.armTrajectory.rArm[0],
                                        action_step.armTrajectory.lArm[0])):
                self.status = ExecutionStatus.OBSTRUCTED
                return False

            #  Then execute the trajectory
            Arms.arms[0].exectute_joint_traj(action_step.armTrajectory.rArm,
                                             action_step.armTrajectory.timing)
            Arms.arms[1].exectute_joint_traj(action_step.armTrajectory.lArm,
                                             action_step.armTrajectory.timing)

            # Wait until both arms complete the trajectory
            while ((Arms.arms[0].is_executing() or Arms.arms[1].is_executing())
                   and not self.preempt):
                time.sleep(0.01)
            rospy.loginfo('Trajectory complete.')

            # Verify that both arms succeeded
            if ((not Arms.arms[0].is_successful())
                    or (not Arms.arms[1].is_successful())):
                rospy.logwarn('Aborting execution; ' +
                              'arms failed to follow trajectory.')
                self.status = ExecutionStatus.OBSTRUCTED
                return False

        # If hand action do it for both sides
        if (action_step.gripperAction.rGripper !=
                Arms.arms[0].get_gripper_state()):
            rospy.loginfo('Will perform right gripper action ' +
                          str(action_step.gripperAction.rGripper))
            Arms.arms[0].set_gripper(action_step.gripperAction.rGripper)
            Response.perform_gaze_action(GazeGoal.FOLLOW_RIGHT_EE)

        if (action_step.gripperAction.lGripper !=
                Arms.arms[1].get_gripper_state()):
            rospy.loginfo('Will perform LEFT gripper action ' +
                          str(action_step.gripperAction.lGripper))
            Arms.arms[1].set_gripper(action_step.gripperAction.lGripper)
            Response.perform_gaze_action(GazeGoal.FOLLOW_LEFT_EE)

        # Wait for grippers to be done
        while (Arms.arms[0].is_gripper_moving()
               or Arms.arms[1].is_gripper_moving()):
            time.sleep(0.01)
        rospy.loginfo('Hands done moving.')

        # Verify that both grippers succeeded
        if ((not Arms.arms[0].is_gripper_at_goal())
                or (not Arms.arms[1].is_gripper_at_goal())):
            rospy.logwarn('Hand(s) did not fully close or open!')

        return True
Example #14
    def update_object_pose(self):
        ''' Function to externally update an object pose'''
        Response.perform_gaze_action(GazeGoal.LOOK_DOWN)
        while (Response.gaze_client.get_state() == GoalStatus.PENDING
               or Response.gaze_client.get_state() == GoalStatus.ACTIVE):
            time.sleep(0.1)

        if (Response.gaze_client.get_state() != GoalStatus.SUCCEEDED):
            rospy.logerr('Could not look down to take table snapshot')
            return False

        rospy.loginfo('Looking at table now.')
        goal = UserCommandGoal(UserCommandGoal.RESET, False)
        self._object_action_client.send_goal(goal)
        while (self._object_action_client.get_state() == GoalStatus.ACTIVE or
               self._object_action_client.get_state() == GoalStatus.PENDING):
            time.sleep(0.1)
        rospy.loginfo('Object recognition has been reset.')
        rospy.loginfo('STATUS: ' +
                      self._object_action_client.get_goal_status_text())
        self._reset_objects()

        if (self._object_action_client.get_state() != GoalStatus.SUCCEEDED):
            rospy.logerr('Could not reset recognition.')
            return False

        # Do segmentation
        goal = UserCommandGoal(UserCommandGoal.SEGMENT, False)
        self._object_action_client.send_goal(goal)
        while (self._object_action_client.get_state() == GoalStatus.ACTIVE or
               self._object_action_client.get_state() == GoalStatus.PENDING):
            time.sleep(0.1)
        rospy.loginfo('Table segmentation is complete.')
        rospy.loginfo('STATUS: ' +
                      self._object_action_client.get_goal_status_text())

        if (self._object_action_client.get_state() != GoalStatus.SUCCEEDED):
            rospy.logerr('Could not segment.')
            return False

        # Do recognition
        goal = UserCommandGoal(UserCommandGoal.RECOGNIZE, False)
        self._object_action_client.send_goal(goal)
        while (self._object_action_client.get_state() == GoalStatus.ACTIVE or
               self._object_action_client.get_state() == GoalStatus.PENDING):
            time.sleep(0.1)
        rospy.loginfo('Objects on the table have been recognized.')
        rospy.loginfo('STATUS: ' +
                      self._object_action_client.get_goal_status_text())

        # Record the result
        if (self._object_action_client.get_state() == GoalStatus.SUCCEEDED):
            wait_time = 0
            total_wait_time = 5
            while (not World.has_objects() and wait_time < total_wait_time):
                time.sleep(0.1)
                wait_time += 0.1

            if (not World.has_objects()):
                rospy.logerr('Timeout waiting for a recognition result.')
                return False
            else:
                rospy.loginfo('Got the object list.')
                return True
        else:
            rospy.logerr('Could not recognize.')
            return False