def main(args):
    """Entry point for the 'test_brain' ROS node.

    Initialises the node, publishes the shared interface state, registers
    the program/learning services and the learning-request action server,
    creates the PR2 gripper pose publishers and a periodic gripper timer,
    then spins until shutdown.  All handles are stored in module globals so
    the service/action callbacks (defined elsewhere in this file) can reach
    them.
    """
    global state_manager
    global ph
    global art
    global action_server
    global left_gripper_pub
    global right_gripper_pub
    global gripper_timer
    rospy.init_node('test_brain')
    # Shared interface state, broadcast under this node's BRAIN_ID.
    state_manager = InterfaceStateManager(InterfaceState.BRAIN_ID, callback)
    ph = ProgramHelper()
    state_manager.set_system_state(InterfaceState.STATE_IDLE)
    rospy.Service('/art/brain/program/start', ProgramIdTrigger, start_program)
    # Start and stop of learning are both routed to the same callback,
    # but with different service types.
    rospy.Service('/art/brain/learning/start', ProgramIdTrigger, learning_srv_cb)
    rospy.Service('/art/brain/learning/stop', Trigger, learning_srv_cb)
    # rospy.Service('/art/brain/program/stop', stopProgram, stop_program)
    art = ArtApiHelper()
    # auto_start=False: the server must be started explicitly below,
    # after construction, to avoid racing early goals.
    action_server = actionlib.SimpleActionServer(
        '/art/brain/learning_request',
        LearningRequestAction,
        execute_cb=learning_request_cb,
        auto_start=False)
    action_server.start()
    left_gripper_pub = rospy.Publisher('/art/pr2/left_arm/gripper/pose',
                                       PoseStamped, queue_size=10)
    right_gripper_pub = rospy.Publisher('/art/pr2/right_arm/gripper/pose',
                                        PoseStamped, queue_size=10)
    # NOTE(review): this rebinds the global 'gripper_timer' from the callback
    # function (defined elsewhere in this file) to the rospy.Timer object.
    # The Timer keeps its own reference to the callback so it still fires,
    # but afterwards the global no longer names the function — consider a
    # distinct name for the timer handle (requires checking all readers).
    gripper_timer = rospy.Timer(rospy.Duration(0.5), gripper_timer)
    rospy.spin()
def __init__(self):
    """Set up the art_sound node internals.

    Resolves one sound file per interface severity level, subscribes to
    interface state changes, exposes info/warning/error test services and
    starts the daemon worker thread that plays queued sounds.
    """
    sound_dir = os.path.join(rospkg.RosPack().get_path('art_sound'), 'sounds')
    # One sound file per severity level reported through InterfaceState.
    severity_files = (
        (InterfaceState.INFO, 'info.mp3'),
        (InterfaceState.WARNING, 'warning.mp3'),
        (InterfaceState.ERROR, 'error.mp3'),
    )
    self.sounds = {severity: os.path.join(sound_dir, fname)
                   for severity, fname in severity_files}
    self.manager = InterfaceStateManager("art_sound", cb=self.state_cb)
    # Small test services so each severity sound can be triggered manually.
    self.srv_info = rospy.Service("info", Empty, self.srv_info_cb)
    self.srv_warning = rospy.Service("warning", Empty, self.srv_warning_cb)
    self.srv_error = rospy.Service("error", Empty, self.srv_error_cb)
    # Playback requests are queued and consumed by a background daemon
    # thread so ROS callbacks never block on audio.
    self.q = Queue()
    self.th = Thread(target=self.thread_cb)
    self.th.daemon = True
    self.th.start()
def __init__(self):
    """Projected-GUI core (UICoreRos).

    Reads scene geometry from ROS parameters, connects Qt signals to their
    handler slots, builds the on-table widgets (cursors, touch table, STOP
    button), and creates every service proxy, action client, publisher,
    subscriber and service this interface uses.
    """
    # Scene geometry and projection server port come from ROS parameters.
    origin = rospy.get_param("scene_origin")
    size = rospy.get_param("scene_size")
    rpm = rospy.get_param("rpm")
    port = rospy.get_param("scene_server_port")
    super(UICoreRos, self).__init__(origin[0], origin[1], size[0], size[1], rpm, port)
    # ROS callbacks arrive on non-Qt threads; these old-style signal/slot
    # connections marshal them onto the Qt event loop.
    QtCore.QObject.connect(self, QtCore.SIGNAL('objects'), self.object_cb_evt)
    QtCore.QObject.connect(self, QtCore.SIGNAL('user_status'), self.user_status_cb_evt)
    QtCore.QObject.connect(self, QtCore.SIGNAL('interface_state'), self.interface_state_evt)
    QtCore.QObject.connect(self, QtCore.SIGNAL('touch_calibration_points_evt'),
                           self.touch_calibration_points_evt)
    QtCore.QObject.connect(self, QtCore.SIGNAL('touch_detected_evt'), self.touch_detected_evt)
    QtCore.QObject.connect(self, QtCore.SIGNAL('notify_user_evt'), self.notify_user_evt)
    QtCore.QObject.connect(self, QtCore.SIGNAL('learning_request_done_evt'),
                           self.learning_request_done_evt)
    self.user_state = None
    self.program_list = None
    self.program_vis = None
    self.template = False  # TODO this should be stored in program_vis?
    # Default position for the next program widget (scene coordinates).
    self.last_prog_pos = (0.2, self.height - 0.2)
    self.last_edited_prog_id = None
    self.ph = ProgramHelper()
    # One pose cursor item per configured topic.
    cursors = rospy.get_param("~cursors", [])
    for cur in cursors:
        PoseStampedCursorItem(self.scene, cur)
    TouchTableItem(
        self.scene, '/art/interface/touchtable/touch',
        list(self.get_scene_items_by_type(PoseStampedCursorItem)))
    self.stop_btn = ButtonItem(self.scene, 0, 0, "STOP", None,
                               self.stop_btn_clicked, 2.0, QtCore.Qt.red)
    # Anchor the STOP button near the bottom-right corner of the scene.
    self.stop_btn.setPos(
        self.scene.width() - self.stop_btn.boundingRect().width() - 300,
        self.scene.height() - self.stop_btn.boundingRect().height() - 60)
    self.stop_btn.set_enabled(True)
    self.projectors = []
    projs = rospy.get_param("~projectors", [])
    for proj in projs:
        self.projectors.append(ProjectorHelper(proj))
    rospy.loginfo("Waiting for /art/brain/learning_request")
    # Blocks until the brain's learning-request action server is up.
    self.learning_action_cl = actionlib.SimpleActionClient(
        '/art/brain/learning_request', LearningRequestAction)
    self.learning_action_cl.wait_for_server()
    self.art = ArtApiHelper()
    # Latched so late subscribers immediately learn the calibration state.
    self.projectors_calibrated_pub = rospy.Publisher(
        "~projectors_calibrated", Bool, queue_size=1, latch=True)
    self.projectors_calibrated_pub.publish(False)
    # NOTE(review): other code in this file registers /art/brain/learning/start
    # with a different service type — confirm startProgram matches the server.
    self.start_learning_srv = rospy.ServiceProxy(
        '/art/brain/learning/start', startProgram)  # TODO wait for service? where?
    self.stop_learning_srv = rospy.ServiceProxy(
        '/art/brain/learning/stop', Trigger)  # TODO wait for service? where?
    self.program_pause_srv = rospy.ServiceProxy('/art/brain/program/pause', Trigger)
    self.program_resume_srv = rospy.ServiceProxy(
        '/art/brain/program/resume', Trigger)
    self.program_stop_srv = rospy.ServiceProxy('/art/brain/program/stop', Trigger)
    self.emergency_stop_srv = rospy.ServiceProxy(
        '/pr2_ethercat/halt_motors', EmptyService)  # TODO wait for service? where?
    self.emergency_stop_reset_srv = rospy.ServiceProxy(
        '/pr2_ethercat/reset_motors', EmptyService)  # TODO wait for service? where?
    self.program_error_resolve_srv = rospy.ServiceProxy(
        '/art/brain/program/error_response', ProgramErrorResolve)  # TODO wait for service? where?
    self.program_error_dialog = None
    self.grasp_dialog = None
    self.emergency_stopped = False
    rospy.loginfo("Waiting for ART services...")
    self.art.wait_for_api()
    rospy.loginfo("Ready! Starting state machine.")
    # TODO move this to ArtApiHelper ??
    self.obj_sub = rospy.Subscriber('/art/object_detector/object_filtered',
                                    InstancesArray, self.object_cb, queue_size=1)
    self.user_status_sub = rospy.Subscriber('/art/user/status', UserStatus,
                                            self.user_status_cb, queue_size=1)
    self.touch_points = None
    self.touch_calib_srv = rospy.Service(
        '/art/interface/projected_gui/touch_calibration',
        TouchCalibrationPoints, self.touch_calibration_points_cb)
    self.notify_user_srv = rospy.Service(
        '/art/interface/projected_gui/notify_user',
        NotifyUser, self.notify_user_srv_cb)
    # Wait for every projector node; if all report calibrated we can
    # announce readiness, otherwise expose a calibration service.
    proj_calib = True
    if len(self.projectors) > 0:
        rospy.loginfo("Waiting for projector nodes...")
        for proj in self.projectors:
            proj.wait_until_available()
            if not proj.is_calibrated():
                proj_calib = False
    if proj_calib:
        rospy.loginfo('Projectors already calibrated.')
        self.projectors_calibrated_pub.publish(True)
    else:
        rospy.loginfo(
            'Projectors not calibrated yet - waiting for command...')
        self.projector_calib_srv = rospy.Service(
            '/art/interface/projected_gui/calibrate_projectors',
            Trigger, self.calibrate_projectors_cb)
    self.state_manager = InterfaceStateManager("PROJECTED UI", cb=self.interface_state_cb)
def __init__(self):
    """Projected-GUI core (FSM-driven variant of UICoreRos).

    Reads scene geometry from ROS parameters, hooks Qt signals to their
    slots, hands the FSM its callbacks, and creates all widgets, service
    proxies and action clients this interface relies on.
    """
    origin = rospy.get_param("scene_origin")
    size = rospy.get_param("scene_size")
    rpm = rospy.get_param("rpm")
    port = rospy.get_param("scene_server_port")
    super(UICoreRos, self).__init__(
        origin[0], origin[1], size[0], size[1], rpm, port)
    # ROS callbacks run on background threads; these old-style signal/slot
    # connections marshal them onto the Qt event loop.
    signal_slots = (
        ('objects', self.object_cb_evt),
        ('user_status', self.user_status_cb_evt),
        ('interface_state', self.interface_state_evt),
        ('touch_calibration_points_evt', self.touch_calibration_points_evt),
        ('touch_detected_evt', self.touch_detected_evt),
        ('notify_user_evt', self.notify_user_evt),
        ('learning_request_done_evt', self.learning_request_done_evt),
    )
    for sig_name, slot in signal_slots:
        QtCore.QObject.connect(self, QtCore.SIGNAL(sig_name), slot)
    self.user_status = None
    self.fsm = FSM()
    # TODO do this automatically??
    # Map the FSM's state callbacks onto this instance's handlers.
    for cb_name in ('cb_start_calibration', 'cb_waiting_for_user',
                    'cb_program_selection', 'cb_waiting_for_user_calibration',
                    'cb_learning', 'cb_running'):
        setattr(self.fsm, cb_name, getattr(self, cb_name))
    self.fsm.is_template = self.is_template
    self.program_vis = None
    self.template = False  # TODO this should be stored in program_vis?
    self.state_manager = InterfaceStateManager(
        "PROJECTED UI", cb=self.interface_state_cb)
    self.ph = ProgramHelper()
    # One pose cursor item per configured topic.
    for cursor_topic in rospy.get_param("~cursors", []):
        PoseStampedCursorItem(self.scene, cursor_topic)
    TouchTableItem(
        self.scene,
        '/art/interface/touchtable/touch',
        list(self.get_scene_items_by_type(PoseStampedCursorItem)))
    self.stop_btn = ButtonItem(self.scene, 0, 0, "STOP", None,
                               self.stop_btn_clicked, 2.0, QtCore.Qt.red)
    # Anchor the STOP button near the bottom-right corner of the scene.
    self.stop_btn.setPos(
        self.scene.width() - self.stop_btn.boundingRect().width() - 300,
        self.scene.height() - self.stop_btn.boundingRect().height() - 60)
    self.stop_btn.set_enabled(True)
    self.projectors = []
    for projector_id in rospy.get_param("~projectors", []):
        self.add_projector(projector_id)
    rospy.loginfo("Waiting for /art/brain/learning_request")
    # Blocks until the brain's learning-request action server is up.
    self.learning_action_cl = actionlib.SimpleActionClient(
        '/art/brain/learning_request', LearningRequestAction)
    self.learning_action_cl.wait_for_server()
    self.art = ArtApiHelper()
    # Latched so late subscribers immediately learn the calibration state.
    self.projectors_calibrated_pub = rospy.Publisher(
        "~projectors_calibrated", Bool, queue_size=1, latch=True)
    self.projectors_calibrated_pub.publish(False)
    # TODO wait for these services? where?
    self.start_learning_srv = rospy.ServiceProxy(
        '/art/brain/learning/start', Trigger)
    self.stop_learning_srv = rospy.ServiceProxy(
        '/art/brain/learning/stop', Trigger)
    self.emergency_stop_srv = rospy.ServiceProxy(
        '/pr2_ethercat/halt_motors', EmptyService)
    self.emergency_stop_reset_srv = rospy.ServiceProxy(
        '/pr2_ethercat/reset_motors', EmptyService)
    self.program_error_resolve_srv = rospy.ServiceProxy(
        '/art/brain/program/error_response', ProgramErrorResolve)
    self.program_error_dialog = None
    self.grasp_dialog = None
    # NOTE(review): attribute name is misspelled ('stoped') but is kept
    # as-is because other code may read it — fix together with all readers.
    self.emergency_stoped = False