Example No. 1
def send_arm_joint_speed(q_dot, is_left_arm = False):
    if is_left_arm:
        g = gen_larm_joint_vel_dict(q_dot)
    else:
        g = gen_rarm_joint_vel_dict(q_dot)

    PyPR2.moveArmWithJointVelocity(**g)
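A minimal usage sketch (the velocity values are illustrative), assuming q_dot is a sequence of joint velocities in rad/s in the order expected by the gen_larm_joint_vel_dict / gen_rarm_joint_vel_dict helpers used above:

q_dot = [0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]    # hypothetical 7-joint velocity vector
send_arm_joint_speed(q_dot, is_left_arm=True)  # drive the left arm at these joint speeds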
Example No. 2
    def checkin(self):
        if self.appdisabled:
            return False

        try:
            self.twtaccess = Twitter(
                auth=OAuth(tininfo.TiNTwOAToken, tininfo.TiNTwOASecret,
                           tininfo.TiNTwConKey, tininfo.TiNTwConSecret))
        except:
            print 'Unable to check into PyRIDE PR2 twitter app. Try again in 10mins.'
            return False

        self.startupTime = time.localtime()
        self.updatestatus(
            time.strftime("I'm back online at %H:%M to serve ",
                          time.localtime()) + tininfo.TiNLocation + ".")

        self.token = self.lastoken
        self.getmessages()
        tinstate.updateStatus(constants.NEW_MESSAGES, len(self.messages) == 0)
        tid = PyPR2.addTimer(60, -1, 60)
        self.timercontext[tid] = 'getmsg'
        timermanager.addTimer(tid, self)
        #add purge archive time
        purgetime = timermanager.calcTimePeriodFromNow("4:00")
        if purgetime > 0:
            tid = PyPR2.addTimer(purgetime, -1, 24 * 60 * 3600)
            self.timercontext[tid] = 'purgemsg'
            timermanager.addTimer(tid, self)

        return True
Example No. 3
def timerActions( id ):

    global a, b, x, y, last_x, tracking_data, csvFile, csvFileCounter, LostConnectionCounter, NumPeople, msgTryTimer, NEW_INTERACTION_INITIALISER

    if msgTryTimer == id:

        b += 1
        vel_x = x - last_x
        last_x = x
        if vel_x < -0.01:
            PyPR2.moveArmWithJointPos(**initial_left)
            a += 1

        tracking_data.append((x, y, x, y, time.time(), NumPeople, "empty", 1, LostConnectionCounter))

        #b +=1
        #updateCsv()

        with open(csvFile, "w") as output:
            writer = csv.writer(output, lineterminator='\n')
            writer.writerows(tracking_data)
Example No. 4
def find_human():

    global HUMAN_DETECTION_COUNTER, revolve_counter

    while revolve_counter == 1 and PyPR2.getHeadPos()[0] < 1.5 and HUMAN_DETECTION_COUNTER == 0:
        revolve_cw()

    while revolve_counter == -1 and PyPR2.getHeadPos()[0] > -1.5 and HUMAN_DETECTION_COUNTER == 0:
        revolve_acw()

    while PyPR2.getHeadPos()[0] > 1.2 and HUMAN_DETECTION_COUNTER == 0:
        revolve_counter = -1
        revolve_acw()
        find_human()

    while PyPR2.getHeadPos()[0] < -1.2 and HUMAN_DETECTION_COUNTER == 0:
        revolve_counter = 1
        revolve_cw()
        find_human()

    if HUMAN_DETECTION_COUNTER != 0:
        adjust_to_shooting(PyPR2.getHeadPos()[1])
Example No. 5
    def startDataRecording(self, mode, filename=""):
        cmd = 'rosbag record -b 1024 '
        str = ''
        if mode & constants.REC_CAM:
            #cmd = cmd + '-e "/(.*)_stereo/(left|right)/image_rect_color" '
            cmd = cmd + '-e "/wide_stereo/left/image_rect_color" '  # record only one camera data
            str = '_cam'
        if mode & constants.REC_KINECT:
            cmd = cmd + '"/camera/rgb/image_rect_color" "/camera/depth_registered/image_rect" '
            str = '_kinect'
        if mode & constants.REC_SCAN:
            cmd = cmd + '-e "/(.*)_scan$" '
            str = str + '_laser'
        if mode & constants.REC_IMU:
            cmd = cmd + '"/torso_lift_imu/data" '
            str = str + '_imu'
        if mode & constants.REC_JOINTS:
            cmd = cmd + '"/joint_states" '
            str = str + '_joint'
        if mode & constants.REC_TF:
            cmd = cmd + '"/tf" '
            str = str + '_tf'

        if filename == "":
            cmd = cmd + '--duration=1m --split -O %s/%s%s_data.bag' % \
              (RECORDED_DATA_DIR, time.strftime( "%Y%m%d_%H%M", time.localtime()), str)
        else:
            cmd = cmd + '--duration=1m --split -O %s/%s.bag' % \
              (RECORDED_DATA_DIR, filename)

        if self.recording:
            self.killProc(self.recording)

        self.recording = self.spawnProc(cmd)
        PyPR2.say("Start data recording!")
Example No. 6
def find_human():

    global HUMAN_DETECTION_COUNTER, revolve_counter
    while revolve_counter == 1 and PyPR2.getHeadPos(
    )[0] < 1.5 and HUMAN_DETECTION_COUNTER == 0:

        revolve_cw()

    while revolve_counter == -1 and PyPR2.getHeadPos(
    )[0] > -1.5 and HUMAN_DETECTION_COUNTER == 0:

        revolve_acw()

    while PyPR2.getHeadPos()[0] > 1.2 and HUMAN_DETECTION_COUNTER == 0:

        revolve_counter = -1
        revolve_acw()
        find_human()

    while PyPR2.getHeadPos()[0] < -1.2 and HUMAN_DETECTION_COUNTER == 0:

        revolve_counter = 1
        revolve_cw()
        find_human()

    if HUMAN_DETECTION_COUNTER != 0:
        adjust_to_shooting(PyPR2.getHeadPos()[1])
Example No. 7
def adjust_to_shooting():
    global last_proximity
    proximity = check_head_proximity()

    if proximity == True and last_proximity == False:
        PyPR2.moveBodyTo(0.0, 0.0, (0.65) * PyPR2.getHeadPos()[0], 1)
        #PyPR2.moveHeadTo(0.0,y)
    last_proximity = proximity
Example No. 8
    def startJoystickControl(self):
        if self.joyControl:
            print 'already in joystick control mode'
            return

        self.joyControl = self.spawnProc(
            'roslaunch pr2_teleop teleop_joystick.launch > /dev/null 2>&1')
        PyPR2.say("Start joystick control.")
Example No. 9
    def purgearchive(self):
        if len(self.archive) == 0:
            PyPR2.say("No archived message to be deleted.")
            return

        self.archive = []
        tinstate.updateStatus(constants.ARCHIVE_MESSAGES, True)
        PyPR2.say("All archived message have been deleted.")
Example No. 10
def systemShutdownActions():
    global myMessenger
    global extProcCall

    PyPR2.say('I am going off line. Goodbye.')

    myMessenger.checkout()
    extProcCall.fini()
Example No. 11
def adjust_to_shooting():
	global last_proximity
	proximity = check_head_proximity()

	if proximity== True and last_proximity==False:
		PyPR2.moveBodyTo(0.0,0.0,(0.65)*PyPR2.getHeadPos()[0],1)
		#PyPR2.moveHeadTo(0.0,y)
	last_proximity = proximity
Example No. 12
def timerActions(id):
    global myMessenger, msgTryTimer

    if msgTryTimer == id and myMessenger.checkin():
        PyPR2.removeTimer(msgTryTimer)
        msgTryTimer = -1
    else:
        timermanager.onTimerCall(id)
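For context, msgTryTimer is created elsewhere by registering this function as the PyPR2 timer callback; a minimal sketch following the addTimer calls used in the other examples on this page (arguments appear to be initial delay, repeat count with -1 meaning indefinitely, and interval; the values here are illustrative):

PyPR2.onTimer = timerActions              # route timer callbacks to timerActions
msgTryTimer = PyPR2.addTimer(10, -1, 10)  # start after 10 s, repeat every 10 s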
Example No. 13
def onHumanDetected(objtype, nameid, trackid, status):
	global csvCounter,csvFile,actionIdentifier,isNearest
	PyPR2.moveTorsoBy(0.1,2)
	PyPR2.say("This exit is closed,   please use the other exit")
	csvCounter += 1
	csvFile = "/home/demoshare/sid_stuff/aggressiveBehaviourExperiment/test"+str(csvCounter)+"_"+str(time.time())+".csv"

	actionIdentifier = "Human Detected"
	isNearest = False
Example No. 14
def batteryChargeChangeActions(batpc, isplugged, time_remain):
    global myMessenger

    if batpc < 20 and not isplugged:
        PyPR2.say("I'm low on battery, please put me back on main power.")

        if myMessenger:
            myMessenger.updatestatus(
                "I have only %d percent battery power left!" % batpc)
Example No. 15
def deactivate_trajectory_input():
    global rt_cnt, rt_position, rt_velocity, rt_acceleration, rt_orientation
    rt_cnt = 0
    PyPR2.registerRawTrajectoryInput( None )
    assert PyPR2.useJointVelocityControl(False)
    rt_orientation   = None
    rt_position      = numpy.zeros(3)
    rt_velocity      = numpy.zeros(3)
    rt_acceleration  = numpy.zeros(3)
Example No. 16
def twist_forearm(angle = 180.0, is_left_arm = True):
        if is_left_arm:
            joint_name = 'l_forearm_roll_joint'
        else:
            joint_name = 'r_forearm_roll_joint'
            
        jd   = PyPR2.getArmJointPositions(is_left_arm)
        jd[joint_name] = jd[joint_name] + angle*math.pi/180.0
        PyPR2.moveArmWithJointPos(**jd)    
Example No. 17
def set_lg(x = 1):
    '''
    sets the position of left gripper from 0 to 8
    '''
    if x > 8:
        x = 8
    if x < 0:
        x = 0

    PyPR2.setGripperPosition(1, 0.01*x)
Example No. 18
def set_rg(x = 1):
    '''
    sets the position of right gripper from 0 to 8
    '''
    if x > 8:
        x = 8
    if x < 0:
        x = 0

    PyPR2.setGripperPosition(2, 0.01*x)
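Both set_lg and set_rg clamp x to the range 0 to 8 and scale it by 0.01, so PyPR2.setGripperPosition receives an opening between 0.0 and 0.08 (presumably metres, which matches the PR2 gripper range). A quick usage sketch:

set_lg(8)   # open the left gripper fully (0.08)
set_rg(0)   # close the right gripper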
Example No. 19
def on_move_arm_failed( is_left_arm ):
    global larm_failed, rarm_failed
    if hasattr( PyPR2, 'onMoveArmPoseFailed' ) and hasattr( PyPR2.onMoveArmPoseFailed, '__call__' ):
      PyPR2.onMoveArmPoseFailed( is_left_arm )
    if is_left_arm:
        print "larm failed"
        larm_failed = True
    else:
        rarm_failed = True
        print "rarm failed"
Example No. 20
def on_move_arm_finished( is_left_arm ):
    global larm_reached, rarm_reached
    if hasattr( PyPR2, 'onMoveArmPoseComplete' ) and hasattr( PyPR2.onMoveArmPoseComplete, '__call__' ):
      PyPR2.onMoveArmPoseComplete( is_left_arm )
    if is_left_arm:
        print "larm reached"
        larm_reached = True
    else:
        print "rarm reached"
        rarm_reached = True
Example No. 21
def larm_elbow_position():
    '''
    returns the elbow position of the left arm, comparable to function wrist_position() in the arm object
    '''    
    p0 = PyPR2.getRelativeTF('torso_lift_link' , 'l_shoulder_pan_link')['position']
    p  = PyPR2.getRelativeTF('torso_lift_link' , 'l_elbow_flex_link')['position']
    x = p[0] - p0[0]
    y = p[1] - p0[1]
    z = p[2] - p0[2]
    pos = numpy.array([x,y,z])
    return(pos)    
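As an illustrative follow-up (not part of the original code, numpy assumed imported as above), the norm of this vector gives the shoulder-to-elbow distance:

upper_arm_length = numpy.linalg.norm(larm_elbow_position())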
Example No. 22
 def onHumanDetected( self, objtype, trackid, nameid, status ):
   if status == NEW_OBJ or status == REC_OBJ:
     if nameid == 0:
       nameid = 3
     if nameid not in self.seen_people:
       self.seen_people[nameid] = KnownPerson( nameid )
       PyPR2.say( 'Hello {}'.format(self.seen_people[nameid].label) )
     elif self.seen_people[nameid].last_seen - time.time() > 15*60:
       PyPR2.say( 'Hello {}'.format(self.seen_people[nameid].label) )
   elif status == LOST_OBJ:
     if nameid in self.seen_people:
       self.seen_people[nameid].last_seen = time.time()
Example No. 23
 def onHumanDetected(self, objtype, trackid, nameid, status):
     if status == NEW_OBJ or status == REC_OBJ:
         if nameid == 0:
             nameid = 3
         if nameid not in self.seen_people:
             self.seen_people[nameid] = KnownPerson(nameid)
             PyPR2.say('Hello {}'.format(self.seen_people[nameid].label))
         elif self.seen_people[nameid].last_seen - time.time() > 15 * 60:
             PyPR2.say('Hello {}'.format(self.seen_people[nameid].label))
     elif status == LOST_OBJ:
         if nameid in self.seen_people:
             self.seen_people[nameid].last_seen = time.time()
Example No. 24
    def play(self):
        if not self.target_x:
            print "No position set for target"
            self.action_finishes = True
            return

        if not self.action_finishes:
            print "still playing action '%s'" % self.name
            return

        PyPR2.pointHeadTo("base_link", self.target_x, self.target_y,
                          self.target_z)
Example No. 25
def restoreInitialState():
    global LostConnectionCounter, NumPeople, tracking_data, NEW_INTERACTION_INITIALISER, csvFile, csvFileCounter, last_x

    PyPR2.tuckBothArms()  #for the time being

    #NumPeople = 0
    tracking_data = ['focus_x', 'focus_y', 'avg_x', 'avg_y', 'time', 'speechCommand', 'actionCounter']
    NEW_INTERACTION_INITIALISER = 0
    csvFileCounter += 1
    csvFile = "/home/demoshare/sid_stuff/Nov26TrackData/" + str(csvFileCounter) + ".csv"  #change the appropriate csvFile
    LostConnectionCounter = 0
    last_x = 0
Example No. 26
def activate_trajectory_input():
    '''
    To start receiving raw trajectory input, you should send a ROS service request call:
    rosservice call /rhyth_dmp/recall_dmp_traj <figure name> <amplitude> <frequency> <sampling frequency> <number of cycles>
    For example:
    rosservice call /rhyth_dmp/recall_dmp_traj fig8 0.05 0.2 20 3

    generates 3 cycles of a figure eight with amplitude 0.05, frequency 0.2 and sampling frequency 20
    '''
    global rt_cnt
    rt_cnt = 0
    PyPR2.registerRawTrajectoryInput( on_trajectory_received )
    assert PyPR2.useJointVelocityControl(True)
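The on_trajectory_received callback registered above is not shown on this page. Below is a hypothetical skeleton, assuming (purely as an assumption, not the documented PyPR2 signature) that the callback is handed position, velocity, acceleration and orientation samples, caching them in the globals that deactivate_trajectory_input resets:

def on_trajectory_received(position, velocity, acceleration, orientation):
    # Hypothetical handler: the real callback signature is defined by
    # PyPR2.registerRawTrajectoryInput and may differ.
    global rt_cnt, rt_position, rt_velocity, rt_acceleration, rt_orientation
    rt_cnt += 1
    rt_position = numpy.array(position)
    rt_velocity = numpy.array(velocity)
    rt_acceleration = numpy.array(acceleration)
    rt_orientation = orientation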
Example No. 27
def powerPlugChangeActions(isplugged):
    global myMessenger

    text = ""
    if isplugged:
        text = "I'm on main power."
    else:
        text = "I'm on battery power."

    PyPR2.say(text)

    if myMessenger:
        myMessenger.updatestatus(text)
Example No. 28
def pos_larm_grip_wrt_tor_shpan():
    '''
    Returns the left arm gripper position vector (end-effector position) with respect to the torso shoulder pan joint center
    '''
    p0 = PyPR2.getRelativeTF('torso_lift_link' , 'l_shoulder_pan_link')['position']
    pr = vecmat.as_vector(PyPR2.getRelativeTF('torso_lift_link' , 'l_gripper_r_finger_tip_link')['position'])
    pl = vecmat.as_vector(PyPR2.getRelativeTF('torso_lift_link' , 'l_gripper_l_finger_tip_link')['position'])
    p  = (pl + pr)/2
    x = p[0] - p0[0]
    y = p[1] - p0[1]
    z = p[2] - p0[2]
    pos = numpy.array([x,y,z])
    return(pos)    
Example No. 29
def onHumanDetected(objtype, trackid, nameid, status):	
	global HUMAN_DETECTION_COUNTER,start_time,torso_position_counter,msgTryTimer
	#PyPR2.say("Target Detect ed")
	#PyPR2.moveTorsoBy(0.1,3)
	#PyPR2.moveBodyTo(0.1,0.0,0.0,4)
	if HUMAN_DETECTION_COUNTER==0:
	
		track_x.append(('x','time'))
	HUMAN_DETECTION_COUNTER+=1
	msgTryTimer=-1
	start_time = time.time()
	if torso_position_counter ==0:
		PyPR2.moveTorsoBy(0.1,3)
		torso_position_counter +=1
Example No. 30
def onHumanDetected(objtype, trackid, nameid, status):
    global HUMAN_DETECTION_COUNTER, start_time, torso_position_counter, msgTryTimer
    #PyPR2.say("Target Detect ed")
    #PyPR2.moveTorsoBy(0.1,3)
    #PyPR2.moveBodyTo(0.1,0.0,0.0,4)
    if HUMAN_DETECTION_COUNTER == 0:

        track_x.append(('x', 'time'))
    HUMAN_DETECTION_COUNTER += 1
    msgTryTimer = -1
    start_time = time.time()
    if torso_position_counter == 0:
        PyPR2.moveTorsoBy(0.1, 3)
        torso_position_counter += 1
Example No. 31
def onHumanTracking(tracking_objs):
    #global busymoving
    #SHOOTING_TAG = 0
    #global start_time,last_action_counter,movement_tracker,msgTryTimer,d,x,y,track_x,track_y,track_d,elapsed_time,focus_obj,HUMAN_DETECTION_COUNTER
    global a2, b2, NEW_INTERACTION_INITIALISER, NumPeople, HumanPresence, x, y, msgTryTimer
    NumPeople = len(tracking_objs)
    a2 += 1
    if len(tracking_objs) == 0:
        NEW_INTERACTION_INITIALISER += 1
        if HumanPresence == True:
            PyPR2.removeTimer(msgTryTimer)

        if NEW_INTERACTION_INITIALISER > 500:
            restoreInitialState()

        HumanPresence = False

    elif len(tracking_objs) > 0:
        if HumanPresence == False:
            PyPR2.onTimer = timerActions
            msgTryTimer = PyPR2.addTimer(1, -1, 0.5)
            b2 += 1
            #st_time = time.time()
            #no_objTracker.append(elapsed_time)
            HumanPresence = True

        object_index = closest_obj_index(tracking_objs)
        focus_obj = tracking_objs[object_index]
        x = focus_obj['est_pos'][0]
        y = focus_obj['est_pos'][1]

        mid_x = focus_obj['bound'][0] + focus_obj['bound'][2] / 2
        mid_y = focus_obj['bound'][1] + focus_obj['bound'][3] / 2
        ofs_x = mid_x - 320
        ofs_y = mid_y - 240
        chx = chy = 0.0

        if math.fabs(ofs_x) > 10:
            chx = -ofs_x * 90.0 / 640 * 0.01745329252
            #head_yaw_list.append(chx)

        if math.fabs(ofs_y) > 10:
            chy = ofs_y * 90.0 / 640 * 0.01745329252

        PyPR2.updateHeadPos(chx, chy)
Example No. 32
def track_human(focus_obj):
    mid_x = focus_obj['bound'][0] + focus_obj['bound'][2] / 2
    mid_y = focus_obj['bound'][1] + focus_obj['bound'][3] / 2
    #print "track obj {} mid pt ({}.{})".format(focus_obj['track_id'],mid_x,mid_y)
    ofs_x = mid_x - 320
    ofs_y = mid_y - 240
    chx = chy = 0.0

    if math.fabs(ofs_x) > 10:
        chx = -ofs_x * 90.0 / 640 * 0.01745329252
        #head_yaw_list.append(chx)

    if math.fabs(ofs_y) > 10:
        chy = ofs_y * 90.0 / 640 * 0.01745329252
        PyPR2.updateHeadPos( chx, chy )
Example No. 33
def track_human(focus_obj):
    mid_x = focus_obj['bound'][0] + focus_obj['bound'][2] / 2
    mid_y = focus_obj['bound'][1] + focus_obj['bound'][3] / 2
    #print "track obj {} mid pt ({}.{})".format(focus_obj['track_id'],mid_x,mid_y)
    ofs_x = mid_x - 320
    ofs_y = mid_y - 240
    chx = chy = 0.0

    if math.fabs(ofs_x) > 10:
        chx = -ofs_x * 90.0 / 640 * 0.01745329252
    #head_yaw_list.append(chx)

    if math.fabs(ofs_y) > 10:
        chy = ofs_y * 90.0 / 640 * 0.01745329252
        PyPR2.updateHeadPos(chx, chy)
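The offset-to-angle conversion above assumes roughly a 90 degree field of view across a 640 pixel wide image; the constant 0.01745329252 is just pi/180, so a pixel offset is mapped to a head adjustment in radians. A small helper that spells that out (illustrative only, not part of the original code; math assumed imported as elsewhere on this page):

def pixel_offset_to_radians(ofs_pixels, fov_degrees=90.0, image_width=640):
    # fraction of the field of view covered by the offset, converted to radians
    return ofs_pixels * fov_degrees / image_width * math.pi / 180.0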
Example No. 34
def rotate_robot_to(desired_angle, time_to_reach = None, in_degrees = True):
    global body_reached, body_failed
    max_speed = math.pi/18.0 # (The maximum speed is 10 degrees per second)
    if in_degrees:
        gain = math.pi/180.0
    else:
        gain = 1.0     
    body_reached = False
    body_failed  = False
    tau0         = body_angle(in_degrees = in_degrees)
    d_theta      = gain*(desired_angle - tau0)
    if time_to_reach == None:
        time_to_reach = abs(d_theta/max_speed)    

    PyPR2.moveBodyTo(0.0, 0.0, d_theta, time_to_reach + 2.0) 	
Example No. 35
def take_robot_to(X, Y, time_to_reach = 5.0):
    global body_reached, body_failed
    body_reached = False
    body_failed  = False
    rp = PyPR2.getRobotPose()
    P0 = rp['position']
    dX = X - P0[0]
    dY = Y - P0[1]
    tau = body_angle(in_degrees = False)
    S   = math.sin(tau)
    C   = math.cos(tau)
    dx =  C*dX + S*dY
    dy = -S*dX + C*dY

    PyPR2.moveBodyTo(dx, dy, 0.0, time_to_reach)
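take_robot_to rotates the world-frame displacement (dX, dY) into the robot's body frame through the current heading tau before calling PyPR2.moveBodyTo. A quick sanity check of that rotation with hypothetical numbers (math assumed imported as elsewhere on this page):

# Robot facing 90 degrees (tau = pi/2); a world-frame target 1 m along world Y
# should come out as pure forward (body-x) motion.
tau = math.pi / 2
dX, dY = 0.0, 1.0
dx =  math.cos(tau) * dX + math.sin(tau) * dY   # -> 1.0 (forward)
dy = -math.sin(tau) * dX + math.cos(tau) * dY   # -> 0.0 (no sideways motion)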
Example No. 36
def onHumanTracking(tracking_objs):
    #global busymoving
    #SHOOTING_TAG = 0
    #global start_time,last_action_counter,movement_tracker,msgTryTimer,d,x,y,track_x,track_y,track_d,elapsed_time,focus_obj,HUMAN_DETECTION_COUNTER
    global a2, b2, NEW_INTERACTION_INITIALISER, NumPeople, HumanPresence, x, y, msgTryTimer
    NumPeople = len(tracking_objs)
    a2 += 1
    if len(tracking_objs) == 0:
        NEW_INTERACTION_INITIALISER += 1
        if HumanPresence == True:
            PyPR2.removeTimer(msgTryTimer)

        if NEW_INTERACTION_INITIALISER > 500:
            restoreInitialState()

        HumanPresence = False

    elif len(tracking_objs) > 0:
        if HumanPresence == False:

            PyPR2.onTimer = timerActions
            msgTryTimer = PyPR2.addTimer(1, -1, 0.5)
            b2 += 1
            #st_time = time.time()
            #no_objTracker.append(elapsed_time)
            HumanPresence = True

        object_index = closest_obj_index(tracking_objs)
        focus_obj = tracking_objs[object_index]
        x = focus_obj['est_pos'][0]
        y = focus_obj['est_pos'][1]

        mid_x = focus_obj['bound'][0] + focus_obj['bound'][2] / 2

        mid_y = focus_obj['bound'][1] + focus_obj['bound'][3] / 2

        ofs_x = mid_x - 320
        ofs_y = mid_y - 240
        chx = chy = 0.0

        if math.fabs(ofs_x) > 10:
            chx = -ofs_x * 90.0 / 640 * 0.01745329252
        #head_yaw_list.append(chx)

        if math.fabs(ofs_y) > 10:
            chy = ofs_y * 90.0 / 640 * 0.01745329252

        PyPR2.updateHeadPos(chx, chy)
Example No. 37
def respond( question ):
  q = question.strip().lower()

  if 'ip' in q or 'addr' in q:
    return "My IP Address is %s." % PyPR2.getMyIPAddress()
  elif 'tuck' in q and 'arm' in q:
    PyPR2.tuckBothArms()
    return "I'm tucking my arms."
  elif 'battery' in q and ('how much' in q or 'status' in q):
    (batpc, isplug, timeremain) = PyPR2.getBatteryStatus()
    if isplug != 'unplugged':
      return "I'm currently %s with %d percent battery power and finish charging in approximately %d minutes." % (isplug, batpc, timeremain % 60 )
    else:
      return "I'm currently %s with %d percent battery power and running out battery in approximately %d minutes." % (isplug, batpc, timeremain % 60 )

  return "I don't understand your request."
Example No. 38
def restoreInitialState():
    global LostConnectionCounter, NumPeople, tracking_data, NEW_INTERACTION_INITIALISER, csvFile, csvFileCounter, last_x

    PyPR2.tuckBothArms()  #for the time being

    #NumPeople = 0
    tracking_data = [
        'focus_x', 'focus_y', 'avg_x', 'avg_y', 'time', 'speechCommand',
        'actionCounter'
    ]
    NEW_INTERACTION_INITIALISER = 0
    csvFileCounter += 1
    csvFile = "/home/demoshare/sid_stuff/Nov26TrackData/" + str(
        csvFileCounter) + ".csv"  #change the approprite csvFile
    LostConnectionCounter = 0
    last_x = 0
Example No. 39
def larm_joints(in_degrees = True):
    if in_degrees:
        gain = 180.0/math.pi
    else:
        gain = 1.0     
    lajd = PyPR2.getArmJointPositions(True)
    return(gain*gen_larm_joint_vector_from_dic(lajd))
Example No. 40
def onHumanTracking(tracking_objs):

    global HUMAN_COUNTER, track_data, msgTryTimer, x, y, d, st_time
    if len(tracking_objs) != 0:
        if HUMAN_COUNTER == 0:
            PyPR2.onTimer = timerActions
            msgTryTimer = PyPR2.addTimer(1, -1, 2)

            elapsed_time = time.time() - st_time
            no_objTracker.append((elapsed_time, time.time()))
            HUMAN_COUNTER = len(tracking_objs)

        object_index = closest_obj_index(tracking_objs)
        focus_obj = tracking_objs[object_index]

        x = focus_obj['est_pos'][0]
        y = focus_obj['est_pos'][1]
        d = math.sqrt(math.pow(x, 2) + math.pow(y, 2))

        #track_y.append(y)

        mid_x = focus_obj['bound'][0] + focus_obj['bound'][2] / 2
        mid_y = focus_obj['bound'][1] + focus_obj['bound'][3] / 2
        #print "track obj {} mid pt ({}.{})".format(focus_obj['track_id'],mid_x,mid_y)
        ofs_x = mid_x - 320
        ofs_y = mid_y - 240
        chx = chy = 0.0

        if math.fabs(ofs_x) > 10:
            chx = -ofs_x * 90.0 / 640 * 0.01745329252
            #head_yaw_list.append(chx)

        if math.fabs(ofs_y) > 10:
            chy = ofs_y * 90.0 / 640 * 0.01745329252

        PyPR2.updateHeadPos(chx, chy)

    else:

        if HUMAN_COUNTER != 0:
            PyPR2.removeTimer(msgTryTimer)
            #msgTryTimer = -1
            st_time = time.time()
            HUMAN_COUNTER = 0
Example No. 41
def pos_rarm_grip_wrt_tor():
    '''
    Returns the right arm gripper position vector (end-effector position) with respect to the torso at the origin (refer to pos_larm_grip_wrt_tor())
    '''
    
    p0 = PyPR2.getRelativeTF('base_link' , 'r_shoulder_pan_link')['position']  
    p  = p0 + pos_rarm_grip_wrt_tor_shpan() + numpy.array([0.0, 0.0, 0.051])
    
    return p
Example No. 42
def pos_rarm_grip():
    '''
    Returns the global position of the right arm gripper finger tip
    '''
    pr = vecmat.as_vector(PyPR2.getRelativeTF('base_footprint' , 'r_gripper_r_finger_tip_link')['position'])
    pl = vecmat.as_vector(PyPR2.getRelativeTF('base_footprint' , 'r_gripper_l_finger_tip_link')['position'])
    p  = (pl + pr)/2

    orient = geo.Orientation_3D(PyPR2.getRobotPose()['orientation'], representation = 'quaternion')
    pg     = numpy.dot(orient.matrix(), p) # p global is Rb Multiply by p

    p0 = PyPR2.getRobotPose()['position']

    x = pg[0] + p0[0]
    y = pg[1] + p0[1]
    z = pg[2] + p0[2]

    pos = numpy.array([x,y,z])
    return(pos)    
Example No. 43
    def onHumanTracking( self, tracking_objs ):
        focus_obj = None
        for obj in tracking_objs:
            if obj['est_pos'][0] < 4 and obj['est_pos'][0] >= 0:
                if not focus_obj or focus_obj['est_pos'][0] > obj['est_pos'][0]:
                    focus_obj = obj

        if focus_obj:
            mid_x = focus_obj['bound'][0] + focus_obj['bound'][2] / 2
            mid_y = focus_obj['bound'][1] + focus_obj['bound'][3] / 2
            #print "track obj {} mid pt ({}.{})".format(focus_obj['track_id'],mid_x,mid_y)
            ofs_x = mid_x - 320
            ofs_y = mid_y - 240
            chx = chy = 0.0
            if math.fabs(ofs_x) > 10:
                chx = -ofs_x * 90.0 / 640 * 0.01745329252
            if math.fabs(ofs_y) > 10:
                chy = ofs_y * 90.0 / 640 * 0.01745329252
            PyPR2.updateHeadPos( chx, chy )
Example No. 44
def body_angle(in_degrees=True ):
    if in_degrees:
        gain = 180.0/math.pi
    else:
        gain = 1.0     

    orient = geo.Orientation_3D(PyPR2.getRobotPose()['orientation'], representation = 'quaternion')
    tau = orient.angle()
    u   = orient.axis()
    return (gen.sign(u[2])*gain*tau)
Example No. 45
def rarm_elbow_flex_joint(in_degrees = True):
    '''
    returns the current angle of the elbow flex joint of the right arm
    '''
    if in_degrees:
        gain = 180.0/math.pi
    else:
        gain = 1.0     
    rajd = PyPR2.getArmJointPositions(False)
    return(gain*rajd['r_elbow_flex_joint'])
Example No. 46
def larm_elbow_flex_joint(in_degrees = True):
    '''
    returns the current angle of the elbow flex joint of the left arm
    '''
    if in_degrees:
        gain = 180.0/math.pi
    else:
        gain = 1.0     
    lajd = PyPR2.getArmJointPositions(True)
    return(gain*lajd['l_elbow_flex_joint'])
Example No. 47
def larm_shoulder_lift_joint(in_degrees = True):
    '''
    returns the current angle of the shoulder lift joint of the left arm
    '''
    if in_degrees:
        gain = 180.0/math.pi
    else:
        gain = 1.0     
    rajd = PyPR2.getArmJointPositions(True)
    return(gain*rajd['l_shoulder_lift_joint'])