def __init__(self, brain_spec=p.BRAIN_SPEC):
    """Reset agent state/physics, then build the thinking center.

    brain_spec -- brain specification forwarded to Head (defaults to
    the project-wide p.BRAIN_SPEC).
    """
    # State and physics initialization.
    self.agent_reset()
    # Think-center initialization: keep the spec and build the head from it.
    self.head = Head(brain_spec)
    self.brain_spec = brain_spec
def test3():
    """Interactive console loop: forward typed commands to a Head.

    Reads commands with raw_input until the operator ends the session
    (EOF or Ctrl-C), then shuts the head down.
    """
    head = Head()
    try:
        while True:
            a = raw_input("command:")
            head.ExecuteCommand(a)
    except (EOFError, KeyboardInterrupt):
        # Operator ended the session; fall through to cleanup.
        # (The original bare `except: pass` also hid real errors from
        # ExecuteCommand — those now propagate after cleanup.)
        pass
    finally:
        # Always release the hardware, even if ExecuteCommand raised.
        head.Shutdown()
def __init__(self):
    """Assemble a randomized figure centered at 50+50j.

    NOTE: the order of random.randint / limbDimensions calls is
    significant for reproducibility with a seeded RNG and is preserved
    exactly from the original.
    """
    self.center = 50 + 50j
    # Random pose angles (degrees).
    head_angle = random.randint(-30, 30)
    torso_angle = random.randint(-15, 15)
    # Torso dimensions, then an offset that centers it on self.center.
    h = random.randint(20, 35)
    w = random.randint(15, 25)
    offset = self.center - (w + (h * 1j)) / 2
    self.torso = Torso(h, w).translated(offset).rotated(torso_angle)
    self.head = Head(random.randint(5, 10)).rotated(head_angle)
    # One arm swung to the back range, one to the front; random elbow bend.
    arm_angle_ranges = [(135, 225), (-45, 45)]
    self.arms = [
        Limb(*self.limbDimensions())
        .rotated(random.randint(lo, hi))
        .bottomRotated(random.randint(-30, 30))
        for lo, hi in arm_angle_ranges
    ]
    # Both legs point roughly downward, with random knee bend.
    self.legs = [
        Limb(*self.limbDimensions())
        .rotated(random.randint(70, 110))
        .bottomRotated(random.randint(-30, 30))
        for _ in range(2)
    ]
    self.attach()
def __init__(self):
    """Build all of Inmoov's parts from the servo config in INMOOV_FILE."""
    # Parse the JSON config; `parse` (the object_hook) is relied on to
    # populate the module-level `servos` collection as a side effect —
    # the return value of json.load is intentionally discarded.
    with open(INMOOV_FILE) as json_file:
        json.load(json_file, object_hook=parse)

    def servo(name):
        # Look up a servo by its configured name.
        # Assumes exactly one servo matches (Python 2: filter returns a list).
        return filter(lambda x: x.name == name, servos)[0]

    self.head = Head(servo("head_x"), servo("head_y"))

    # Right side
    # BUG FIX: the right wrist previously looked up the servo named
    # "left_wrist" (copy-paste error) — TODO confirm the config defines
    # a "right_wrist" entry.
    self.right_wrist = Wrist(servo("right_wrist"))
    # NOTE(review): the right side uses "right_mid" while the left uses
    # "left_middle" — verify against the config naming.
    self.right_hand = Hand(
        Finger(servo("right_pinky")),
        Finger(servo("right_ring")),
        Finger(servo("right_mid")),
        Finger(servo("right_index")),
        Finger(servo("right_thumb")))
    self.right_forearm = Forearm(self.right_hand, self.right_wrist)
    self.right_shoulder = Shoulder(
        servo("right_shoulder_flexion"),
        servo("right_shoulder_abduction"),
        servo("right_shoulder_rotation_x"),
        servo("right_shoulder_rotation_y"))
    self.right_arm = Arm(self.right_forearm, self.right_shoulder)

    # Left side
    self.left_wrist = Wrist(servo("left_wrist"))
    self.left_hand = Hand(
        Finger(servo("left_pinky")),
        Finger(servo("left_ring")),
        Finger(servo("left_middle")),
        Finger(servo("left_index")),
        Finger(servo("left_thumb")))
    self.left_forearm = Forearm(self.left_hand, self.left_wrist)
    self.left_shoulder = Shoulder(
        servo("left_shoulder_flexion"),
        servo("left_shoulder_abduction"),
        servo("left_shoulder_rotation_x"),
        servo("left_shoulder_rotation_y"))
    self.left_arm = Arm(self.left_forearm, self.left_shoulder)

    self.initialize()
def head(**kwds):
    """Factory: build and return a Head, forwarding all keyword arguments."""
    from Head import Head
    instance = Head(**kwds)
    return instance
def test_remote(): from Head import Head InitCamera("pi", "picamera", StdoutLogger(), Head()) print test_serie()
import sys
sys.path.append('.')

from time import sleep

from Head import Head

# Smoke test: drive the head forward for one second, then stop.
head = Head()
try:
    head.MoveForward()
    sleep(1)
    head.MoveStop()
finally:
    # FIX: guarantee hardware shutdown even if a motion command raises
    # (previously an exception would skip Shutdown entirely).
    head.Shutdown()
def __init__(self):
    """Initialize: a Head at (0, 0), no body, and direction set to 1."""
    self.head = Head(0, 0)
    self.body = None
    self.direction = 1
def head(self, **kwds):
    """Build a Head from the given keyword args, record it in self.contents, and return it."""
    from Head import Head
    new_head = Head(**kwds)
    self.contents.append(new_head)
    return new_head
def main():
    """Run one Baxter hand-clapping experiment trial.

    Parses the trial-condition number from argv, configures the robot as
    leader or follower (cooperative or not) at a given clapping frequency,
    runs up to `limit` clapping cycles (or 40 seconds), then moves the arm
    to an end pose and plots the recorded motion data.
    """
    # Get global vars.
    # NOTE(review): rep_num, subj_num and last_contact_time are read below
    # but never initialized here — presumably set at module level / by the
    # accelerometer callback; confirm outside this chunk.
    global motion_time
    global engine
    global rep_num
    global last_contact_time
    global trial_cond
    global subj_num
    # Let user input experiment trial number (1-25, defined in dictionary below)
    parser = argparse.ArgumentParser(description = 'Input stimulus number.')
    parser.add_argument('integers', metavar = 'N', type = int, nargs = '+', help = 'an integer representing exp conds')
    foo = parser.parse_args()
    trial_cond = (foo.integers[0])
    # Lookup table of conditions keyed by trial number:
    # {trial: [robot_lead, follower_coop, clap frequency in Hz]}
    trial_stimuli = {1:[True,True,1.333], 2:[True,True,1.833], 3:[True,False,1.333],
                     4:[True,False,1.833], 5:[False,True,1.333], 6:[False,True,1.833],
                     7:[False,False,1.333], 8:[False,False,1.833], 9:[True,True,1.333],
                     10:[True,False,1.333], 11:[False,True,1.333], 12:[False,False,1.333]}
    # Break out selected dictionary entries into experiment trial variables
    robot_lead = trial_stimuli[trial_cond][0]    # Boolean: robot leads the clapping
    follower_coop = trial_stimuli[trial_cond][1] # Boolean: follower robot cooperates
    freq = trial_stimuli[trial_cond][2]          # Hz, robot clapping frequency
    # Define uncooperative robot time intervals (seconds per cycle)
    dummy_ints = [0.8, 0.4, 0.7, 0.5, 0.45, 0.65, 0.55, 0.7, 0.6, 0.5, 0.6, 0.55,
                  0.45, 0.7, 0.75, 0.6, 0.45, 0.8, 0.7, 0.55, 0.45, 0.4, 0.45,
                  0.65, 0.7, 0.45]
    # Initialize a node for this overall Baxter use
    print("Initializing node... ")
    rospy.init_node("baxter_arm_motion")
    # Set important constants for Baxter motion cycles
    motion_time = 1.0/freq   # cycle time for first segment of Baxter "motion"
    tempM = 1.0/freq         # future cycle time interval estimate
    mult_factor = 0.0        # factor for multiplication mentioned in next line
    factor = mult_factor * motion_time  # "padding" that can slow Baxter down if desired
    engine = HandEngine()    # object generated using the HandEngine.py code
    arm = MovingArm(1000.0,subj_num,trial_cond)  # object generated using the MovingArm.py code
    face = Head("faces/")    # object generated using the Head.py code
    limit = 25               # number of clapping cycles that user should complete
    elapsed_time = dummy_ints[0]  # keep track of elapsed time into trial
    # Load face image on Baxter's screen (background thread at 30 — TODO
    # confirm units, presumably frames per second)
    thread.start_new_thread(face.run,(30,))
    face.changeEmotion('HappyNWBlue')
    # Define robot start pose (joint name -> radians)
    start_pos = {'right_s0': -0.8620972018066407, 'right_s1': 0.35665053277587894,
                 'right_w0': 1.1696603494262696, 'right_w1': 1.3593157223693849,
                 'right_w2': -0.02070874061279297, 'right_e0': 1.5132720455200197,
                 'right_e1': 1.9381847232788088}
    # Move Baxter to start pose
    arm._right_limb.move_to_joint_positions(start_pos)
    # Initialize motion of Baxter's one active joint in this interaction
    arm.beginSinusoidalMotion("right_w1")
    engine.resetStartTime()
    shift = False
    # Start subscriber that listens to accelerometer values
    rospy.Subscriber('/robot/accelerometer/right_accelerometer/state', Imu, callback)
    # Set control rate
    control_rate = rospy.Rate(1000)
    # Print cue so operator knows robot is ready
    print 'Ready to start trial'
    ################################ Robot Leader Case ##########################################
    # Robot leads; applies to either follower-cooperation case.
    if robot_lead:
        # Compute frequency (1/T) for robot motion cycles
        engine.frequency = 1 / (motion_time)
        # Compute appropriate amplitude of hand motion using eqn fit from human motion
        for_amp = engine.calculateAmplitude(motion_time)
        # Record the time that the experiment trial is starting
        trial_start_time = time.time()
        # Bring motion back to beginning of sinusoid
        arm.rezeroStart()
        # Loop until enough claps are counted or 40 s elapse.
        while rep_num < limit and time.time() - trial_start_time < 40:
            arm.updateSinusoidalMotion(for_amp, engine.frequency)
            # If engine is pinged (sensed hand contact)...
            if engine.ping():
                # Increment hand contact count
                rep_num = rep_num + 1
                # Set reactive facial expression
                face.switchEmotion('ReactiveBlinkBlue','HappyNWBlue',0.15)
    ################################ Robot Follower Case #########################################
    # Robot follows the human's clapping.
    else:
        # Compute frequency (1/T) for first robot motion cycle
        engine.frequency = 1 / (motion_time + factor)
        # Start with zero amplitude: robot stays still until the human claps.
        for_amp = 0
        # Record the time that the experiment trial is starting
        trial_start_time = time.time()
        # Loop until enough claps are counted or 40 s elapse.
        while rep_num < limit and time.time() - trial_start_time < 40:
            arm.updateSinusoidalMotion(for_amp, engine.frequency)
            # Wait for first few contacts before moving robot
            if rep_num < 3:
                # If engine is pinged (sensed hand contact)...
                if engine.ping():
                    # Set reactive facial expression
                    face.switchEmotion('ReactiveBlinkBlue','HappyNWBlue',0.15)
                    # For first few contacts, use dummy 0-amplitude curve
                    if rep_num < 2:
                        # Command 0-amplitude curve
                        arm.updateSinusoidalMotion(for_amp, engine.frequency)
                        trial_start_time = time.time()
                    # Then get ready with the right sinusoid parameters
                    else:
                        tempM = engine.estimateInterval(1)  # predicted next motion cycle period
                        motion_time = tempM                 # update cycle time
                        engine.frequency = 1 / (motion_time + factor)  # update cycle frequency
                        arm.rezeroStart()                   # bring motion back to beginning of sinusoid
                        for_amp = engine.calculateAmplitude(motion_time + (factor))  # update motion amplitude
                    # Increment hand contact count
                    rep_num = rep_num + 1
            # Then start serious motion
            else:
                arm.updateSinusoidalMotion(for_amp, engine.frequency)
                # Cooperative robot follower condition
                if follower_coop:
                    # If engine is pinged (sensed hand contact)...
                    if engine.ping():
                        # Set reactive facial expression
                        face.switchEmotion('ReactiveBlinkBlue','HappyNWBlue',0.15)
                        # Compute estimated appropriate next motion cycle,
                        # capping it at 0.3 s minimum.
                        tempM = engine.estimateInterval(1)
                        if tempM < 0.3:
                            tempM = 0.3
                        print 'Cycle time is'
                        print tempM
                        # Update motion parameters since a contact was sensed
                        # in the last motion rep.
                        motion_time = tempM                          # update cycle time
                        factor = mult_factor * motion_time           # update slow-down padding factor
                        engine.frequency = 1 / (motion_time + factor)  # update cycle frequency
                        for_amp = engine.calculateAmplitude(motion_time + factor)  # update motion amplitude
                        arm.rezeroStart()                            # back to beginning of sinusoid
                        arm.beginSinusoidalMotion("right_w1")        # start new sinusoidal motion cycle
                        # Increment hand contact count
                        rep_num = rep_num + 1
                # Uncooperative robot follower condition
                else:
                    if engine.ping():
                        # Set reactive facial expression
                        face.switchEmotion('SillyBlinkBlue','HappyNWBlue',0.15)
                        # See if enough time has elapsed to change robot motion pattern
                        if time.time() - trial_start_time > elapsed_time:
                            # Increment hand contact count
                            rep_num = rep_num + 1
                            # Add elapsed period to the total elapsed time
                            elapsed_time += dummy_ints[rep_num]
                            # Pull the next scripted (uncooperative) cycle time.
                            motion_time = dummy_ints[rep_num]
                            factor = mult_factor * motion_time             # update slow-down padding factor
                            engine.frequency = 1 / (motion_time + factor)  # update cycle frequency
                            for_amp = engine.calculateAmplitude(motion_time + factor)  # update motion amplitude
                            arm.beginSinusoidalMotion("right_w1")          # start new sinusoidal motion cycle
    # Define robot end pose (joint name -> radians)
    end_pos = {'right_s0': -0.8620972018066407, 'right_s1': 0.35665053277587894,
               'right_w0': 1.1696603494262696, 'right_w1': 1.8593157223693849,
               'right_w2': -0.02070874061279297, 'right_e0': 1.5132720455200197,
               'right_e1': 1.9381847232788088}
    # Load happy ending image onto Baxter's face
    face.changeEmotion('JoyNWPurple')
    # Move Baxter to end pose
    arm._right_limb.move_to_joint_positions(end_pos)
    # Visualize what just happened
    plt.figure()
    plt.plot(arm._time, arm._data1, arm._time, arm._data2)
    plt.show()
    # NOTE(review): this chunk begins mid-function — the enclosing Flask
    # route handler's `def` line is above this view; the statements through
    # `return Response(...)` are its tail.
    app.logger.debug('SHUTDOWN START')
    # Stop the head hardware and the command processor before killing the server.
    g_Head.Shutdown()
    g_Commands.Shutdown()
    app.logger.debug('SHUTDOWN OK')
    # Werkzeug dev-server exposes its shutdown hook via the WSGI environ.
    shutdown_server = request.environ.get('werkzeug.server.shutdown')
    if shutdown_server is None:
        raise RuntimeError('Not running with the Werkzeug Server')
    shutdown_server()
    return Response("{'status': 'ok'}", content_type='text/plain; charset=utf-8')


if __name__ == "__main__":
    # "local" on the command line selects the simulated/local head;
    # otherwise use the real hardware Head.
    if len(sys.argv) >= 2 and sys.argv[1] == "local":
        from HeadLocal import HeadLocal
        g_Head = HeadLocal()
        #InitCamera("local", "cpp", app.logger)
        InitCamera("local", "dump", app.logger, g_Head)
    else:
        from Head import Head
        g_Head = Head()
        #InitCamera("pi", "cpp", app.logger)
        #InitCamera("pi", "dump", app.logger)
        InitCamera("pi", "picamera", app.logger, g_Head)
    # Start the background command processor, then the Flask server.
    g_Commands = Commands(g_Head, app.logger)
    g_Commands.start()
    app.run(host='0.0.0.0', port=7778, debug=True)
    # NOTE(review): this chunk begins mid-function — the enclosing Flask
    # route handler's `def` line is above this view; the statements through
    # `return Response(...)` are its tail.
    # Stop the head hardware and the command processor before killing the server.
    g_Head.Shutdown()
    g_Commands.Shutdown()
    app.logger.debug('SHUTDOWN OK')
    # Werkzeug dev-server exposes its shutdown hook via the WSGI environ.
    shutdown_server = request.environ.get('werkzeug.server.shutdown')
    if shutdown_server is None:
        raise RuntimeError('Not running with the Werkzeug Server')
    shutdown_server()
    return Response("{'status': 'ok'}", content_type='text/plain; charset=utf-8')


if __name__ == "__main__":
    # All components now take their settings from head.cfg.
    config = ConfigParser.ConfigParser()
    config.read('head.cfg')
    # "local" on the command line selects the simulated/local head;
    # otherwise use the real hardware Head.
    if len(sys.argv) >= 2 and sys.argv[1] == "local":
        from HeadLocal import HeadLocal
        g_Head = HeadLocal(config)
    else:
        from Head import Head
        g_Head = Head(config)
    InitCamera(app.logger, g_Head, config)
    # Start the background command processor, then the Flask server.
    g_Commands = Commands(g_Head, app.logger, config)
    g_Commands.start()
    app.run(host='0.0.0.0', port=7778, debug=True)
def addhead(self):
    """Add a head (original comment: "Добавить главу" — add a head/chief).

    Reloads state from the backing file, appends a new Head to the
    workers list, updates the edit target, and writes the state back.
    """
    self.fromfile()               # load current state from file
    self.workers.append(Head())
    self.edittarget()             # presumably re-points editing at the new entry — confirm
    self.infile()                 # persist state back to file