コード例 #1
0
ファイル: bounce.py プロジェクト: davesnowdon/nao-recorder
def main(hostname, portnumber):
    print "Connecting to {}:{}".format(hostname, portnumber)
    b = broker.Broker('NaoRecorder', naoIp=hostname, naoPort=portnumber)
    if b:
        env = naoenv.make_environment(None)
        memory.subscribeToEvent("HandLeftBackTouched", callback)
        memory.subscribeToEvent("FrontTactilTouched", print_events)
        print "waiting for events\n"
        time.sleep(120)
        print_events()
        print "done\n"
コード例 #2
0
ファイル: core.py プロジェクト: VRDate/nao-recorder
 def enable_speech_recognition(self):
     """Subscribe the configured WordRecognized handler, if one exists."""
     # EAFP: fetch the handler directly; a missing key means nothing to enable.
     try:
         handler = self.event_handlers["WordRecognized"]
     except KeyError:
         return
     memory.subscribeToEvent("WordRecognized", handler)
コード例 #3
0
ファイル: core.py プロジェクト: VRDate/nao-recorder
 def do_subscribe(self):
     """Register every configured (event name -> callback) pair with ALMemory.

     No-op when no handlers are configured.
     """
     # Fix: dict.iteritems() exists only in Python 2; items() behaves
     # identically here and keeps the method portable to Python 3.
     if self.event_handlers:
         for event_name, handler in self.event_handlers.items():
             memory.subscribeToEvent(event_name, handler)
コード例 #4
0
ファイル: nao-conscious.py プロジェクト: dnajd/nao-conscious
	memory.unsubscribeToEvent('RearTactilTouched')  

# sigint
def tear_down_signal_handler(signal, frame):
    """SIGINT handler: run cleanup, then terminate with exit status 0."""
    tear_down()
    # sys.exit(0) simply raises SystemExit(0); raise it directly.
    raise SystemExit(0)

# Route Ctrl-C (SIGINT) through the same teardown path before exiting.
signal.signal(signal.SIGINT, tear_down_signal_handler)
	
# tactil
def tear_down_tactil_handler(dataName, value, message):
	"""ALMemory tactile callback: run tear_down() on a press (value == 1)."""
	if value != 1:
		return
	tear_down()

# Touching the rear head sensor also triggers the teardown handler above.
memory.subscribeToEvent('RearTactilTouched', tear_down_tactil_handler)



#######################
# FluentNao: example of basic event & handlers

# tactil
# def right_bumper_handler(dataName, value, message):
# 	if value==1:
# 		n.say('that is my right foot')

# def left_bumper_handler(dataName, value, message):
# 	if value==1:
# 		n.say('that is my left foot')
コード例 #5
0
ファイル: bootstrap.py プロジェクト: niceyang/FluentNao
def subscribe_callback(dataName, value, message):
	"""ALMemory callback that re-subscribes the tactile and speech handlers."""
	pairs = (('FrontTactilTouched', tactil_callback),
		('WordRecognized', speech_callback))
	for event_name, handler in pairs:
		memory.subscribeToEvent(event_name, handler)
コード例 #6
0
ファイル: bootstrap.py プロジェクト: niceyang/FluentNao
	key = 'arms out'
	if key in d and d[key] > t:
		self.nao.arms.out()

	key = 'hands open'
	if key in d and d[key] > t:
		self.nao.hands.open()

	key = 'hands closed'
	if key in d and d[key] > t:
		self.nao.hands.close()

# speech recogn
# Phrases the recognizer should listen for.
# NOTE(review): second setVocabulary arg presumably enables word spotting -- confirm against NAOqi docs.
vocab = ['stand','sit', 'crouch', 'arms forward', 'arms out', 'hands open', 'hands closed']
nao.env.speechRecognition.setVocabulary(vocab, True)

# on / off
# Front head sensor re-attaches the handlers; rear sensor removes them.
memory.subscribeToEvent('FrontTactilTouched', subscribe_callback)
memory.subscribeToEvent('RearTactilTouched', unsubscribe_callback)


# events you can use
#RightBumperPressed, LeftBumperPressed, ChestButtonPressed, FrontTactilTouched
#MiddleTactilTouched, RearTactilTouched, HotJointDetected, HandRightBackTouched, HandRightLeftTouched
#HandRightRightTouched, HandLeftBackTouched, HandLeftLeftTouched, HandLeftRightTouched
#BodyStiffnessChanged, SimpleClickOccured, DoubleClickOccured, TripleClickOccured
#WordRecognized, LastWordRecognized, SpeechDetected -- see https://community.aldebaran-robotics.com/doc/1-14/naoqi/audio/alspeechrecognition-api.html#ALSpeechRecognitionProxy::setVisualExpression__bCR

#broker.shutdown()
コード例 #7
0
	def subscribe_callback(dataName, value, message):
		"""Hook the tactile and speech callbacks back into ALMemory."""
		subscriptions = (('FrontTactilTouched', tactil_callback),
			('WordRecognized', speech_callback))
		for event_name, handler in subscriptions:
			memory.subscribeToEvent(event_name, handler)
コード例 #8
0
    def __init__(self, nao):
        """Store the nao facade and reset recognition-logging state."""

        # args
        self.nao = nao 

        # class state
        # presumably tracks recognition results already logged -- verify with callers
        self.logged_recog = {}  

	# callbacks
	def unsubscribe_callback(dataName, value, message):
		"""Detach the tactile and speech handlers from ALMemory."""
		for event_name in ('FrontTactilTouched', 'WordRecognized'):
			memory.unsubscribeToEvent(event_name)

	def subscribe_callback(dataName, value, message):
		"""Re-attach the tactile and speech handlers to ALMemory."""
		for event_name, handler in (('FrontTactilTouched', tactil_callback),
				('WordRecognized', speech_callback)):
			memory.subscribeToEvent(event_name, handler)

	def tactil_callback(dataName, value, message):
		if value==1:
			print 'pressed'
		else:
			print 'released'

	def speech_callback(dataName, value, message):
		print value

		# zip into dictionary
		d = dict(zip(value[0::2], value[1::2]))
		t = .58

		#key = 'sit'
		#if key in d and d[key] > t:
	#		self.nao.sit()

		#key = 'wake'
		#if key in d and d[key] > t:
	#		self.nao.stiff()
		
	#	key = 'sleep'
	#	if key in d and d[key] > t:
	#		self.nao.relax()

		key = 'data'
		if key in d and d[key] > t:
			self.nao.say('yes what do you want')

		key = 'phasers'
		if key in d and d[key] > t:
			self.nao.say('fire when ready')

		key = 'tractor'
		if key in d and d[key] > t:
			self.nao.say('engage and pull them in')

		key = 'hailing'
		if key in d and d[key] > t:
			self.nao.say('bring up visuals')

		key = 'torpedo'
		if key in d and d[key] > t:
			self.nao.say('shields up')

		key = 'shields'
		if key in d and d[key] > t:
			self.nao.say('holding at 20 percent')

		key = 'purple'
		if key in d and d[key] > t:
			self.nao.say('jovial loves that color')

	# speech recogn
	# Phrases the recognizer should listen for.
	# NOTE(review): second setVocabulary arg presumably enables word spotting -- confirm against NAOqi docs.
	vocab = ['sit','wake', 'sleep', 'data', 'phasers', 'tractor', 'hailing', 'torpedo', 'shields', 'purple']
	self.nao.env.speechRecognition.setVocabulary(vocab, True)

	# on / off
	# Front head sensor enables the handlers; rear sensor disables them.
	memory.subscribeToEvent('FrontTactilTouched', subscribe_callback)
	memory.subscribeToEvent('RearTactilTouched', unsubscribe_callback)
コード例 #9
0
# sigint
def tear_down_signal_handler(signal, frame):
    """Handle Ctrl-C (SIGINT): run the teardown sequence, then exit cleanly."""
    tear_down()
    sys.exit(0)


# Route Ctrl-C (SIGINT) through the same teardown path before exiting.
signal.signal(signal.SIGINT, tear_down_signal_handler)


# tactil
def tear_down_tactil_handler(dataName, value, message):
    """Rear-tactile callback; a press (value == 1) triggers full teardown."""
    if value != 1:
        return
    tear_down()


# Touching the rear head sensor also triggers the teardown handler above.
memory.subscribeToEvent('RearTactilTouched', tear_down_tactil_handler)

#######################
# FluentNao: example of basic event & handlers

# tactil
# def right_bumper_handler(dataName, value, message):
# 	if value==1:
# 		n.say('that is my right foot')

# def left_bumper_handler(dataName, value, message):
# 	if value==1:
# 		n.say('that is my left foot')

# memory.subscribeToEvent('RightBumperPressed', right_bumper_handler)
# memory.subscribeToEvent('LeftBumperPressed', left_bumper_handler)
コード例 #10
0
ファイル: main.py プロジェクト: davesnowdon/nao-conscious
# HELPERS


# shutdown with rear tactil
def tear_down(dataName, value, message):
	"""Rear-tactile callback: on a press (value == 1) shut down every provider."""
	if value != 1:
		return
	# Shut the providers down in the original order.
	for provider in (touch_provider, time_provider,
			face_recog_provider, voice_recog_provider):
		provider.tear_down()

	
# A rear head-sensor press triggers the tear_down callback above.
memory.subscribeToEvent('RearTactilTouched', tear_down)

# setup all providers
def setup():
	
	# time: sleepy & look around
	time_provider.add_subscriber(sleepy_subscriber)
	time_provider.add_subscriber(look_around_subscriber)
	time_provider.setup()

	# tactile: laugh
	touch_provider.add_subscriber(laugh_subscriber)
	touch_provider.setup()

	# face recog
	face_recog_provider.add_subscriber(greeting_subscriber)
コード例 #11
0
        self.nao.arms.out()

    key = 'hands open'
    if key in d and d[key] > t:
        self.nao.hands.open()

    key = 'hands closed'
    if key in d and d[key] > t:
        self.nao.hands.close()


# speech recogn
# Phrases the recognizer should listen for.
# NOTE(review): second setVocabulary arg presumably enables word spotting -- confirm against NAOqi docs.
vocab = [
    'stand', 'sit', 'crouch', 'arms forward', 'arms out', 'hands open',
    'hands closed'
]
nao.env.speechRecognition.setVocabulary(vocab, True)

# on / off
# Front head sensor re-attaches the handlers; rear sensor removes them.
memory.subscribeToEvent('FrontTactilTouched', subscribe_callback)
memory.subscribeToEvent('RearTactilTouched', unsubscribe_callback)

# events you can use
#RightBumperPressed, LeftBumperPressed, ChestButtonPressed, FrontTactilTouched
#MiddleTactilTouched, RearTactilTouched, HotJointDetected, HandRightBackTouched, HandRightLeftTouched
#HandRightRightTouched, HandLeftBackTouched, HandLeftLeftTouched, HandLeftRightTouched
#BodyStiffnessChanged, SimpleClickOccured, DoubleClickOccured, TripleClickOccured
#WordRecognized, LastWordRecognized, SpeechDetected -- see https://community.aldebaran-robotics.com/doc/1-14/naoqi/audio/alspeechrecognition-api.html#ALSpeechRecognitionProxy::setVisualExpression__bCR

#broker.shutdown()