Example #1
0
# --- Pan/tilt servo + OpenCV face-detect setup (MyRobotLab / Jython) ---
# Creates two Servo services, attaches them to Arduino pins 14/15,
# centers both servos, then starts an OpenCV pipeline with a
# face-detect filter.
# NOTE(review): 'arduino' is used below but not created in this chunk —
# presumably created earlier in the original script; confirm.
pan = Runtime.createAndStart("pan", "Servo")
tilt = Runtime.createAndStart("tilt", "Servo")
# serial: 57600 baud, 8 data bits, 1 stop bit, parity 0
arduino.connect("/dev/ttyACM0", 57600, 8, 1, 0)
# give the board time to reset after the serial port opens
sleep(4)
arduino.attach(pan.getName(), 14)
arduino.attach(tilt.getName(), 15)

# current servo positions, shared with handler functions defined
# elsewhere in the original script
global actservox
global actservoy
actservox = 90
actservoy = 90
# center both servos (90 degrees)
pan.moveTo(actservox)
tilt.moveTo(actservoy)

# create or get a handle to an OpenCV service
opencv = Runtime.create("opencv", "OpenCV")
opencv.startService()
# reduce the size - face tracking doesn't need much detail
# the smaller the faster
opencv.addFilter("Gray", "Gray")
opencv.addFilter("PyramidDown2", "PyramidDown")
opencv.addFilter("PyramidDown1", "PyramidDown")
#opencv.addFilter("PyramidUp1", "PyramidUp")

# add the face detect filter
opencv.addFilter("FaceDetect1", "FaceDetect")
# scan/tracking state used by callbacks not shown in this chunk:
# step sizes, frame-skip counters, and a spoke-once flag
dirx = 2
diry = 20
frameSkip = 0
frameSkipHuman = 0
spokeScan = False
Example #2
0
# --- Sphinx speech recognition + Speech playback setup (MRL / Jython) ---
# Creates a Sphinx "ear" with a fixed grammar, a Speech "mouth", and
# routes recognized phrases to a python method named "heard".
# NOTE(review): this example starts mid-comment; earlier lines are not
# present in this chunk.
# can be found ./audioFile/google/<language code>/audrey/phrase.mpe
#
# A message route is created to NOT recognize speech when the speech service is talking.
# Otherwise, initially amusing scenarios can occur such as infinite loops of
# the robot recognizing "hello", then saying "hello", then recognizing "hello"...
#
# The recognized phrase can easily be hooked to additional function such as
# changing the mode of the robot, or moving it.  Speech recognition is not
# the best interface to do finely controlled actuation.  But, it is very
# convenient to express high-level (e.g. go to center of the room) commands
#
# FYI - The memory requirements for Sphinx are a bit hefty and depending on the
# platform additional JVM arguments might be necessary e.g. -Xmx256m

# create an ear
ear = Runtime.create("ear","Sphinx")

# create the grammar you would like recognized
# this must be done before the service is started
ear.createGrammar("all open | hand close | pinch mode | open pinch | hand open | hand rest | hand open two ")
ear.startService()

# start the mouth
mouth = Runtime.createAndStart("mouth","Speech")

# set up a message route from the ear --to--> python method "heard"
ear.addListener("recognized", python.name, "heard", String().getClass()); 

# this method is invoked when something is 
# recognized by the ear - in this case we
# have the mouth "talk back" the word it recognized
# NOTE(review): the "heard" method itself is not present in this chunk.
Example #3
0
# --- Roomba drive test (MyRobotLab / Jython) ---
# Connects a Roomba service on COM9, enters control mode, plays a
# note, then drives forward, backward, and spins right, pausing
# between each command.
from org.myrobotlab.service import Runtime
from org.myrobotlab.service import Roomba
from time import sleep
 
roomba = Runtime.create("roomba","Roomba")
roomba.connect( "COM9" )
roomba.startService()
# Roomba OI startup sequence, then switch to control (safe) mode
roomba.startup()
roomba.control()
# play note 72 with duration 10 — units per the Roomba OI spec; TODO confirm
roomba.playNote( 72, 10 )
roomba.sleep( 200 )
roomba.goForward()
roomba.sleep( 1000 )
roomba.goBackward()
roomba.sleep( 1000 )
roomba.spinRight()
roomba.sleep( 1000 )
Example #4
0
# --- Python Service "input" demo: Clock + Log (MyRobotLab / Jython) ---
from org.myrobotlab.framework import Message

# inputTest.py
# example script for MRL showing Python Service
# input method.  Input is a hook which allows
# other services to send data to your script.
# This script will also show a "Message" which
# is the basic form of communication between all
# Services in MRL.  Additionally it will show how to
# extract data from the message to be used in the
# script.

# Create a running instance of the Clock Service.
# <<URL>>
# Name it "clock".
clock = Runtime.create("clock", "Clock")
clock.startService()
# Create a running instance of the Log Service.
# <<URL>>
# Name it "log".
log = Runtime.create("log", "Log")
log.startService()

# ----------------------------------
# input
# ----------------------------------
# the "input" method is a Message input sink
# currently String is the only
# parameter supported - possibly the future
# non-primitive Java data types could be dynamically
# constructed through reflection and sent to the
# NOTE(review): the comment above is cut off mid-sentence and the
# input() handler itself is not present in this chunk.
# --- Sphinx ear + Speech mouth + MagaBot setup (MRL / Jython) ---
# NOTE(review): this example starts mid-comment; earlier lines are not
# present in this chunk.
# can be found ./audioFile/google/<language code>/audrey/phrase.mpe
#
# A message route is created to NOT recognize speech when MRL is talking.
# Otherwise, initially amusing scenarios can occur such as infinite loops of
# the robot recognizing "hello", then saying "hello", then recognizing "hello"...
#
# The recognized phrase can easily be hooked to additional function such as
# changing the mode of the robot, or moving it.  Speech recognition is not
# the best interface to do finely controlled actuation.  But, it is very
# convenient to express high-level (e.g. go to center of the room) commands
#
# FYI - The memory requirements for Sphinx are a bit hefty and depending on the
# platform additional JVM arguments might be necessary e.g. -Xmx256m

# start an ear
ear = Runtime.create("ear","Sphinx")
# create the grammar you would like recognized
# this must be done before the service is started
ear.createGrammar("go | stop | left | right | back")
ear.startService()

# start the mouth
mouth = Runtime.create("mouth","Speech")
mouth.startService()

# create a magabot
magabot = Runtime.create("magabot","MagaBot")
magabot.init("COM8");  # initialize arduino on port specified to 9600 8n1

# flag read by speech handlers defined elsewhere — TODO confirm
speaking = False
Example #6
0
# --- Dual pan/tilt tracking rig setup (MyRobotLab / Jython) ---
# Creates a Tracking service plus its peer services: two servo pairs
# ("rotation"/"neck" and a second "b" rig), an Arduino, and x/y Pid
# controllers, then connects the Arduino on COM3.
from org.myrobotlab.opencv import OpenCVData
from com.googlecode.javacv.cpp.opencv_core import CvPoint;
from org.myrobotlab.service import OpenCV
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import Servo
from time import sleep

# current/target servo positions, shared with tracking callbacks
# defined elsewhere in the original script
global servox
global servoy
global actservox
global actservoy

actservox = 90
actservoy = 90
 
tracker = Runtime.create("tracker","Tracking")
 
# create all the peer services
rotation = Runtime.create("rotation","Servo")
neck = Runtime.create("neck","Servo")
arduino = Runtime.create("arduino","Arduino")
xpid = Runtime.create("xpid","Pid");
ypid = Runtime.create("ypid","Pid");

# second servo/PID set for a second axis pair ("b" rig)
rotationb = Runtime.create("rotationb","Servo")
neckb = Runtime.create("neckb","Servo")
xpidb = Runtime.create("xpidb","Pid");
ypidb = Runtime.create("ypidb","Pid");
 
# adjust values
arduino.connect("COM3")
Example #7
0
# --- Sphinx ear + Speech mouth + MagaBot setup, variant (MRL / Jython) ---
# NOTE(review): this example starts mid-comment; earlier lines are not
# present in this chunk.
# can be found ./audioFile/google/<language code>/audrey/phrase.mpe
#
# A message route is created to NOT recognize speech when MRL is talking.
# Otherwise, initially amusing scenarios can occur such as infinite loops of
# the robot recognizing "hello", then saying "hello", then recognizing "hello"...
#
# The recognized phrase can easily be hooked to additional function such as
# changing the mode of the robot, or moving it.  Speech recognition is not
# the best interface to do finely controlled actuation.  But, it is very
# convenient to express high-level (e.g. go to center of the room) commands
#
# FYI - The memory requirements for Sphinx are a bit hefty and depending on the
# platform additional JVM arguments might be necessary e.g. -Xmx256m

# start an ear
ear = Runtime.create("ear", "Sphinx")
# create the grammar you would like recognized
# this must be done before the service is started
ear.createGrammar("go | stop | left | right | back")
ear.startService()

# start the mouth
mouth = Runtime.create("mouth", "Speech")
mouth.startService()

# create a magabot
magabot = Runtime.create("magabot", "MagaBot")
magabot.init("COM8")
# initialize arduino on port specified to 9600 8n1

# flag read by speech handlers defined elsewhere — TODO confirm
speaking = False
# --- Python Service "input" demo: Clock + Log, duplicate copy (MRL / Jython) ---
from org.myrobotlab.framework import Message

# inputTest.py
# example script for MRL showing Python Service
# input method.  Input is a hook which allows
# other services to send data to your script.
# This script will also show a "Message" which
# is the basic form of communication between all
# Services in MRL.  Additionally it will show how to 
# extract data from the message to be used in the 
# script.

# Create a running instance of the Clock Service.
# <<URL>>
# Name it "clock".
clock = Runtime.create("clock","Clock")
clock.startService()
# Create a running instance of the Log Service.
# <<URL>>
# Name it "log".
log = Runtime.create("log","Log")
log.startService()

# ----------------------------------
# input
# ----------------------------------
# the "input" method is a Message input sink 
# currently String is the only
# parameter supported - possibly the future
# non-primitive Java data types could be dynamically
# constructed through reflection and sent to the 
# NOTE(review): the comment above is cut off mid-sentence; the rest of
# this example is not present in this chunk.
# --- PID + pan/tilt + OpenCV face-detect fragment (MRL / Jython) ---
# NOTE(review): this block starts mid-script; 'ypid' (and presumably
# 'xpid') are created earlier in the original example — confirm.
ypid.setControllerDirection(1)
ypid.setSetpoint(120)    # 120 is presumably the target y pixel coordinate — TODO confirm

arduino = Runtime.createAndStart("arduino","Arduino")
pan 	= Runtime.createAndStart("pan","Servo")
tilt	= Runtime.createAndStart("tilt","Servo")
# serial: 57600 baud, 8 data bits, 1 stop bit, parity 0
arduino.connect("COM3", 57600, 8, 1, 0)

arduino.attach(pan.getName() , 12)
arduino.attach(tilt.getName(), 13)
# servo position state shared with handlers defined elsewhere
global actservox
global actservoy


# create or get a handle to an OpenCV service
opencv = Runtime.create("opencv","OpenCV")
opencv.startService()
# reduce the size - face tracking doesn't need much detail
# the smaller the faster
opencv.addFilter("PyramidDown1", "PyramidDown")
# add the face detect filter
opencv.addFilter("FaceDetect1", "FaceDetect")
  
def input(opencvData):
 
    #print 'found face at (x,y) ', msg_opencv_publishOpenCVData.data[0].x(), msg_opencv_publish.data[0].y()
    global x
    global y
    global sposx
    global sposy
    global posx
Example #10
0
# --- MagaBot drive test (MyRobotLab / Jython) ---
# Creates a MagaBot service, initializes its Arduino link on COM8
# (9600 baud, 8N1), then sends the 'a' drive order 20 times at
# half-second intervals.
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import MagaBot
from org.myrobotlab.service import Runtime

from time import sleep

# Create a running instance of the MagaBot Service.
magabot = Runtime.create("magabot", "MagaBot")
magabot.startService()
magabot.init("COM8")  # initialize arduino on port specified to 9600 8n1

# send the 'a' order 20 times, pausing 0.5 s between sends;
# the loop index is unused, so it is named '_'
for _ in range(20):
    magabot.sendOrder('a')
    sleep(0.5)
Example #11
0
# --- Differential drive base: Arduino port/baud config (MRL / Jython) ---
# Only configures the Arduino connection; the drive/sensor logic is
# left as TODO comments by the original author.
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import Runtime

from time import sleep

# You should want a differential drive service associated with 
# an Arduino
# as well as anything which can do Position Encoding (interface)

arduino = Runtime.create("arduino","Arduino")

# FIXME - re-entrant and auto-save functionality
arduino.setPort("COM8")
arduino.setBaudBase(115200);

# if not loaded - load with PDE :P

# move

# report back sensor readings

# report back to simulator?

#script to control the TrashyBot platform through remote control via Xbox 360 wireless remote
#
#
#Nolan B. 1/8/16


from org.myrobotlab.service import Joystick
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import Runtime
from time import sleep

#----------------------------------Web Gui--------------------------
webgui = Runtime.create("webgui", "WebGui")
webgui.autoStartBrowser(False)
Runtime.start("webgui", "WebGui")


#---------------------------------Create Services----------------------
arduino = Runtime.createAndStart("arduino","Arduino")
joystick = Runtime.createAndStart("joystick","Joystick")
motorL = Runtime.start("motorL","Motor")
motorR = Runtime.start("motorR","Motor")
log = Runtime.start("log","Log")



#----------------------Connect Peripherals-----------------------------------
joystick.setController(0); #PC only - Pi needs new
# route joystick events to this python script's input handler
joystick.addInputListener(python)
arduino.connect("/dev/ttyACM0");
# Tell the joystick to turn on
# NOTE(review): the startPolling() call appears to be cut off in this
# chunk — see the duplicate copy of this script later in the file.
# --- MagaBot drive test, duplicate copy (MyRobotLab / Jython) ---
# Initializes a MagaBot on COM8 (9600 8N1) and sends the 'a' order 20
# times at half-second intervals.
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import MagaBot
from org.myrobotlab.service import Runtime

from time import sleep

# Create a running instance of the MagaBot Service.

magabot = Runtime.create("magabot","MagaBot")
magabot.startService()
magabot.init("COM8")  # initialize arduino on port specified to 9600 8n1

# send the order 20 times, pausing 0.5 s between sends
for x in range(0,20):
  magabot.sendOrder('a')
  sleep(0.5)

#magabot.sendOrder('a')
#sleep(0.5)
#magabot.sendOrder('a')
#sleep(0.5)
#magabot.sendOrder('a')
#sleep(0.5)


Example #14
0
# --- Dual pan/tilt tracking rig setup, variant (MyRobotLab / Jython) ---
# Same structure as the earlier rig example but using the "PID"
# service name: Tracking service plus two servo pairs, an Arduino,
# and x/y PID controllers, then connects the Arduino on COM3.
from org.myrobotlab.opencv import OpenCVData
from com.googlecode.javacv.cpp.opencv_core import CvPoint
from org.myrobotlab.service import OpenCV
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import Servo
from time import sleep

# current/target servo positions, shared with tracking callbacks
# defined elsewhere in the original script
global servox
global servoy
global actservox
global actservoy

actservox = 90
actservoy = 90

tracker = Runtime.create("tracker", "Tracking")

# create all the peer services
rotation = Runtime.create("rotation", "Servo")
neck = Runtime.create("neck", "Servo")
arduino = Runtime.create("arduino", "Arduino")
xpid = Runtime.create("xpid", "PID")
ypid = Runtime.create("ypid", "PID")

# second servo/PID set for a second axis pair ("b" rig)
rotationb = Runtime.create("rotationb", "Servo")
neckb = Runtime.create("neckb", "Servo")
xpidb = Runtime.create("xpidb", "PID")
ypidb = Runtime.create("ypidb", "PID")

# adjust values
arduino.connect("COM3")
# --- Chess board: Serial + ChessGame + Speech wiring (MRL / Jython) ---
# Wires a serial-connected board to a ChessGame engine: bytes from the
# board go to the python input() handler; engine moves are spoken,
# logged, and written back to the board over serial.
from time import sleep
from org.myrobotlab.service import Speech
from org.myrobotlab.service import Runtime
 
#Starting the required Services
 
serial = Runtime.createAndStart("serial","Serial")
chessgame = Runtime.createAndStart("chessgame", "ChessGame")
log = Runtime.createAndStart("log", "Log")
speech = Runtime.create("speech","Speech")
 
#Configuring Speech Service
 
speech.startService()
speech.setLanguage("en")
speech.speak("Game Begins!")
 
# move counter and last-move buffer, used by handlers defined
# elsewhere in the original script
count = 0
chessMove = ""
 
#Connecting Arduino via Serial
 
if not serial.isConnected():
    serial.connect("COM9")
 
# Adding Listeners
# board bytes --> python "input" handler
serial.addListener("publishByte", python.name, "input")
# engine move --> spoken via python "voice" handler, and logged
chessgame.addListener("computerMoved", python.name, "voice")
chessgame.addListener("computerMoved", log.name, "log")
# moves to execute --> written to the board over serial
chessgame.addListener("makeMove", serial.name, "write")
 
#script to control the TrashyBot platform through remote control via Xbox 360 wireless remote
#
#
#Nolan B. 1/8/16

from org.myrobotlab.service import Joystick
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import Runtime
from time import sleep

#----------------------------------Web Gui--------------------------
webgui = Runtime.create("webgui", "WebGui")
webgui.autoStartBrowser(False)
Runtime.start("webgui", "WebGui")

#---------------------------------Create Services----------------------
arduino = Runtime.createAndStart("arduino", "Arduino")
joystick = Runtime.createAndStart("joystick", "Joystick")
motorL = Runtime.start("motorL", "Motor")
motorR = Runtime.start("motorR", "Motor")
log = Runtime.start("log", "Log")

#----------------------Connect Peripherals-----------------------------------
joystick.setController(0)
#PC only - Pi needs new
# route joystick events to this python script's input handler
joystick.addInputListener(python)
arduino.connect("/dev/ttyACM0")
# Tell the joystick to turn on
joystick.startPolling()

# attach servos to Arduino
# --- Tracking rig with two OpenCV camera services (MRL / Jython) ---
# Creates a Tracking service, its servo/Arduino/PID peers, and two
# OpenCV services ("eye" and "opencv") both set to camera index 1.
from java.lang import String
from java.lang import Class
from java.awt import Rectangle
from org.myrobotlab.service import Runtime
from org.myrobotlab.service import OpenCV
from org.myrobotlab.opencv import OpenCVData
from com.googlecode.javacv.cpp.opencv_core import CvPoint;
from org.myrobotlab.service import OpenCV
from org.myrobotlab.service import Arduino
from org.myrobotlab.service import Servo
from time import sleep
 
 
tracker = Runtime.create("tracker","Tracking")
 
# create all the peer services
rotation = Runtime.create("rotation","Servo")
neck = Runtime.create("neck","Servo")
arduino = Runtime.create("arduino","Arduino")
xpid = Runtime.create("xpid","PID");
ypid = Runtime.create("ypid","PID");
 
# adjust values
arduino.connect("COM3")
# two OpenCV services, both pointed at the same camera index
eye = Runtime.create("eye","OpenCV")
opencv = Runtime.create("opencv","OpenCV")
opencv.startService()
opencv.setCameraIndex(1)
eye.setCameraIndex(1)
 
#i'm wondering if these are working??? they are by default...
Example #18
0
# --- Arduino PWM motor test (MyRobotLab / Jython) ---
# Drives a two-pin motor driver (one PWM pin per direction, per the
# IBT note below): one direction for 10 s, then the other for 10 s,
# then stops both.
from org.myrobotlab.service import Runtime
from org.myrobotlab.service import Arduino
from time import sleep

port = "COM30"
ard1 = Runtime.create("ard1", "Arduino")
ard1.connect(port)

# one direction pin each way
right = 5
left = 6

ard1.pinMode(right, Arduino.OUTPUT)
ard1.pinMode(left, Arduino.OUTPUT)

# drive the "right" direction at PWM duty 200 (of 255) for 10 s
speed = 200
ard1.analogWrite(left, 0);
ard1.analogWrite(right, speed);

sleep(10)

# reverse: drive the "left" direction for 10 s
ard1.analogWrite(left, speed);
ard1.analogWrite(right, 0);

sleep(10)

# stop: zero PWM on both direction pins
ard1.analogWrite(left, 0);
ard1.analogWrite(right, 0);
# IBT uses 2 pins for control  (forward vs reverse)