def init(): print("Simple obstacle avoider") print("Press Ctrl-C to end") print() pz.init() pz.setOutputConfig(5, 3) # set output 5 to WS2812 rev = pz.getRevision() print(rev[0], rev[1]) hcsr04.init()
def initPZ(): pz.init() # Set output mode to Servo pz.setOutputConfig(pan, 2) pz.setOutputConfig(tilt, 2) # set output 5 to WS2812 pz.setOutputConfig(pixel, 3) hcsr04.init() # Centre all servos panVal = 90 tiltVal = 90 pz.setOutput(pan, panVal) pz.setOutput(tilt, tiltVal)
        # (fragment) Tail of the enclosing method -- the `def` header for these
        # two assignments lies outside this chunk.
        self.epoll = select.epoll()  # underlying OS epoll object
        self.fdmap = {}  # fd -> registered file-like object

    def register(self, obj, flags):
        """Register *obj* (anything with a fileno()) for the epoll mask *flags*."""
        fd = obj.fileno()
        self.epoll.register(fd, flags)
        self.fdmap[fd] = obj

    def unregister(self, obj):
        """Remove *obj* from the poll set."""
        fd = obj.fileno()
        del self.fdmap[fd]
        self.epoll.unregister(fd)

    def poll(self):
        """Block until descriptors are ready; yield (object, flags) pairs."""
        evt = self.epoll.poll()
        for fd, flags in evt:
            # Map the raw fd back to the object registered for it
            yield self.fdmap[fd], flags


if __name__ == "__main__":
    pollster = EPoll()
    # Listen on all interfaces, port 54321; Server/readwrite are defined
    # elsewhere in this file.
    pollster.register(Server(("", 54321), pollster), select.EPOLLIN)
    pz.init()
    pz.setOutputConfig(5, 3)  # set output 5 to WS2812
    hcsr04.init()
    while True:
        evt = pollster.poll()
        for obj, flags in evt:
            readwrite(obj, flags)
import time

numPixels = 17

# Bitmaps for the robot's facial expressions: 1 = pixel lit, 0 = dark.
smileData = [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1]
frownData = [1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1]
grimaceData = [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1]
oooohData = [0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1]
pairData = [0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1]

# Board bring-up: Picon Zero, then the WS2812 chain on output 5, then the
# HC-SR04 rangefinder.
pz.init()
pz.setOutputConfig(5, 3)  # set output 5 to WS2812
sonar.init()


def clearFace():
    """Blank every pixel of the face."""
    pz.setAllPixels(0, 0, 0)


def showFace(data, Red, Green, Blue):
    """Paint bitmap *data* in the given colour (dark pixels off), then latch."""
    for idx in range(numPixels):
        lit = data[idx] > 0
        # Buffer each pixel (update=False), colour if lit, black otherwise
        pz.setPixel(idx,
                    Red if lit else 0,
                    Green if lit else 0,
                    Blue if lit else 0,
                    False)
    # Push the whole buffered frame to the chain in one go
    pz.updatePixels()


button_delay = 0.1
mylcd = I2C_LCD_driver.lcd() #assign LCD to variable for ease of use pz.init() #initiate hardware pz.setInputConfig(0, 0) #right IR sensor is input 0 and digital pz.setInputConfig(1, 0) #left IR sensor is input 1 and digital pz.setInputConfig(2, 0) #right line sensor is input 2 and digital pz.setInputConfig(3, 0) #left line is input 3 and digital RIGHTIR = pz.readInput(0) #assign right IR to a variable LEFTIR = pz.readInput(1) #assign left IR to a variable RIGHTLINE = pz.readInput(2) #assign right line sensor to a variable LEFTLINE = pz.readInput(3) #assign left line sensor to a variable hcsr04.init() #initiate hardware RANGE = hcsr04.getDistance() #assign HC-SR04 range to variable button = Button(22) #end of hardware setup #______________________________________________________________________________ #____________________________________________________________________________________ #functions for individual tasks def remotecontrol(): #works perfect pz.stop() mylcd.lcd_display_string("Remote Control ", 1) mylcd.lcd_display_string("Press E to End ", 2)
import tty
import termios
import hcsr04
import select
import curses
from shutil import copyfile
import random
from bisect import bisect

# Put the terminal under curses control for unbuffered single-key input.
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
stdscr.keypad(1)
# nodelay(1)
hcsr04.init()


def asciiSensor(maxVal, reading):
    # convert a sensor reading into an ASCII form for easy render on terminal
    # TODO generate an image depth map version too as opencv now includes view of images via web server
    # algo source https://stevendkay.wordpress.com/2009/09/08/generating-ascii-art-from-photographs-in-python/
    mval = min(maxVal, reading)  # clamp the reading to the displayable maximum
    greyscale = [
        " ",
        " ",
        ".,-",
        "_ivc=!/|\\~",
        # NOTE(review): the greyscale ramp (and the rest of asciiSensor)
        # continues beyond this chunk -- the definition is truncated here.
import piconzero as pz
import hcsr04 as dist
import sensorlibs as sideDist
from time import sleep, time
from picamera import PiCamera

# setup our camera
cam = PiCamera()

pz.init()
dist.init()

# speed variables (motor power values)
spinSpeed = 95
forwardSpeed = 32
# time variables (seconds per control step)
spinTime = 0.009
forwardTime = 0.0005
# stop variables -- presumably rangefinder distance thresholds for the side
# and front sensors; confirm units against sensorlibs/hcsr04
sideStop = 21
frontStop = 100

# start recording, timestamping the file name; camera is mounted inverted,
# hence both flips
ts = str(time())
cam.vflip = True
cam.hflip = True
cam.start_recording("/home/pi/qmpiwars/videos/straight-" + ts + ".h264")
try:
    # NOTE(review): the try-body continues past this chunk -- truncated here.
def automaticMode():
    """Scan the arm's rotation/lift envelope with the sonar, record arm
    positions where a target is within grabbing distance, de-duplicate
    neighbouring detections, print the result and return 0.

    (Python 2 source: uses print statements.)
    """
    # Initialization : moves the robot to the rightmost position
    hcsr04.init()
    print "Initialization of target practice"
    smoothMotion(rot, rotMin)
    smoothMotion(tilt, tiltMin)
    smoothMotion(lift, liftMin)
    smoothMotion(grip, gripMax)
    # end initialization
    targetPos = []  # Keeps track of detected target locations (robot arm position)
    # scanning
    flagClose = 0  # a "click", flips to 1 when a target within grabbing distance (10 cm) is detected
    flagFar = 0  # a "click", flips to 1 if a target within 20 cm is detected
    increment = 15  # rotation and lift position increment for scanning
    liftRange = range(liftMin, liftMax, increment)  # scanning range : lifting
    for rotPos in range(rotMin, rotMax, increment):  # goes through the rotation range
        for liftPos in liftRange:  # goes through the lifting range
            distance = texasRanger()
            print distance
            if (distance < 10) and (flagClose == 0):
                # rising edge: first sample inside grabbing range
                print "A target within range detected"
                flagClose = 1
                targetPos.append(positions[:])  # snapshot of the current arm position
            elif (distance <= 20) and (flagFar == 0):
                print "A target out of range detected"
                flagFar = 1
            elif (distance > 20):
                # target lost again -- re-arm both edge detectors
                flagFar = 0
                flagClose = 0
            smoothMotion(lift, liftPos)
            positions[lift] = liftPos
            print positions[lift]
            time.sleep(.02)
        smoothMotion(rot, rotPos)
        positions[rot] = rotPos
        liftRange = liftRange[:: -1]  # reverse scanning direction for lift, makes the scanning continuous
    print "Initialization finished"
    #---- eliminate false readings------------
    # monitors position of neighbouring "clicks", if they are at the same
    # rotation or lift value, eliminate one of them as redundant.
    ind = 0
    while ind < len(targetPos) - 1:
        print targetPos
        print ind
        if (targetPos[ind + 1][0] == targetPos[ind][0]) or (targetPos[ind + 1][2] == targetPos[ind][2]):
            del targetPos[ind]
        # NOTE(review): ind advances even after a deletion, so the element
        # that slid into slot ind is never compared against its new
        # neighbour -- confirm whether that skip is intentional.
        ind += 1
    #---- end eliminate false readings--------
    print "Number of targets within reach: " + str(len(targetPos))
    print targetPos
    return 0
#==============================================================================
# NOTE(review): this first group of statements reads `light` before any
# visible definition -- it is almost certainly the body of a handler whose
# `def` lies outside this chunk; confirm against the full file.
# Cycle through 4 pixel states: off, white, green, red.
light = (light+1)%4
if (light == 0):
    pz.setAllPixels(0,0,0)
elif (light == 1):
    pz.setAllPixels(255,255,255)
elif (light == 2):
    pz.setAllPixels(0,255,0)
elif (light == 3):
    pz.setAllPixels(255,0,0)

# Board bring-up: output 0 as servo (mode 2), output 5 as WS2812 (mode 3);
# `angle` and `ultra` are defined elsewhere in the file.
pz.init()
pz.setOutputConfig(0,2)
pz.setOutputConfig(5,3)
pz.setBrightness(100)
pz.setOutput(0,angle)
ultra.init()

app = Flask(__name__)


@app.route('/')
def index():
    """Video streaming home page."""
    return render_template('index.html')


def gen(camera):
    """Video streaming generator function."""
    # Emit one multipart MJPEG frame per iteration for the streaming route.
    while True:
        frame = camera.get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')