def driveToObs():
    # Drive forward in short steps until the distance reading drops below 20.
    tmpDist = distCheck()
    while tmpDist >= 20:
        print("I am in while")
        movement.forward(0.2)
        tmpDist = distCheck()
    return tmpDist
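# Usage sketch (assumption): distCheck() is taken to return the obstacle
# distance in the same units as the threshold of 20 used above, so the value
# returned by driveToObs() is the reading that stopped the approach.
final_dist = driveToObs()
print("Obstacle reached, final reading:", final_dist)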
def grabTribble():
    # Lower the arm, drive onto the tribble, and close the claw around it.
    print("grabTribble()")
    move.armDown()
    move.forward(50, 1.0)
    move.clawClosed()
    move.armUp()
    move.clawOpen()
    print("grabbed tribble...")
def test_movement():
    # Used to see if movements and their defaults function as intended
    print("Testing movement\n")
    m.forward()
    msleep(500)  # Using msleep() instead of wait() to make sure each command turns off its wheels
    m.backwards()
    msleep(500)
    m.turn_left()
    msleep(500)
    m.turn_right()
    msleep(500)
    print("Testing complete. Exiting...\n")
    exit(86)
def gotoTribble():
    print("gotoTribble()")
    tribbleNear = False
    while not tribbleNear:
        move.forward(80, 0.2)
        centered = False
        while not centered:
            # pos is assumed to be the red blob's horizontal offset from centre;
            # anything within +/-10 counts as centred.
            pos = sensor.getRedPosition()
            print("Pos: ", pos)
            if -10 < pos < 10:
                centered = True
            elif pos > 0:
                move.spinRight()
            else:
                move.spinLeft()
        tribbleNear = sensor.pomInRange()
    print("near tribble..")
os.environ['SDL_VIDEODRIVER'] = 'dummy'
pygame.init()
pygame.display.set_mode((1, 1))

condition = True
st = 0.5
camera.init()
#movement.car.init()
print('HAJIMAE')

while condition:
    for event in pygame.event.get():
        #print(event)
        if event.type == pygame.KEYDOWN:
            print(event.key)
            if event.key == pygame.K_UP:
                print('UP')
                movement.forward(st)
                os.chdir('/home/pi/Desktop/New_beginning/w')
                camera.take_picture()
            elif event.key == pygame.K_DOWN:
                movement.reverse(st)
                print('DOWN')
            elif event.key == pygame.K_LEFT:
                os.chdir('/home/pi/Desktop/New_beginning/a')
                camera.take_picture()
                movement.left(st)
                print('LEFT')
            elif event.key == pygame.K_RIGHT:
                os.chdir('/home/pi/Desktop/New_beginning/d')
                camera.take_picture()
                movement.right(st)
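# Hedged addition: `condition` is never cleared inside the loop above, so the
# capture script only stops when the process is killed. One way to end it
# cleanly, assuming the same pygame event loop, is an extra branch such as:
#
#             elif event.key == pygame.K_ESCAPE:
#                 condition = False
#         elif event.type == pygame.QUIT:
#             condition = False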
def handleMessage(msg, data):
    global server, tilt, yaw
    print("Incoming", msg, data)
    if msg == Messages.MSG_READ_SENSORS:
        irL = jsonBool(robohat.irLeft())
        irR = jsonBool(robohat.irRight())
        lineL = jsonBool(robohat.irLeftLine())
        lineR = jsonBool(robohat.irRightLine())
        sonar = robohat.getDistance()
        cmpCal = jsonBool(micro_api.isCompassCalibrated())
        print("Read sensors!")
        msg = '{{"msg":{0},"data":{{"irL":{1},"irR":{2},"lineL":{3},"lineR":{4},"dist":{5},"cmpCal":{6}}}}}'
        msg = msg.format(Messages.MSG_SENSOR_DATA, irL, irR, lineL, lineR, sonar, cmpCal)
        server.send(msg)
    elif msg == Messages.MSG_FORWARD or msg == Messages.MSG_REVERSE:
        revs = 2
        speed = 40
        clicks = move.getClicks()
        if data is not None:
            if "r" in data:
                revs = data["r"]
            if "s" in data:
                speed = data["s"]
        if msg == Messages.MSG_FORWARD:
            print("Forward!")
            move.forward(revs, speed)
        else:
            print("Reverse!")
            move.reverse(revs, speed)
        newClicks = move.getClicks()
        irL = jsonBool(robohat.irLeft())
        irR = jsonBool(robohat.irRight())
        totClicks = newClicks - clicks
        reply = '{{"msg":{0},"data":{{"msg":{1},"clicks":{2},"revs":{3},"l":{4},"r":{5}}}}}'
        reply = reply.format(Messages.MSG_OK_DONE, msg, totClicks, float(totClicks) / move.CLICKS_PER_REV, irL, irR)
        server.send(reply)
    elif msg == Messages.MSG_LEFT or msg == Messages.MSG_RIGHT:
        revs = 0.5
        speed = 100
        clicks = move.getClicks()
        if data is not None:
            if "r" in data:
                revs = data["r"]
            if "s" in data:
                speed = data["s"]
        if msg == Messages.MSG_LEFT:
            print("Left!")
            move.left(revs, speed)
        else:
            print("Right!")
            move.right(revs, speed)
        newClicks = move.getClicks()
        totClicks = newClicks - clicks
        irL = jsonBool(robohat.irLeft())
        irR = jsonBool(robohat.irRight())
        reply = '{{"msg":{0},"data":{{"msg":{1},"clicks":{2},"revs":{3},"l":{4},"r":{5}}}}}'
        reply = reply.format(Messages.MSG_OK_DONE, msg, totClicks, float(totClicks) / move.CLICKS_PER_REV, irL, irR)
        server.send(reply)
    elif msg == Messages.MSG_SONAR_UP:
        print("Sonar up!")
        tilt.up()
    elif msg == Messages.MSG_SONAR_CENTRE:
        print("Sonar centre!")
        tilt.centre()
    elif msg == Messages.MSG_SONAR_DOWN:
        print("Sonar down!")
        tilt.down()
    elif msg == Messages.MSG_SONAR_LOW:
        print("Sonar low!")
        tilt.low()
    elif msg == Messages.MSG_SONAR_MID:
        print("Sonar middle!")
        yaw.mid()
    elif msg == Messages.MSG_SONAR_LEFT:
        print("Sonar left!")
        yaw.left()
    elif msg == Messages.MSG_SONAR_RIGHT:
        print("Sonar right!")
        yaw.right()
    elif msg == Messages.MSG_SONAR_SCAN:
        sonarScan()
    elif msg == Messages.MSG_PARK_SONAR:
        yaw.mid()
        tilt.park()
    elif msg == Messages.MSG_FLOOR_SONAR:
        yaw.mid()
        tilt.floor()
    elif msg == Messages.MSG_GET_CLICKS:
        clicks = move.getClicks()
        msg = '{{"msg":{0},"data":{{"clicks":{1}}}}}'
        msg = msg.format(Messages.MSG_CLICK_DATA, clicks)
        server.send(msg)
    elif msg == Messages.MSG_RESET_CLICKS:
        move.resetClicks()
    elif msg == Messages.MSG_TILT_PERCENT or msg == Messages.MSG_YAW_PERCENT:
        val = 50  # default percentage
        if data is not None:
            if "v" in data:
                val = data["v"]
        if msg == Messages.MSG_TILT_PERCENT:
            print("Tilt", val)
            tilt.percentage(val)
        else:
            print("Yaw", val)
            yaw.percentage(val)
        reply = '{{"msg":{0},"data":{{"msg":{1}}}}}'
        reply = reply.format(Messages.MSG_OK_DONE, msg)
        server.send(reply)
    elif msg == Messages.MSG_DETAIL_SCAN:
        hGran = 5
        vGran = 5
        width = 80
        height = 60
        x = 50
        y = 40
        if data is not None:
            if "hG" in data:
                hGran = data["hG"]
            if "vG" in data:
                vGran = data["vG"]
            if "w" in data:
                width = data["w"]
            if "h" in data:
                height = data["h"]
            if "x" in data:
                x = data["x"]
            if "y" in data:
                y = data["y"]
        print("Scan:", hGran, ",", vGran)
        detailedSonarScan(hGran, vGran, width, height, x, y)
    elif msg == Messages.MSG_FIND_WALL:
        print("Find wall!")
        clicks = move.getClicks()
        yaw.mid()
        tilt.park()
        move.findWall(35)
        newClicks = move.getClicks()
        totClicks = newClicks - clicks
        irL = jsonBool(robohat.irLeft())
        irR = jsonBool(robohat.irRight())
        reply = '{{"msg":{0},"data":{{"msg":{1},"clicks":{2},"revs":{3},"l":{4},"r":{5}}}}}'
        reply = reply.format(Messages.MSG_OK_DONE, msg, totClicks, float(totClicks) / move.CLICKS_PER_REV, irL, irR)
        server.send(reply)
    elif msg == Messages.MSG_COMPASS_CALIBRATE:
        print("[INFO] Start compass calibration")
        micro_api.calibrateCompass()
    elif msg == Messages.MSG_COMPASS_GET_HEADING:
        heading = micro_api.getCompassHeading()
        reply = '{{"msg":{0},"data":{{"heading":{1}}}}}'
        print("[INFO] Compass heading: {}".format(heading))
        reply = reply.format(Messages.MSG_COMPASS_HEADING, heading)
        server.send(reply)
    else:
        print("[WARN] Not handled!", msg)
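# jsonBool() is used above but not defined in this excerpt. A minimal sketch,
# assuming its job is to turn a Python boolean into the lowercase literal that
# the hand-formatted JSON messages in handleMessage() expect:
def jsonBool(value):
    # JSON has no True/False keywords, so emit "true"/"false" for direct
    # substitution into the format strings above.
    return "true" if value else "false"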
import movement as m

p = 0
tf = 1.6

while p == 0:
    a = input('')  # previously raw_input()
    if a == 'w':
        m.forward(tf)
    if a == 's':
        m.reverse(tf)
    if a == 'd':
        m.right(tf)
    if a == 'a':
        m.left(tf)
    if a == 'p':
        p = 1  # stop the teleop loop
# Assumes camera, movement, model, tf, i and imageName are set up earlier in the script.
correct = 'w'
while correct != 'q':
    camera.take_picture()
    image = Image.open('test.jpg')
    image = np.array(image)
    p = model.predict(np.expand_dims(image, axis=0))
    print(p)
    p = np.argmax(p, axis=1)
    p = p[0]
    print(p)
    correct = input("Enter correct prediction")  # previously raw_input()
    if correct == 'w':
        os.rename("/test.jpg", "/images/test/test" + str(i) + ".jpg")
        movement.forward(tf)
    elif correct == 'd':
        # absolute path; os.rename does not expand '~'
        os.rename("/home/pi/Documents/Self-driving-car/" + imageName,
                  "/home/pi/Documents/Self-driving-car/images/test/" + imageName)
        movement.right(tf)
    elif correct == 'a':
        os.rename("/home/pi/Documents/Self-driving-car/" + imageName,
                  "/home/pi/Documents/Self-driving-car/images/test/" + imageName)
        movement.left(tf)
    elif correct == 'q':
        camera.end()
        movement.end()
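# Hedged refactor sketch: the branches above move a captured frame into a
# labelled folder using repeated, slightly different path strings. A single
# helper keeps that logic in one place. BASE_DIR, file_capture() and the
# label_dir parameter are assumptions, not part of the original project.
import os

BASE_DIR = "/home/pi/Documents/Self-driving-car"

def file_capture(image_name, label_dir):
    # Move a captured image into images/<label_dir>/ under the project base.
    src = os.path.join(BASE_DIR, image_name)
    dst = os.path.join(BASE_DIR, "images", label_dir, image_name)
    os.rename(src, dst)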
print("Wait please") p = 0 tf = 0.5 camera.init() #movement_test.init() model = load_model('my_model2.h5') print("Camera initialized, go ahead!") while p != 4: camera.take_picture_test() image = Image.open('test.jpg') Image._show(image) image = np.array(image) p = model.predict(np.expand_dims(image, axis=0)) print(p) p = np.argmax(p, axis=1) p = p[0] if p == 0: movement_test.left(tf) print("left") elif p == 1: movement_test.forward(tf) print("forward") elif p == 2: movement_test.right(tf) print("right")
def getOutOfStartBox():
    print("getOutOfStartBox()")
    move.forward()
    sleep(2.0)