def step(): sonars = io.getSonars() # current discretized sonar reading left = discretize(sonars[0], sonarMax/numObservations, numObservations-1) # GRAPHICS if robot.g is not None: # update observation model graph robot.g.updateObsLabel(left) robot.g.updateObsGraph([obsModel(s).prob(left) for s in xrange(numStates)]) # update belief state robot.estimator.update(left) # GRAPHICS if robot.g is not None: # update belief graph robot.g.updateBeliefGraph([robot.estimator.belief.prob(s) for s in xrange(numStates)]) # DL6 Angle Controller (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars) if not theta: theta = 0 print 'Angle too large!' e = desiredRight-distanceRight ROTATIONAL_VELOCITY = Kp*e - Ka*theta io.setForward(FORWARD_VELOCITY) io.setRotational(ROTATIONAL_VELOCITY)
def step(): sonars = io.getSonars() # current discretized sonar reading left = discretize(sonars[0], sonarMax / numObservations, numObservations - 1) # GRAPHICS if robot.g is not None: # update observation model graph robot.g.updateObsLabel(left) robot.g.updateObsGraph( [obsModel(s).prob(left) for s in xrange(numStates)]) # update belief state robot.estimator.update(left) # GRAPHICS if robot.g is not None: # update belief graph robot.g.updateBeliefGraph( [robot.estimator.belief.prob(s) for s in xrange(numStates)]) # DL6 Angle Controller (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars) if not theta: theta = 0 print 'Angle too large!' e = desiredRight - distanceRight ROTATIONAL_VELOCITY = Kp * e - Ka * theta io.setForward(FORWARD_VELOCITY) io.setRotational(ROTATIONAL_VELOCITY)
def step(): global confident, targetX, targetTheta inp = io.SensorInput() sonars = inp.sonars # current discretized sonar reading left = discretize(sonars[0], sonarMax / numObservations, numObservations - 1) if not confident: # GRAPHICS if robot.g is not None: # update observation model graph robot.g.updateObsLabel(left) robot.g.updateObsGraph( [obsModel(s).prob(left) for s in xrange(numStates)]) if DO_ESTIMATION: # update belief state robot.estimator.update(left) (location, confident) = confidentLocation(robot.estimator.belief) # GRAPHICS if robot.g is not None: # update belief graph robot.g.updateBeliefGraph( [robot.estimator.belief.prob(s) for s in xrange(numStates)]) if confident: targetX = (parkingSpot - location) * ( xMax - xMin) / float(numStates) + inp.odometry.x print 'I am at x =', location, ': proceeding to parking space' # DL6 Angle Controller (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars) if not theta: theta = 0 print 'Angle too large!' e = desiredRight - distanceRight ROTATIONAL_VELOCITY = Kp * e - Ka * theta io.setForward(FORWARD_VELOCITY) io.setRotational(ROTATIONAL_VELOCITY) else: inp.odometry.theta = util.fixAnglePlusMinusPi(inp.odometry.theta) if inp.odometry.x > targetX + .05 and abs( inp.odometry.theta) < math.pi / 4: io.Action(fvel=-0.2, rvel=0).execute() #drive backwards if necessary elif inp.odometry.x < targetX and abs( inp.odometry.theta) < math.pi / 4: io.Action(fvel=0.2, rvel=0).execute() #drive to desired x location elif inp.odometry.theta < targetTheta - .05: io.Action(fvel=0, rvel=0.2).execute() #rotate elif inp.sonars[3] > .3: io.Action(fvel=0.2, rvel=0).execute() #drive into spot else: io.Action(fvel=0, rvel=0).execute( ) #congratulate yourself (or call insurance company)
def step(): sonars = io.getSonars() (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars) print 'd_o =',distanceRight,' theta =',theta robot.distances.append(distanceRight) Kp = 1 Ka = 0.632 rotationalVelocity = Ka*((Kp/Ka)*(0.4 - distanceRight) - theta) io.setRotational(rotationalVelocity) computePlotValues(rotationalVelocity)
def step(): global confident, targetX, targetTheta inp = io.SensorInput() sonars = inp.sonars # current discretized sonar reading left = discretize(sonars[0], sonarMax/numObservations, numObservations-1) if not confident: # GRAPHICS if robot.g is not None: # update observation model graph robot.g.updateObsLabel(left) robot.g.updateObsGraph([obsModel(s).prob(left) for s in xrange(numStates)]) if DO_ESTIMATION: # update belief state robot.estimator.update(left) (location, confident) = confidentLocation(robot.estimator.belief) # GRAPHICS if robot.g is not None: # update belief graph robot.g.updateBeliefGraph([robot.estimator.belief.prob(s) for s in xrange(numStates)]) if confident: targetX = (parkingSpot-location)*(xMax-xMin)/float(numStates)+inp.odometry.x print 'I am at x =',location,': proceeding to parking space' # DL6 Angle Controller (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars) if not theta: theta = 0 print 'Angle too large!' e = desiredRight-distanceRight ROTATIONAL_VELOCITY = Kp*e - Ka*theta io.setForward(FORWARD_VELOCITY) io.setRotational(ROTATIONAL_VELOCITY) else: inp.odometry.theta = util.fixAnglePlusMinusPi(inp.odometry.theta) if inp.odometry.x>targetX+.05 and abs(inp.odometry.theta)<math.pi/4: io.Action(fvel=-0.2,rvel=0).execute() #drive backwards if necessary elif inp.odometry.x<targetX and abs(inp.odometry.theta)<math.pi/4: io.Action(fvel=0.2,rvel=0).execute() #drive to desired x location elif inp.odometry.theta<targetTheta-.05: io.Action(fvel=0,rvel=0.2).execute() #rotate elif inp.sonars[3]>.3: io.Action(fvel=0.2,rvel=0).execute() #drive into spot else: io.Action(fvel=0,rvel=0).execute() #congratulate yourself (or call insurance company)
def step(): sonars = io.getSonars() (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars) print 'd_o =',distanceRight,' theta =',theta if theta == None: theta = robot.rvels[-1] if distanceRight == None: distanceRight = robot.distances[-1] robot.distances.append(distanceRight) rotationalVelocity = Kp * (desiredRight - distanceRight) + Ka * (desiredAngle - theta) robot.rvels.append(rotationalVelocity) io.setRotational(rotationalVelocity)
def wall_follower_step(): sonars = io.getSonars() (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars) if theta == None: theta = robot.rvels[-1] if distanceRight == None: distanceRight = robot.distances[-1] print 'd_o =',distanceRight,' theta =',theta robot.distances.append(distanceRight) rotationalVelocity = Kp * (desiredRight - distanceRight) + Ka * (desiredAngle - theta) robot.rvels.append(rotationalVelocity) io.setRotational(rotationalVelocity) io.setForward(0.1)
def step():
    """Parking brain step with a localization-quality metric.

    If already confident of the x-location, execute the odometry-based
    parking maneuver and return.  Otherwise: record a quality measure of the
    current belief (a prediction, so computed BEFORE the estimator update),
    update the belief from the discretized left sonar, and wall-follow with
    the DL6 angle controller.  Stops the brain after 80 recorded measures.
    """
    global confident, targetX, targetTheta
    sonars = io.getSonars()
    pose = io.getPosition(True)
    (px, py, ptheta) = pose
    if confident:
        ptheta = util.fixAnglePlusMinusPi(ptheta)
        if px > targetX + .05 and abs(ptheta) < math.pi / 4:
            io.Action(fvel=-0.2, rvel=0).execute()  # drive backwards if necessary
        elif px < targetX and abs(ptheta) < math.pi / 4:
            io.Action(fvel=0.2, rvel=0).execute()  # drive to desired x location
        elif ptheta < targetTheta - .05:
            io.Action(fvel=0, rvel=0.2).execute()  # rotate
        elif sonars[3] > .3:
            io.Action(fvel=0.2, rvel=0).execute()  # drive into spot
        else:
            io.Action(fvel=0, rvel=0).execute()  # congratulate yourself (or call insurance company)
        return
    # Quality metric.  Important to do this before we update the belief
    # state, because it is always a prediction.
    parkingSpaceSize = .75
    robotWidth = 0.3
    # Robot is about .3 meters wide; parking place is .75
    margin = (parkingSpaceSize - robotWidth) / 2
    trueX = io.getPosition(True)[0]
    robot.probMeasures.append(estimateQualityMeasure(robot.estimator.belief,
                                                     xMin, xMax, numStates,
                                                     margin, trueX))
    trueS = discretize(trueX, (xMax - xMin) / numStates, valueMin=xMin)
    n = len(robot.probMeasures)
    if n == 80:
        # enough samples collected for the experiment; stop the brain
        brainStop()
    # current discretized sonar reading
    left = discretize(sonars[0], sonarMax / numObservations, numObservations - 1)
    robot.data.append((trueS, ideal[trueS], left))
    # obsProb: predicted probability of this observation under the current belief
    obsProb = sum([robot.estimator.belief.prob(s) * OBS_MODEL_TO_USE(s).prob(left)
                   for s in xrange(numStates)])
    # GRAPHICS
    if robot.g is not None:
        # redraw ideal distances (compensating for tk bug on macs)
        # draw robot's true state
        if trueS < numStates:
            robot.g.updateDist()
            robot.g.updateTrueRobot(trueS)
        # update observation model graph
        robot.g.updateObsLabel(left)
        robot.g.updateObsGraph([OBS_MODEL_TO_USE(s).prob(left)
                                for s in xrange(numStates)])
    robot.estimator.update(left)
    (location, confident) = confidentLocation(robot.estimator.belief)
    if confident:
        # convert discrete location to a world x target relative to current pose
        targetX = (parkingSpot - location) * (xMax - xMin) / float(numStates) + px
        print 'I am at x =', location, ': proceeding to parking space'
    # GRAPHICS
    if robot.g is not None:
        # update world drawing
        # update belief graph
        robot.g.updateBeliefGraph([robot.estimator.belief.prob(s)
                                   for s in xrange(numStates)])
    # DL6 Angle Controller
    (distanceRight, theta) = sonarDist.getDistanceRightAndAngle(sonars)
    if not theta:
        theta = 0
    e = desiredRight - distanceRight
    ROTATIONAL_VELOCITY = Kp * e - Ka * theta
    io.setForward(FORWARD_VELOCITY)
    io.setRotational(ROTATIONAL_VELOCITY)
def getNextValues(self, state, inp): v = sonarDist.getDistanceRightAndAngle(inp.sonars) print 'Dist from robot center to wall on right', v[0] if not v[1]: print '****** Angle reading not valid ******' return (state, v)