def on_step():
    """Single control-loop step: record pose, then steer along robot.path."""
    # True pose from the simulator (cheat=True bypasses odometry noise).
    x, y, theta = io.get_position(cheat=True)

    # Log the trajectory for plotting / grading.
    robot.slime_x.append(x)
    robot.slime_y.append(y)
    checkoff.update(globals())

    # Current position in world coordinates and current heading.
    current_point = util.Point(x, y).add(robot.initial_location)
    current_angle = theta

    # Pure-pursuit style controller supplies the velocity pair.
    forward_v, rotational_v = drive(robot.path, current_point, current_angle)
    io.set_forward(forward_v)
    io.set_rotational(rotational_v)
def on_step():
    """One brain step: localize via sonar HMM, then park once confident.

    Before localization: wall-following (DL6 angle controller).
    After `confident` flips True: open-loop parking maneuver driven by
    target_x / target_theta.
    """
    if not REAL_ROBOT:
        checkoff.update(globals())
    global confident, target_x, target_theta

    sonars = io.get_sonars()
    pose = io.get_position(not REAL_ROBOT)  # cheat only in simulation
    (px, py, ptheta) = pose

    if confident:
        # Parking phase: first time through, fix the goal heading at +90 deg.
        if target_theta is None:
            target_theta = util.fix_angle_plus_minus_pi(ptheta + math.pi / 2)
        ptheta = util.fix_angle_plus_minus_pi(ptheta)
        if px > target_x + .05 and abs(ptheta) < math.pi / 4:
            io.Action(fvel=-0.2, rvel=0).execute()  # drive backwards if necessary
        elif px < target_x and abs(ptheta) < math.pi / 4:
            io.Action(fvel=0.2, rvel=0).execute()  # drive to desired x location
        elif ptheta < target_theta - .05:
            io.Action(fvel=0, rvel=0.2).execute()  # rotate
        elif sonars[3] > .3:
            io.Action(fvel=0.2, rvel=0).execute()  # drive into spot
        else:
            io.Action(fvel=0, rvel=0).execute()  # congratulate yourself (or call insurance company)
        return

    # Quality metric. Important to do this before we update the belief state,
    # because it is always a prediction.
    if not REAL_ROBOT:
        parkingSpaceSize = .75
        robotWidth = 0.3
        margin = (parkingSpaceSize - robotWidth) / 2
        robot.probMeasures.append(
            estimate_quality_measure(robot.estimator.belief, x_min, x_max,
                                     num_states, margin, px))
        trueS = discretize(px, (x_max - x_min) / num_states, valueMin=x_min)
        trueS = clip(trueS, 0, num_states - 1)
        n = len(robot.probMeasures)  # NOTE(review): unused here; kept for checkoff/inspection

    # Current discretized reading from the left-facing sonar.
    left = discretize(sonars[0], sonarMax / num_observations,
                      num_observations - 1)
    if not REAL_ROBOT:
        robot.data.append((trueS, ideal[trueS], left))

    # Predicted probability of this observation under the current belief.
    # NOTE(review): computed but not used below; kept for checkoff/inspection.
    obsProb = sum([robot.estimator.belief.prob(s) * OBS_MODEL_TO_USE(s).prob(left)
                   for s in range(num_states)])

    # GRAPHICS
    if robot.g is not None:
        # redraw ideal distances (compensating for tk bug on macs)
        # draw robot's true state
        if not REAL_ROBOT:
            if trueS < num_states:
                robot.g.updateDist()
                robot.g.updateTrueRobot(trueS)
        # update observation model graph
        robot.g.updateObsLabel(left)
        robot.g.updateObsGraph([OBS_MODEL_TO_USE(s).prob(left)
                                for s in range(num_states)])

    # Fold the new observation into the belief, then test for convergence.
    robot.estimator.update(left)
    (location, confident) = confident_location(robot.estimator.belief)
    if confident:
        target_x = (parking_spot - location) * (x_max - x_min) / float(num_states) + px
        print('I am at x =', location, ': proceeding to parking space')
        if not REAL_ROBOT:
            checkoff.localized(globals())

    # GRAPHICS
    if robot.g is not None:
        # update world drawing
        # update belief graph
        robot.g.updateBeliefGraph([robot.estimator.belief.prob(s)
                                   for s in range(num_states)])

    # DL6 Angle Controller: hold desiredRight off the right wall.
    (distanceRight, theta) = sonarDist.get_distance_right_and_angle(sonars)
    if not theta:
        theta = 0  # no wall angle estimate available; assume parallel
    e = desiredRight - distanceRight
    ROTATIONAL_VELOCITY = Kp * e - Ka * theta
    io.set_forward(FORWARD_VELOCITY)
    io.set_rotational(ROTATIONAL_VELOCITY)
def on_step():
    """One brain step: sonar-HMM localization, then a parking maneuver.

    Phase 1 (not confident): wall-follow with the DL6 angle controller while
    updating the state estimator from the left sonar.
    Phase 2 (confident): drive to target_x, turn to target_theta, back into
    the spot using the rear sonar.
    """
    if not REAL_ROBOT:
        checkoff.update(globals())
    global confident, target_x, target_theta

    sonars = io.get_sonars()
    pose = io.get_position(not REAL_ROBOT)  # cheat only in simulation
    (px, py, ptheta) = pose

    if confident:
        # Lock in the goal heading (+pi/2) the first time we get here.
        if target_theta is None:
            target_theta = util.fix_angle_plus_minus_pi(ptheta + math.pi / 2)
        ptheta = util.fix_angle_plus_minus_pi(ptheta)
        if px > target_x + .05 and abs(ptheta) < math.pi / 4:
            io.Action(fvel=-0.2, rvel=0).execute()  # drive backwards if necessary
        elif px < target_x and abs(ptheta) < math.pi / 4:
            io.Action(fvel=0.2, rvel=0).execute()  # drive to desired x location
        elif ptheta < target_theta - .05:
            io.Action(fvel=0, rvel=0.2).execute()  # rotate
        elif sonars[3] > .3:
            io.Action(fvel=0.2, rvel=0).execute()  # drive into spot
        else:
            io.Action(fvel=0, rvel=0).execute()  # congratulate yourself (or call insurance company)
        return

    # Quality metric. Important to do this before we update the belief state,
    # because it is always a prediction.
    if not REAL_ROBOT:
        parkingSpaceSize = .75
        robotWidth = 0.3
        margin = (parkingSpaceSize - robotWidth) / 2
        robot.probMeasures.append(
            estimate_quality_measure(robot.estimator.belief, x_min, x_max,
                                     num_states, margin, px))
        trueS = discretize(px, (x_max - x_min) / num_states, valueMin=x_min)
        trueS = clip(trueS, 0, num_states - 1)
        n = len(robot.probMeasures)  # NOTE(review): unused here; kept for checkoff/inspection

    # Discretize the current left-sonar reading into an observation index.
    left = discretize(sonars[0], sonarMax / num_observations,
                      num_observations - 1)
    if not REAL_ROBOT:
        robot.data.append((trueS, ideal[trueS], left))

    # Marginal likelihood of this observation under the prior belief.
    # NOTE(review): not consumed below; kept for checkoff/inspection.
    obsProb = sum([robot.estimator.belief.prob(s) * OBS_MODEL_TO_USE(s).prob(left)
                   for s in range(num_states)])

    # GRAPHICS
    if robot.g is not None:
        # redraw ideal distances (compensating for tk bug on macs)
        # draw robot's true state
        if not REAL_ROBOT:
            if trueS < num_states:
                robot.g.updateDist()
                robot.g.updateTrueRobot(trueS)
        # update observation model graph
        robot.g.updateObsLabel(left)
        robot.g.updateObsGraph([OBS_MODEL_TO_USE(s).prob(left)
                                for s in range(num_states)])

    # Belief update, then check whether localization has converged.
    robot.estimator.update(left)
    (location, confident) = confident_location(robot.estimator.belief)
    if confident:
        target_x = (parking_spot - location) * (x_max - x_min) / float(num_states) + px
        print('I am at x =', location, ': proceeding to parking space')
        if not REAL_ROBOT:
            checkoff.localized(globals())

    # GRAPHICS
    if robot.g is not None:
        # update world drawing
        # update belief graph
        robot.g.updateBeliefGraph([robot.estimator.belief.prob(s)
                                   for s in range(num_states)])

    # DL6 Angle Controller: proportional control on right-wall distance
    # with a damping term on the wall angle.
    (distanceRight, theta) = sonarDist.get_distance_right_and_angle(sonars)
    if not theta:
        theta = 0  # no angle estimate; treat as parallel to the wall
    e = desiredRight - distanceRight
    ROTATIONAL_VELOCITY = Kp * e - Ka * theta
    io.set_forward(FORWARD_VELOCITY)
    io.set_rotational(ROTATIONAL_VELOCITY)