def on_step():
    """Single control-loop step: record the pose, run the path follower,
    and command the wheel velocities."""
    # Read the (cheat-mode) ground-truth pose from the simulator.
    pose_x, pose_y, heading = io.get_position(cheat=True)

    # Log the trajectory for the slime-trail plot and the checkoff system.
    robot.slime_x.append(pose_x)
    robot.slime_y.append(pose_y)
    checkoff.update(globals())

    # World-frame position is the reported pose offset by the start location.
    position = util.Point(pose_x, pose_y).add(robot.initial_location)

    # Ask the path follower for velocities and forward them to the robot.
    fv, rv = drive(robot.path, position, heading)
    io.set_forward(fv)
    io.set_rotational(rv)
def on_step():
    """One step of the localize-then-park controller.

    While not yet confident, fold the left-sonar reading into the histogram
    belief and wall-follow to the right (DL6 angle controller).  Once
    confident_location() reports confidence, switch to an open-loop parking
    maneuver: drive to target_x, rotate to target_theta, then drive into the
    spot until the front sonar reads close.
    """
    if not REAL_ROBOT:
        checkoff.update(globals())

    # Persist across steps: set on the step we localize, used while parking.
    global confident, target_x, target_theta

    sonars = io.get_sonars()
    # In simulation (not REAL_ROBOT) this returns the true pose.
    pose = io.get_position(not REAL_ROBOT)
    (px, py, ptheta) = pose

    if confident:
        # First confident step: target heading is pi/2 left of current.
        if target_theta is None:
            target_theta = util.fix_angle_plus_minus_pi(ptheta+math.pi/2)
        ptheta = util.fix_angle_plus_minus_pi(ptheta)
        # Parking state machine, ordered by priority of the maneuver phases.
        if px>target_x+.05 and abs(ptheta)<math.pi/4:
            io.Action(fvel=-0.2,rvel=0).execute() #drive backwards if necessary
        elif px<target_x and abs(ptheta)<math.pi/4:
            io.Action(fvel=0.2,rvel=0).execute() #drive to desired x location
        elif ptheta<target_theta-.05:
            io.Action(fvel=0,rvel=0.2).execute() #rotate
        elif sonars[3]>.3:
            io.Action(fvel=0.2,rvel=0).execute() #drive into spot
        else:
            io.Action(fvel=0,rvel=0).execute() #congratulate yourself (or call insurance company)
        return

    # Quality metric. Important to do this before we update the belief state, because
    # it is always a prediction
    if not REAL_ROBOT:
        parkingSpaceSize = .75
        robotWidth = 0.3
        # Clearance on each side of the robot inside the parking space.
        margin = (parkingSpaceSize - robotWidth) / 2
        robot.probMeasures.append(estimate_quality_measure(robot.estimator.belief, x_min, x_max, num_states, margin, px))
        # Discretize the true x position into a state index, clipped to range.
        trueS = discretize(px, (x_max - x_min)/num_states, valueMin = x_min)
        trueS = clip(trueS, 0, num_states-1)
    # NOTE(review): n is computed but never used in this block.
    n = len(robot.probMeasures)

    # current discretized sonar reading
    left = discretize(sonars[0], sonarMax/num_observations, num_observations-1)
    if not REAL_ROBOT:
        robot.data.append((trueS, ideal[trueS], left))

    # obsProb
    # NOTE(review): obsProb is computed but never used in this block.
    obsProb = sum([robot.estimator.belief.prob(s) * OBS_MODEL_TO_USE(s).prob(left) \
                   for s in range(num_states)])

    # GRAPHICS
    if robot.g is not None:
        # redraw ideal distances (compensating for tk bug on macs)
        # draw robot's true state
        if not REAL_ROBOT:
            if trueS < num_states:
                robot.g.updateDist()
                robot.g.updateTrueRobot(trueS)
        # update observation model graph
        robot.g.updateObsLabel(left)
        robot.g.updateObsGraph([OBS_MODEL_TO_USE(s).prob(left) \
                                for s in range(num_states)])

    # Fold the observation into the belief, then test for localization.
    robot.estimator.update(left)
    (location, confident) = confident_location(robot.estimator.belief)

    if confident:
        # Convert the discrete belief location into a metric x target,
        # expressed relative to the current odometry reading px.
        target_x = (parking_spot-location)*(x_max-x_min)/float(num_states) + px
        print('I am at x =',location,': proceeding to parking space')
        if not REAL_ROBOT:
            checkoff.localized(globals())

    # GRAPHICS
    if robot.g is not None:
        # update world drawing
        # update belief graph
        robot.g.updateBeliefGraph([robot.estimator.belief.prob(s) for s in range(num_states)])

    # DL6 Angle Controller
    (distanceRight, theta) = sonarDist.get_distance_right_and_angle(sonars)
    if not theta:
        theta = 0
    # Proportional control on right-wall distance, damped by wall angle.
    e = desiredRight-distanceRight
    ROTATIONAL_VELOCITY = Kp*e - Ka*theta
    io.set_forward(FORWARD_VELOCITY)
    io.set_rotational(ROTATIONAL_VELOCITY)
def on_step():
    """One mapping/navigation step: read sonars, update the occupancy map,
    (re)plan with uniform-cost search, and drive toward the next waypoint.

    Raises Exception (with the checkoff code) once within 0.1 m of goal_point.
    """
    global inp
    robot.count += 1
    inp = io.SensorInput(cheat=True)

    # discretize sonar readings
    # each element in discrete_sonars is a tuple (d, cells)
    # d is the distance measured by the sonar
    # cells is a list of grid cells (r,c) between the sonar and the point d meters away
    discrete_sonars = []
    for (sonar_pose, distance) in zip(sonarDist.sonar_poses,inp.sonars):
        if NOISE_ON:
            distance = noiseModel.noisify(distance, grid_square_size)
        if distance >= 5:
            # Out-of-range reading: trace the ray only 1 m out — presumably to
            # mark nearby cells as free without asserting a hit (the d >= 5
            # check in the map-update loop below skips sonar_hit). Confirm.
            sensor_indices = robot.map.point_to_indices(inp.odometry.transformPose(sonar_pose).point())
            hit_indices = robot.map.point_to_indices(sonarDist.sonar_hit(1, sonar_pose, inp.odometry))
            ray = util.line_indices(sensor_indices, hit_indices)
            discrete_sonars.append((distance, ray))
            continue
        # In-range reading: trace from the sonar to the measured hit point.
        sensor_indices = robot.map.point_to_indices(inp.odometry.transformPose(sonar_pose).point())
        hit_indices = robot.map.point_to_indices(sonarDist.sonar_hit(distance, sonar_pose, inp.odometry))
        ray = util.line_indices(sensor_indices, hit_indices)
        discrete_sonars.append((distance, ray))

    # figure out where robot is
    start_point = inp.odometry.point()
    startCell = robot.map.point_to_indices(start_point)

    def plan_wont_work():
        # True if any cell on the current plan has become impassable.
        for point in robot.plan:
            if not robot.map.is_passable(point):
                return True
        return False

    # if necessary, make new plan
    if robot.plan is None or plan_wont_work(): #discrete_sonars[3][0] < 0.3: ###### or if the robot is about to crash into a wall
        print('REPLANNING')
        # Uniform-cost search (zero heuristic) from current cell to the goal.
        robot.plan = search.uniform_cost_search(robot.successors,
                                                robot.map.point_to_indices(inp.odometry.point()),
                                                lambda x: x == robot.goalIndices,
                                                lambda x: 0)

    # make the path less jittery
    #print(robot.plan)
    to_remove = []
    if robot.plan is not None:
        # Collect each waypoint that is (nearly) collinear with its two
        # neighbors — it adds no useful turning information.
        for i in range(len(robot.plan)-2):
            line = util.LineSeg(util.Point(robot.plan[i][0], robot.plan[i][1]),
                                util.Point(robot.plan[i+1][0], robot.plan[i+1][1]))
            if line.dist_to_point(util.Point(robot.plan[i+2][0], robot.plan[i+2][1])) < 0.1:
                to_remove.append(robot.plan[i+1])
    print(to_remove)
    for i in to_remove:
        robot.plan.remove(i)
    #print(robot.plan)
    #print()

    # graphics (draw robot's plan, robot's location, goal_point)
    # do not change this block
    for w in robot.windows:
        w.redraw_world()
    robot.windows[-1].mark_cells(robot.plan,'blue')
    robot.windows[-1].mark_cell(robot.map.point_to_indices(inp.odometry.point()),'gold')
    robot.windows[-1].mark_cell(robot.map.point_to_indices(goal_point),'green')

    # update map
    for (d,cells) in discrete_sonars:
        #print(cells)
        # Only an in-range reading asserts an obstacle at the last ray cell;
        # every ray marks the cells leading up to it as passable.
        if d < 5:
            robot.map.sonar_hit(cells[-1])
        for i in range(len(cells)-1):
            robot.map.sonar_pass(cells[i])

    # if we are within 0.1m of the goal point, we are done!
    if start_point.distance(goal_point) <= 0.1:
        io.Action(fvel=0,rvel=0).execute()
        code = checkoff.generate_code(globals())
        if isinstance(code, bytes):
            code = code.decode()
        raise Exception('Goal Reached!\n\n%s' % code)

    # otherwise, figure out where to go, and drive there
    # Pop waypoints we have already reached (keep at least one).
    destination_point = robot.map.indices_to_point(robot.plan[0])
    while start_point.isNear(destination_point,0.1) and len(robot.plan)>1:
        robot.plan.pop(0)
        destination_point = robot.map.indices_to_point(robot.plan[0])

    currentAngle = inp.odometry.theta
    angleDiff = start_point.angleTo(destination_point)-currentAngle
    angleDiff = util.fix_angle_plus_minus_pi(angleDiff)
    fv = rv = 0
    # Turn in place until roughly facing the waypoint, then drive forward;
    # both gains are proportional (factor 4) to the remaining error.
    if start_point.distance(destination_point) > 0.1:
        if abs(angleDiff) > 0.1:
            rv = 4*angleDiff
        else:
            fv = 4*start_point.distance(destination_point)
    io.set_forward(fv)
    io.set_rotational(rv)

    # render the drawing windows
    # do not change this block
    for w in robot.windows:
        w.render()
def on_step():
    """One step of the localize-then-park controller.

    While not yet confident, fold the left-sonar reading into the histogram
    belief and wall-follow to the right (DL6 angle controller).  Once
    confident_location() reports confidence, switch to an open-loop parking
    maneuver: drive to target_x, rotate to target_theta, then drive into the
    spot until the front sonar reads close.
    """
    if not REAL_ROBOT:
        checkoff.update(globals())

    # Persist across steps: set on the step we localize, used while parking.
    global confident, target_x, target_theta

    sonars = io.get_sonars()
    # In simulation (not REAL_ROBOT) this returns the true pose.
    pose = io.get_position(not REAL_ROBOT)
    (px, py, ptheta) = pose

    if confident:
        # First confident step: target heading is pi/2 left of current.
        if target_theta is None:
            target_theta = util.fix_angle_plus_minus_pi(ptheta + math.pi / 2)
        ptheta = util.fix_angle_plus_minus_pi(ptheta)
        # Parking state machine, ordered by priority of the maneuver phases.
        if px > target_x + .05 and abs(ptheta) < math.pi / 4:
            io.Action(fvel=-0.2, rvel=0).execute()  #drive backwards if necessary
        elif px < target_x and abs(ptheta) < math.pi / 4:
            io.Action(fvel=0.2, rvel=0).execute()  #drive to desired x location
        elif ptheta < target_theta - .05:
            io.Action(fvel=0, rvel=0.2).execute()  #rotate
        elif sonars[3] > .3:
            io.Action(fvel=0.2, rvel=0).execute()  #drive into spot
        else:
            io.Action(fvel=0, rvel=0).execute()  #congratulate yourself (or call insurance company)
        return

    # Quality metric. Important to do this before we update the belief state, because
    # it is always a prediction
    if not REAL_ROBOT:
        parkingSpaceSize = .75
        robotWidth = 0.3
        # Clearance on each side of the robot inside the parking space.
        margin = (parkingSpaceSize - robotWidth) / 2
        robot.probMeasures.append(
            estimate_quality_measure(robot.estimator.belief, x_min, x_max,
                                     num_states, margin, px))
        # Discretize the true x position into a state index, clipped to range.
        trueS = discretize(px, (x_max - x_min) / num_states, valueMin=x_min)
        trueS = clip(trueS, 0, num_states - 1)
    # NOTE(review): n is computed but never used in this block.
    n = len(robot.probMeasures)

    # current discretized sonar reading
    left = discretize(sonars[0], sonarMax / num_observations,
                      num_observations - 1)
    if not REAL_ROBOT:
        robot.data.append((trueS, ideal[trueS], left))

    # obsProb
    # NOTE(review): obsProb is computed but never used in this block.
    obsProb = sum([robot.estimator.belief.prob(s) * OBS_MODEL_TO_USE(s).prob(left) \
                   for s in range(num_states)])

    # GRAPHICS
    if robot.g is not None:
        # redraw ideal distances (compensating for tk bug on macs)
        # draw robot's true state
        if not REAL_ROBOT:
            if trueS < num_states:
                robot.g.updateDist()
                robot.g.updateTrueRobot(trueS)
        # update observation model graph
        robot.g.updateObsLabel(left)
        robot.g.updateObsGraph([OBS_MODEL_TO_USE(s).prob(left) \
                                for s in range(num_states)])

    # Fold the observation into the belief, then test for localization.
    robot.estimator.update(left)
    (location, confident) = confident_location(robot.estimator.belief)

    if confident:
        # Convert the discrete belief location into a metric x target,
        # expressed relative to the current odometry reading px.
        target_x = (parking_spot - location) * (x_max - x_min) / float(num_states) + px
        print('I am at x =', location, ': proceeding to parking space')
        if not REAL_ROBOT:
            checkoff.localized(globals())

    # GRAPHICS
    if robot.g is not None:
        # update world drawing
        # update belief graph
        robot.g.updateBeliefGraph(
            [robot.estimator.belief.prob(s) for s in range(num_states)])

    # DL6 Angle Controller
    (distanceRight, theta) = sonarDist.get_distance_right_and_angle(sonars)
    if not theta:
        theta = 0
    # Proportional control on right-wall distance, damped by wall angle.
    e = desiredRight - distanceRight
    ROTATIONAL_VELOCITY = Kp * e - Ka * theta
    io.set_forward(FORWARD_VELOCITY)
    io.set_rotational(ROTATIONAL_VELOCITY)