def do_action(self, action, tape_detections):
    # TODO: implement left/right/tape actions
    if action == "left":
        cmd_vel = ctrl.ControllerOutput(0.5, np.pi/4)
    elif action == "right":
        cmd_vel = ctrl.ControllerOutput(0.5, -np.pi/4)
    else:  # tape
        detections = tape_detections
        # Compute the planar distance to each detected tape blob.
        distances = np.zeros(len(detections))
        for i in range(len(distances)):
            blob = detections[i]
            pos = blob.xyz_mean
            distances[i] = np.sqrt(pos[0]**2 + pos[1]**2)
        # Find the index of the minimum distance and steer toward that blob.
        idx = np.argmin(distances)
        pos = detections[idx].xyz_mean
        kp = 5
        theta_dot = kp * np.arctan2(pos[1], pos[0])
        # This was modified with a higher tape gain from project 4;
        # large time improvement.
        cmd_vel = ctrl.ControllerOutput(0.75, theta_dot)
    return cmd_vel
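# Note: ctrl.ControllerOutput is used by every controller in this file but is
# defined in the ctrl module, not shown here. A minimal sketch of the assumed
# shape -- just a (forward_vel, angular_vel) pair supporting both positional
# and keyword construction; the real definition may differ:
from collections import namedtuple

ControllerOutput = namedtuple('ControllerOutput',
                              ['forward_vel', 'angular_vel'])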
def update(self, time, dt, robot_state, camera_data):

    la = numpy.zeros(2)

    app = self.app

    if app.key_is_down(glfw.KEY_I):
        la += (0.5, 0)

    if app.key_is_down(glfw.KEY_K):
        la += (-0.5, 0)

    if app.key_is_down(glfw.KEY_J):
        la += (0, 2.0)

    if app.key_is_down(glfw.KEY_L):
        la += (0, -2.0)

    if app.key_is_down(glfw.KEY_U):
        la += (0.5, 1.0)

    if app.key_is_down(glfw.KEY_O):
        la += (0.5, -1.0)

    if numpy.any(la):
        return ctrl.ControllerOutput(forward_vel=la[0], angular_vel=la[1])
    else:
        return None
def pure_pursuit(self, T_world_from_robot, T_world_from_gate):

    is_finished = False

    # TODO: implement the pure pursuit algorithm we discussed in class

    # Establish the position of the robot in the gate coordinate frame.
    T_robot_from_gate = T_world_from_robot.inverse() * T_world_from_gate
    robot_in_gate_frame = T_robot_from_gate.inverse().position
    print("Robot position in gate frame: ", robot_in_gate_frame)

    # Once the robot has crossed the gate's y axis, the segment is done.
    if robot_in_gate_frame[0] > 0:
        is_finished = True

    # Chase a lookahead target alpha meters ahead of the robot along the
    # gate's x axis.
    alpha = 0.7
    target_in_gate_frame = np.array([robot_in_gate_frame[0] + alpha, 0])
    target_in_robot_frame = T_robot_from_gate.transform_fwd(target_in_gate_frame)

    # Steer proportionally to the bearing of the target.
    k = 2.5
    omega = k * np.arctan2(target_in_robot_frame[1], target_in_robot_frame[0])
    cmd_vel = ctrl.ControllerOutput(0.75, omega)

    return cmd_vel, is_finished
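# The transform objects above (T_world_from_robot and friends) support
# inverse(), composition with *, and transform_fwd(); their class is defined
# elsewhere in the project. A minimal SE(2) sketch consistent with that
# usage, purely for illustration:
import numpy as np

class Transform2D:

    def __init__(self, position, angle):
        self.position = np.asarray(position, dtype=float)  # (x, y) translation
        self.angle = float(angle)                          # rotation, radians

    def _rot(self):
        c, s = np.cos(self.angle), np.sin(self.angle)
        return np.array([[c, -s], [s, c]])

    def transform_fwd(self, point):
        # Map a point expressed in this frame into the parent frame.
        return self._rot() @ np.asarray(point) + self.position

    def inverse(self):
        # If self maps B coordinates to A coordinates, the inverse maps A to B.
        R_inv = self._rot().T
        return Transform2D(-R_inv @ self.position, -self.angle)

    def __mul__(self, other):
        # Compose transforms: T_a_from_b * T_b_from_c == T_a_from_c.
        return Transform2D(self.transform_fwd(other.position),
                           self.angle + other.angle)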
def pure_pursuit(self, T_world_from_robot, T_world_from_gate):

    is_finished = False

    # TODO: implement the pure pursuit algorithm we discussed in class

    # Establish the position of the robot in the gate coordinate frame.
    T_robot_from_gate = T_world_from_robot.inverse() * T_world_from_gate
    robot_in_gate_frame = T_robot_from_gate.inverse().position
    #print(robot_in_gate_frame[1])
    #print("Robot angle cotangent in gate frame:", robot_in_gate_frame[1]/robot_in_gate_frame[0])
    #print("Robot position in gate frame: ", robot_in_gate_frame)

    alpha = 0.7

    # Once the robot has crossed the gate's y axis, the segment is done.
    if robot_in_gate_frame[0] > 0:
        is_finished = True

    # Place the lookahead target alpha meters ahead of the robot along the
    # gate's x axis.
    x = robot_in_gate_frame[0] + alpha
    if x > 0:
        # Past the gate origin: track the gate's x axis directly.
        y = 0
    else:
        # Approaching: follow a cubic that meets the gate's x axis tangentially.
        (a, b) = self.find_cubic(T_world_from_robot, T_world_from_gate)
        y = a * x**3 + b * x**2

    target_in_gate_frame = np.array([x, y])
    target_in_robot_frame = T_robot_from_gate.transform_fwd(target_in_gate_frame)

    # Steer proportionally to the bearing of the target.
    k = 2.5
    omega = k * np.arctan2(target_in_robot_frame[1], target_in_robot_frame[0])
    cmd_vel = ctrl.ControllerOutput(0.75, omega)

    return cmd_vel, is_finished
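# find_cubic is called above but defined elsewhere in this class. A plausible
# sketch, assuming it fits y = a*x**3 + b*x**2 in the gate frame through the
# robot's pose, so the curve leaves the robot tangent to its heading and
# reaches the gate origin tangent to the gate's x axis (y(0) = y'(0) = 0 by
# construction). All names and details here are hypothetical:
import numpy as np

def find_cubic(T_world_from_robot, T_world_from_gate):
    T_gate_from_robot = T_world_from_gate.inverse() * T_world_from_robot
    x0, y0 = T_gate_from_robot.position
    # Robot heading direction in the gate frame, obtained by pushing a unit
    # x vector through the transform.
    heading = (T_gate_from_robot.transform_fwd(np.array([1.0, 0.0])) -
               T_gate_from_robot.position)
    slope = heading[1] / heading[0]  # dy/dx of the robot's heading
    # Solve y(x0) = y0 and y'(x0) = slope for the coefficients (a, b):
    #   [ x0**3    x0**2 ] [a]   [ y0    ]
    #   [ 3*x0**2  2*x0  ] [b] = [ slope ]
    A = np.array([[x0**3, x0**2],
                  [3.0 * x0**2, 2.0 * x0]])
    a, b = np.linalg.solve(A, np.array([y0, slope]))
    return a, b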
def update(self, time, dt, robot_state, camera_data):

    ##################################################
    # state transition logic

    elapsed = time - self.init_time

    # change state after just over 2 seconds to give a little time
    # for a brief stop after every segment
    is_done = elapsed.total_seconds() > 2.15

    if is_done:

        # print out some information about relative transform
        # since the start of this segment
        world_from_cur_robot = robot_state.odom_pose
        world_from_prev_robot = self.init_odom_pose
        prev_robot_from_world = world_from_prev_robot.inverse()
        prev_from_cur = prev_robot_from_world * world_from_cur_robot

        print('done, current pose in frame of initial pose =', prev_from_cur)

        if self.state == 'straight':
            new_state = 'turn'
        else:
            new_state = 'straight'

        self.set_state(time, new_state, robot_state.odom_pose)

    ##################################################
    # output logic

    if elapsed.total_seconds() >= 2.0:
        # brief stop
        return ctrl.ControllerOutput(forward_vel=0.0, angular_vel=0.0)
    elif self.state == 'straight':
        # go straight
        return ctrl.ControllerOutput(forward_vel=0.5, angular_vel=0.0)
    else:  # self.state == 'turn'
        # turn left
        return ctrl.ControllerOutput(forward_vel=0.0, angular_vel=numpy.pi / 4)
def update(self, time, dt, robot_state, camera_data):

    ##################################################
    # state transition logic

    elapsed = time - self.init_time

    is_done = (self.duration is not None and
               elapsed.total_seconds() >= self.duration)

    if self.is_virtual:
        # simulate bumpers from the laser scan
        scan = camera_data.scan
        bump_left = scan.ranges[0] < MIN_DIST
        bump_center = scan.ranges[len(scan.ranges) // 2] < MIN_DIST
        bump_right = scan.ranges[-1] < MIN_DIST
    else:
        bump_left = robot_state.bump_left
        bump_center = robot_state.bump_center
        bump_right = robot_state.bump_right

    if bump_left or bump_right or bump_center:
        print('BUMP{}{}{}'.format(' LEFT' if bump_left else '',
                                  ' CENTER' if bump_center else '',
                                  ' RIGHT' if bump_right else ''))

    if self.state == 'straight':
        # only react to bumps while driving straight
        if bump_center:
            self.set_state(time, 'back_left_big', 1.0)
        elif bump_right:
            self.set_state(time, 'back_left', 1.0)
        elif bump_left:
            self.set_state(time, 'back_right', 1.0)
    elif is_done:
        if self.state == 'back_left':
            self.set_state(time, 'left', rand_range(1.0, 2.0))
        elif self.state == 'back_left_big':
            self.set_state(time, 'left', rand_range(2.5, 3.5))
        elif self.state == 'back_right':
            self.set_state(time, 'right', rand_range(1.0, 2.0))
        else:
            self.set_state(time, 'straight', None)

    ##################################################
    # output logic

    if self.state == 'straight':
        f, a = 0.6, 0.0
    elif self.state == 'left':
        f, a = 0.0, 1.05
    elif self.state == 'right':
        f, a = 0.0, -1.05
    else:  # back_left or back_right or back_left_big
        f, a = -0.3, 0.0

    return ctrl.ControllerOutput(forward_vel=f, angular_vel=a)
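# MIN_DIST and rand_range are referenced above but defined elsewhere; a
# minimal sketch of what they presumably look like (the 0.3 m threshold is a
# hypothetical value, not taken from the project):
import numpy

MIN_DIST = 0.3  # simulated bumper trigger distance for the laser scan, meters

def rand_range(lo, hi):
    # Uniform random float in [lo, hi), used to vary the turn durations.
    return numpy.random.uniform(lo, hi)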
def update(self, time, dt, robot_state, camera_data):

    # default value is NaN to suppress plots of things we don't
    # have blob detections for
    self.log_vars[:8] = numpy.nan

    detections = camera_data.detections

    for idx, color_name in enumerate(self.ALL_COLORS):

        if color_name not in detections:
            continue

        blobs = detections[color_name]

        if not len(blobs):
            continue

        biggest = blobs[0]
        pos = biggest.xyz_mean

        if pos is None:
            continue

        odom_pos = robot_state.odom_pose * pos[:2]

        self.log_vars[2*idx + 0] = odom_pos[0]
        self.log_vars[2*idx + 1] = odom_pos[1]

    if self.can_see(detections, 'orange_pylon'):
        mood = 'angry'
    elif self.can_see(detections, 'purple_ball'):
        mood = 'happy'
    else:
        mood = 'bored'

    self.mood.store_value(mood)

    self.log_vars[-1] = numpy.sin(time.total_seconds() * 4.0 * numpy.pi)

    return ctrl.ControllerOutput(forward_vel=0.5, angular_vel=0.5)
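# can_see is a helper on this class that is not shown here; presumably it
# just checks whether the named color has at least one detected blob this
# frame, roughly as below (the real version may also apply an area threshold):
def can_see(self, detections, color_name):
    return color_name in detections and len(detections[color_name]) > 0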
def update(self, time, dt, robot_state, camera_data):

    color_name, direction = self.COLOR_DIR_SEQUENCE[self.cur_idx]

    detections = camera_data.detections

    ##################################################
    # state transition logic

    if not self.cur_move:
        elapsed = time - self.init_time
        if elapsed.total_seconds() >= 3.0:
            print('done staring!')
            next_idx = (self.cur_idx + 1) % len(self.COLOR_DIR_SEQUENCE)
            self.set_state(time, idx=next_idx, move=True)
    elif len(detections[color_name]):
        blob = detections[color_name][0]
        angle = numpy.arctan2(blob.xyz_mean[1], blob.xyz_mean[0])
        if numpy.abs(angle) < 0.05:
            print('found blob with area', blob.area, 'at angle', angle)
            self.set_state(time, move=False)

    ##################################################
    # output logic

    if not self.cur_move:
        angular_vel = 0
    else:
        angular_vel = 0.5 * direction

    return ctrl.ControllerOutput(forward_vel=0, angular_vel=angular_vel)
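# This class's set_state is not shown; judging from the call sites above, it
# presumably records the transition time and updates whichever fields were
# supplied -- a hypothetical reconstruction:
def set_state(self, time, idx=None, move=None):
    self.init_time = time
    if idx is not None:
        self.cur_idx = idx
    if move is not None:
        self.cur_move = move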
def update(self, time, dt, robot_state, camera_data):
    """Go fast!!!"""
    return ctrl.ControllerOutput(forward_vel=0.7, angular_vel=0)