def run(self):
    self.robot.set_pan(0)
    self.robot.set_tilt(0)
    camera = camera_stream.setup_camera()
    speed_pid = PIController(proportional_constant=0.8,
                             integral_constant=0.1, windup_limit=100)
    direction_pid = PIController(proportional_constant=0.25,
                                 integral_constant=0.05, windup_limit=400)
    time.sleep(0.1)
    self.robot.servos.stop_all()
    print("Setup Complete")
    print('Radius, Radius error, speed value, direction error, direction value')
    for frame in camera_stream.start_stream(camera):
        (x, y), radius = self.process_frame(frame)
        self.process_control()
        if self.running and radius > 20:
            radius_error = self.correct_radius - radius
            speed_value = speed_pid.get_value(radius_error)
            direction_error = self.center - x
            direction_value = direction_pid.get_value(direction_error)
            print(f"{radius}, {radius_error}, {speed_value:.2f}, "
                  f"{direction_error}, {direction_value:.2f}")
            # Now produce left and right motor speeds
            self.robot.set_left(speed_value - direction_value)
            self.robot.set_right(speed_value + direction_value)
        else:
            self.robot.stop_motors()
            if not self.running:
                speed_pid.reset()
                direction_pid.reset()
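Every listing in this section drives a PIController through get_value() and reset(), some with a windup_limit and a delta_time argument. The class itself is not shown here, so the following is only a minimal sketch inferred from those calls, not necessarily the exact implementation used elsewhere:

class PIController:
    """Minimal proportional-integral controller sketch, inferred from how it
    is called in these listings; the real implementation may differ."""

    def __init__(self, proportional_constant=0, integral_constant=0,
                 windup_limit=None):
        self.proportional_constant = proportional_constant
        self.integral_constant = integral_constant
        self.windup_limit = windup_limit
        self.integral_sum = 0

    def handle_proportional(self, error):
        return self.proportional_constant * error

    def handle_integral(self, error, delta_time):
        # Stop accumulating once the windup limit is reached, so the
        # integral term cannot grow without bound.
        if self.windup_limit is None or abs(self.integral_sum) < self.windup_limit:
            self.integral_sum += error * delta_time
        return self.integral_constant * self.integral_sum

    def get_value(self, error, delta_time=1):
        return self.handle_proportional(error) + self.handle_integral(error, delta_time)

    def reset(self):
        self.integral_sum = 0

The proportional term reacts to the current error, while the integral term accumulates persistent error over time; the windup limit caps that accumulation so a long stall does not turn into a large overshoot when the robot finally moves.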
def drive_distance(bot, distance, speed=80):
    # Use left as "primary" motor, the right is keeping up
    set_primary = bot.set_left
    primary_encoder = bot.left_encoder
    set_secondary = bot.set_right
    secondary_encoder = bot.right_encoder
    controller = PIController(proportional_constant=5, integral_constant=0.3)
    # Start the motors, and start the loop
    set_primary(speed)
    set_secondary(speed)
    while primary_encoder.pulse_count < distance or secondary_encoder.pulse_count < distance:
        time.sleep(0.01)
        # How far off are we?
        error = primary_encoder.pulse_count - secondary_encoder.pulse_count
        adjustment = controller.get_value(error)
        # How fast should the motors move to get there?
        set_primary(int(speed - adjustment))
        set_secondary(int(speed + adjustment))
        # Some debug
        logger.debug(
            f"Encoders: primary: {primary_encoder.pulse_count}, "
            f"secondary: {secondary_encoder.pulse_count}, "
            f"e: {error} adjustment: {adjustment:.2f}")
        logger.info(
            f"Distances: primary: {primary_encoder.distance_in_mm()} mm, "
            f"secondary: {secondary_encoder.distance_in_mm()} mm")
def drive_distance(bot, distance, speed=80):
    # Use left as "primary" motor, the right is keeping up
    set_primary = bot.set_left
    primary_encoder = bot.left_encoder
    set_secondary = bot.set_right
    secondary_encoder = bot.right_encoder
    controller = PIController(proportional_constant=5, integral_constant=0.2)
    # Start the motors, and start the loop
    set_primary(speed)
    set_secondary(speed)
    while primary_encoder.pulse_count < distance or secondary_encoder.pulse_count < distance:
        # Sleep a bit before calculating
        time.sleep(0.05)
        # How far off are we?
        error = primary_encoder.pulse_count - secondary_encoder.pulse_count
        adjustment = controller.get_value(error)
        # How fast should the motor move to get there?
        set_secondary(int(speed + adjustment))
        # Some debug
        print("Primary c:{} ({} mm)\tSecondary c:{} ({} mm) e:{} adjustment: {:.2f}".format(
            primary_encoder.pulse_count, primary_encoder.distance_in_mm(),
            secondary_encoder.pulse_count, secondary_encoder.distance_in_mm(),
            error, adjustment))
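A hypothetical call to drive_distance follows; the distance is in encoder pulses (matching pulse_count), and 600 is only an illustrative value:

bot = Robot()
try:
    # 600 encoder pulses at the default speed; the value is for illustration only.
    drive_distance(bot, 600)
finally:
    # Stop the motors even if the loop exits with an error.
    bot.stop_motors()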
def run(self):
    # Set pan and tilt to middle, then clear it.
    self.robot.set_pan(0)
    self.robot.set_tilt(0)
    # start camera
    camera = pi_camera_stream.setup_camera()
    # speed pid - based on the radius we get.
    speed_pid = PIController(proportional_constant=0.8,
                             integral_constant=0.1, windup_limit=100)
    # direction pid - how far from the middle X is.
    direction_pid = PIController(proportional_constant=0.25,
                                 integral_constant=0.1, windup_limit=400)
    # warm up and servo move time
    time.sleep(0.1)
    # Servos will be in place - stop them for now.
    self.robot.servos.stop_all()
    print("Setup Complete")
    # Main loop
    for frame in pi_camera_stream.start_stream(camera):
        (x, y), radius = self.process_frame(frame)
        self.process_control()
        if self.running and radius > 20:
            # The size is the first error
            radius_error = self.correct_radius - radius
            speed_value = speed_pid.get_value(radius_error)
            # And the second error is based on the center coordinate.
            direction_error = self.center - x
            direction_value = direction_pid.get_value(direction_error)
            print(
                "radius: %d, radius_error: %d, speed_value: %.2f, "
                "direction_error: %d, direction_value: %.2f" %
                (radius, radius_error, speed_value,
                 direction_error, direction_value))
            # Now produce left and right motor speeds
            self.robot.set_left(speed_value - direction_value)
            self.robot.set_right(speed_value + direction_value)
        else:
            self.robot.stop_motors()
            if not self.running:
                speed_pid.reset()
                direction_pid.reset()
def run(self):
    self.robot.set_pan(0)
    self.robot.set_tilt(90)
    camera = camera_stream.setup_camera()
    direction_pid = PIController(proportional_constant=0.4,
                                 integral_constant=0.01, windup_limit=400)
    time.sleep(1)
    self.robot.servos.stop_all()
    print("Setup Complete")
    last_time = time.time()
    for frame in camera_stream.start_stream(camera):
        x, magnitude = self.process_frame(frame)
        self.process_control()
        if self.running and magnitude > self.diff_threshold:
            direction_error = self.center - x
            new_time = time.time()
            dt = new_time - last_time
            direction_value = direction_pid.get_value(direction_error, delta_time=dt)
            last_time = new_time
            print(f"Error: {direction_error}, Value: {direction_value:.2f}, t: {new_time}")
            # self.last_error = direction_error
            # self.last_value = direction_value
            # speed = self.speed
            # speed -= abs(direction_value) / 3
            self.robot.set_left(self.speed - direction_value)
            self.robot.set_right(self.speed + direction_value)
        else:
            self.robot.stop_motors()
            self.running = False
            direction_pid.reset()
            last_time = time.time()
def drive_distances(bot, left_distance, right_distance, speed=80):
    # We always want the "primary" to be the longest distance, and therefore the faster motor
    if abs(left_distance) >= abs(right_distance):
        print("Left is primary")
        set_primary = bot.set_left
        primary_encoder = bot.left_encoder
        set_secondary = bot.set_right
        secondary_encoder = bot.right_encoder
        primary_distance = left_distance
        secondary_distance = right_distance
    else:
        print("Right is primary")
        set_primary = bot.set_right
        primary_encoder = bot.right_encoder
        set_secondary = bot.set_left
        secondary_encoder = bot.left_encoder
        primary_distance = right_distance
        secondary_distance = left_distance
    primary_to_secondary_ratio = secondary_distance / (primary_distance * 1.0)
    secondary_speed = speed * primary_to_secondary_ratio
    print("Targets - primary: %d, secondary: %d, ratio: %.2f" %
          (primary_distance, secondary_distance, primary_to_secondary_ratio))
    primary_encoder.reset()
    secondary_encoder.reset()
    controller = PIController(proportional_constant=5, integral_constant=0.2)
    # Ensure that the encoders know which way they are going
    primary_encoder.set_direction(math.copysign(1, speed))
    secondary_encoder.set_direction(math.copysign(1, secondary_speed))
    # Start the motors, and start the loop
    set_primary(speed)
    set_secondary(int(secondary_speed))
    while (abs(primary_encoder.pulse_count) < abs(primary_distance) or
           abs(secondary_encoder.pulse_count) < abs(secondary_distance)):
        # And sleep a bit before calculating
        time.sleep(0.05)
        # How far off are we?
        secondary_target = primary_encoder.pulse_count * primary_to_secondary_ratio
        error = secondary_target - secondary_encoder.pulse_count
        # How fast should the motor move to get there?
        adjustment = controller.get_value(error)
        set_secondary(int(secondary_speed + adjustment))
        secondary_encoder.set_direction(math.copysign(1, secondary_speed + adjustment))
        # Some debug
        print("Primary c:{:.2f} ({:.2f} mm)\tSecondary c:{:.2f} ({:.2f} mm) "
              "t:{:.2f} e:{:.2f} s:{:.2f} adjustment: {:.2f}".format(
                  primary_encoder.pulse_count, primary_encoder.distance_in_mm(),
                  secondary_encoder.pulse_count, secondary_encoder.distance_in_mm(),
                  secondary_target, error, secondary_speed, adjustment))
        # Stop the primary if we need to
        if abs(primary_encoder.pulse_count) >= abs(primary_distance):
            print("Primary stop")
            set_primary(0)
            secondary_speed = 0
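An illustrative use of drive_distances: giving the wheels different targets (in encoder pulses) drives an arc, with the longer distance becoming the primary motor. The numbers here are examples only:

bot = Robot()
# Left wheel travels four times as far as the right, producing a right-hand arc.
drive_distances(bot, 400, 100, speed=80)
bot.stop_motors()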
import logging
import time

from robot import Robot
from pid_controller import PIController

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
logging.getLogger("pid_controller").setLevel(logging.DEBUG)

bot = Robot()
stop_at_time = time.time() + 15
speed = 80
bot.set_left(speed)
bot.set_right(speed)
pid = PIController(proportional_constant=4, integral_constant=0.3)
while time.time() < stop_at_time:
    time.sleep(0.01)
    # Calculate the error
    left = bot.left_encoder.pulse_count
    right = bot.right_encoder.pulse_count
    error = left - right
    # Get the speed adjustment
    adjustment = pid.get_value(error)
    right_speed = int(speed + adjustment)
    left_speed = int(speed - adjustment)
    logger.debug(f"error: {error} adjustment: {adjustment:.2f}")
    logger.info(f"left: {left} right: {right}, "
                f"left_speed: {left_speed} right_speed: {right_speed}")
    bot.set_left(left_speed)
    bot.set_right(right_speed)
class FaceTrackBehavior(object):
    """Behavior to find and point at a face."""

    def __init__(self, robot):
        self.robot = robot
        cascade_path = "/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml"
        self.cascade = cv2.CascadeClassifier(cascade_path)
        # Tuning values
        self.center_x = 160
        self.center_y = 120
        self.min_size = 20
        self.pan_pid = PIController(proportional_constant=0.1,
                                    integral_constant=0.03)
        self.tilt_pid = PIController(proportional_constant=-0.1,
                                     integral_constant=-0.03)
        # Current state
        self.running = False

    def process_control(self):
        instruction = get_control_instruction()
        if instruction == "start":
            self.running = True
        elif instruction == "stop":
            self.running = False
            self.pan_pid.reset()
            self.tilt_pid.reset()
            self.robot.servos.stop_all()
        elif instruction == "exit":
            print("Stopping")
            exit()

    def find_object(self, original_frame):
        """Search the frame for an object. Return the rectangle of the largest by w * h."""
        # Make it greyscale to reduce the data used
        gray_img = cv2.cvtColor(original_frame, cv2.COLOR_BGR2GRAY)
        # Detect all the objects
        objects = self.cascade.detectMultiScale(gray_img)
        largest = 0, (0, 0, 0, 0)  # area, (x, y, w, h)
        for (x, y, w, h) in objects:
            item_area = w * h
            if item_area > largest[0]:
                largest = item_area, (x, y, w, h)
        return largest[1]

    def make_display(self, display_frame):
        """Create display output, and put it on the queue."""
        encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(display_frame)
        put_output_image(encoded_bytes)

    def process_frame(self, frame):
        # Find the largest matching object
        (x, y, w, h) = self.find_object(frame)
        # Draw a rectangle on the original frame, then display this
        cv2.rectangle(frame, (x, y), (x + w, y + h), [255, 0, 0])
        self.make_display(frame)
        # Return the object details
        return x, y, w, h

    def run(self):
        # start camera
        camera = pi_camera_stream.setup_camera()
        # warm up time
        time.sleep(0.1)
        print("Setup Complete")
        # Main loop
        for frame in pi_camera_stream.start_stream(camera):
            (x, y, w, h) = self.process_frame(frame)
            self.process_control()
            if self.running and h > self.min_size:
                # Pan
                pan_error = self.center_x - (x + (w / 2))
                pan_value = self.pan_pid.get_value(pan_error)
                self.robot.set_pan(int(pan_value))
                # Tilt
                tilt_error = self.center_y - (y + (h / 2))
                tilt_value = self.tilt_pid.get_value(tilt_error)
                self.robot.set_tilt(int(tilt_value))
                print(
                    "x: %d, y: %d, pan_error: %d, tilt_error: %d, "
                    "pan_value: %.2f, tilt_value: %.2f" %
                    (x, y, pan_error, tilt_error, pan_value, tilt_value))
class FaceTrackBehavior:
    """Behavior to find and point at a face."""

    def __init__(self, robot):
        self.robot = robot
        cascade_path = "/usr/local/lib/python3.7/dist-packages/cv2/data/haarcascade_frontalface_default.xml"
        assert os.path.exists(cascade_path), f"File {cascade_path} not found"
        self.cascade = cv2.CascadeClassifier(cascade_path)
        # Tuning values
        self.center_x = 160
        self.center_y = 120
        self.min_size = 20
        self.pan_pid = PIController(proportional_constant=0.1,
                                    integral_constant=0.03)
        self.tilt_pid = PIController(proportional_constant=-0.1,
                                     integral_constant=-0.03)
        # Current state
        self.running = False

    def process_control(self):
        instruction = get_control_instruction()
        if instruction:
            command = instruction['command']
            if command == "start":
                self.running = True
            elif command == "stop":
                self.running = False
                self.pan_pid.reset()
                self.tilt_pid.reset()
                self.robot.servos.stop_all()
            elif command == "exit":
                print("Stopping")
                exit()

    def find_object(self, original_frame):
        """Search the frame for an object. Return the rectangle of the largest by w * h."""
        gray_img = cv2.cvtColor(original_frame, cv2.COLOR_BGR2GRAY)
        objects = self.cascade.detectMultiScale(gray_img)
        largest = 0, (0, 0, 0, 0)  # area, (x, y, w, h)
        for (x, y, w, h) in objects:
            item_area = w * h
            if item_area > largest[0]:
                largest = item_area, (x, y, w, h)
        return largest[1]

    def make_display(self, display_frame):
        encoded_bytes = camera_stream.get_encoded_bytes_for_frame(display_frame)
        put_output_image(encoded_bytes)

    def process_frame(self, frame):
        (x, y, w, h) = self.find_object(frame)
        cv2.rectangle(frame, (x, y), (x + w, y + h), [255, 0, 0])
        self.make_display(frame)
        return x, y, w, h

    def run(self):
        camera = camera_stream.setup_camera()
        time.sleep(0.1)
        print("Setup Complete")
        for frame in camera_stream.start_stream(camera):
            (x, y, w, h) = self.process_frame(frame)
            self.process_control()
            if self.running and h > self.min_size:
                pan_error = self.center_x - (x + (w / 2))
                pan_value = self.pan_pid.get_value(pan_error)
                self.robot.set_pan(int(pan_value))
                tilt_error = self.center_y - (y + (h / 2))
                tilt_value = self.tilt_pid.get_value(tilt_error)
                self.robot.set_tilt(int(tilt_value))
                print(f"x: {x}, y: {y}, pan_error: {pan_error}, "
                      f"tilt_error: {tilt_error}, pan_value: {pan_value:.2f}, "
                      f"tilt_value: {tilt_value:.2f}")
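Both face-tracking versions call a get_control_instruction() helper that is not shown. A minimal sketch matching the second version's dictionary-based usage, assuming instructions arrive on a queue fed by some control layer (the queue and module layout here are assumptions):

from queue import Queue

# Assumed module-level queue; a web or console control layer would put
# dictionaries like {'command': 'start'} onto it.
instruction_queue = Queue()

def get_control_instruction():
    """Return the next instruction dict, or None if nothing is waiting."""
    if instruction_queue.empty():
        return None
    return instruction_queue.get()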
"""This behavior will turn to seek north, and then drive that way""" from robot_imu import RobotImu, ImuFusion from delta_timer import DeltaTimer from pid_controller import PIController from robot import Robot import imu_settings imu = RobotImu(mag_offsets=imu_settings.mag_offsets, gyro_offsets=imu_settings.gyro_offsets) fusion = ImuFusion(imu) timer = DeltaTimer() pid = PIController(0.7, 0.01) robot = Robot() base_speed = 70 # Lets head for this heading heading_set_point = 0 while True: dt, elapsed = timer.update() fusion.update(dt) heading_error = fusion.yaw - heading_set_point steer_value = pid.get_value(heading_error, delta_time=dt) print(f"Error: {heading_error}, Value:{steer_value:2f}, t: {elapsed}") robot.set_left(base_speed + steer_value) robot.set_right(base_speed - steer_value)