def wall_follow_demo():
    # Demo: drive until an obstacle is seen, then wall-follow it.
    # NOTE(review): Python 2 code (bare `print` statements) targeting the
    # r-one robot API -- `rone`, `hba`, and the subsystem modules are
    # project-provided; sys.time() is presumably the robot's clock, not
    # stdlib `sys` -- confirm against the platform docs.

    # Initialize subsystems; velocity gains (kff, kff_offset, kp, ki) are
    # hard-coded here.
    velocity.init(0.22, 40, 0.5, 0.1)
    leds.init()
    pose.init()
    motion.init()
    neighbors.init(NBR_PERIOD)
    state = STATE_IDLE
    wall_time = 0
    while True:
        # Do updates
        leds.update()
        pose.update()
        velocity.update()
        new_nbrs = neighbors.update()
        nbrList = neighbors.get_neighbors()
        # Default to stopped each cycle; the FSM below may override.
        tv = 0
        rv = 0
        # this is the main finite-state machine
        if state == STATE_IDLE:
            # Idle: red circle pattern; wait for the 'r' button press to start.
            leds.set_pattern('r', 'circle', LED_BRIGHTNESS)
            if new_nbrs:
                print "idle"
            if rone.button_get_value('r'):
                state = STATE_LOOK_FOR_WALL
        elif state == STATE_LOOK_FOR_WALL:
            # Drive straight ahead until an obstacle is detected.
            leds.set_pattern('r', 'blink_fast', LED_BRIGHTNESS)
            if new_nbrs:
                print "look for wall"
            tv = MOTION_TV
            obs = neighbors.get_obstacles()
            if (obs != None):
                state = STATE_WALL_FOLLOW
        elif state == STATE_WALL_FOLLOW:
            # Follow the wall at half speed; if the follower reports inactive
            # for longer than WALL_TIMEOUT, go back to searching.
            leds.set_pattern('b', 'blink_fast', LED_BRIGHTNESS)
            if new_nbrs:
                print "wall follow"
            # follow the wall
            (tv, rv, active) = wall_follow(MOTION_TV / 2)
            if active == True:
                # Refresh the timestamp while the wall is still being tracked.
                wall_time = sys.time()
            if sys.time() > (wall_time + WALL_TIMEOUT):
                state = STATE_LOOK_FOR_WALL
        # end of the FSM
        # set the velocities
        velocity.set_tvrv(tv, rv)
        #set the message
        hba.set_msg(0, 0, 0)
def __init__(self, left_motor: ctre.CANTalon, right_motor: ctre.CANTalon, **kwargs):
    """
    Represents a drivetrain that uses CANTalons and so manages those advanced features.

    :param left_motor: CANTalon controlling the left side of the drive
    :param right_motor: CANTalon controlling the right side of the drive
    :param kwargs: optional tuning overrides:
        ``robot_width`` (default 29.25/12), ``max_radius`` (default 10),
        ``wheel_diameter`` (default 4), ``max_speed`` (default 13)
        -- units assumed feet/inches from the defaults; TODO confirm.
    """
    Subsystem.__init__(self)
    wpilib.MotorSafety.__init__(self)

    # Physical characteristics, overridable via kwargs.
    self.robot_width = kwargs.pop("robot_width", 29.25 / 12)
    self.max_turn_radius = kwargs.pop("max_radius", 10)
    self.wheel_diameter = kwargs.pop("wheel_diameter", 4)
    self.max_speed = kwargs.pop("max_speed", 13)

    self.ahrs = AHRS.create_i2c()

    self._left_motor = left_motor
    self._right_motor = right_motor

    # State for the simulation-only drivetrain model (see _update_model).
    self._model_left_dist = 0
    self._model_right_dist = 0
    self._model_last_time = robot_time.millis()

    # BUGFIX: pose.init() used to be called twice with conflicting arguments
    # (an earlier call here used gyro_callback=self.get_heading_rads and no
    # encoder_factor, and was then completely overridden by the later call
    # below). The dead first call has been removed; only the final, effective
    # configuration remains.
    dashboard2.graph("Pose X", lambda: pose.get_current_pose().x)
    dashboard2.graph("Pose Y", lambda: pose.get_current_pose().y)
    dashboard2.graph("Distance to target",
                     lambda: pose.get_current_pose().distance(mathutils.Vector2(6, -4)))

    self._max_output = 1
    self._mode = SmartDrivetrain.Mode.PercentVbus
    self.set_mode(self._mode)

    if wpilib.hal.isSimulation():
        # In simulation, integrate a simple drivetrain model on a background
        # thread instead of reading real sensors.
        model_thread = threading.Thread(target=self._update_model)
        model_thread.start()

    # Motor safety
    self.setSafetyEnabled(True)

    # Pose tracker: real gyro only off-simulation; encoder_factor converts
    # native units via the gear ratio helper.
    pose.init(left_encoder_callback=self.get_left_distance,
              right_encoder_callback=self.get_right_distance,
              gyro_callback=(self.get_heading if not wpilib.hal.isSimulation() else None),
              wheelbase=self.robot_width,
              encoder_factor=self.get_fps_rpm_ratio())
    dashboard2.graph("Heading", lambda: pose.get_current_pose().heading * 180 / math.pi)
def init(kff, kff_offset, kp, ki):
    # Initialize all robot subsystems, forwarding the velocity-controller
    # gains to velocity.init().
    #
    # kff        -- feed-forward gain
    # kff_offset -- feed-forward offset
    # kp, ki     -- proportional and integral gains
    # NOTE(review): gain semantics inferred from parameter names -- confirm
    # against the velocity module's documentation.
    velocity.init(kff, kff_offset, kp, ki)
    leds.init()
    pose.init()
    motion.init()
    neighbors.init(NBR_PERIOD)
# Log to a file with millisecond timestamps and thread names.
logging.basicConfig(
    filename="logs/pose.log",
    level=logging.DEBUG,
    format='%(asctime)s.%(msecs)03d %(threadName)s %(levelname)-8s %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')

from flask import Flask
from flask import send_file, send_from_directory
from flask import jsonify
from flask import request

import pose

app = Flask(__name__)
pose.init()


def get_request_file(request):
    """Return the uploaded 'file' part of a Flask request as a uint8 ndarray.

    :param request: incoming Flask request (shadows the module-level import;
        callers pass the request explicitly)
    :return: 1-D numpy array of the raw uploaded bytes, or None when the
        request carries no 'file' part.
    """
    if 'file' not in request.files:
        return None
    file = request.files['file']
    input_file = io.BytesIO()
    file.save(input_file)
    # BUGFIX: np.fromstring is deprecated for binary input; np.frombuffer is
    # the supported replacement (note it returns a read-only view of the
    # bytes, which is fine for decoding).
    return np.frombuffer(input_file.getvalue(), dtype=np.uint8)


def send_pic(pic):
    _, jpg = cv.imencode(".jpg", pic)
    out_file = io.BytesIO()