def run(self):
    # Create an RMHC SLAM object with a laser model and optional robot model
    self.slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    # Set up a SLAM display
    self.viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

    command_handlers = {
        Message.lidar_data: self.update_map,
        Message.get_map: self.process_get_map_command,
        Message.reset_map: self.process_reset_map_command,
    }

    self.message_bus.subscribe(topic=self.lidar_updates_topic_name, subscriber=self.name)
    self.config.log.info("slam subscribed to %s" % self.lidar_updates_topic_name)

    while True:
        msg = self.message_bus.receive(self.name)
        handler = command_handlers.get(msg.cmd, self.handle_invalid_command_id)
        handler(msg)

    return
def main():

    # Connect to Lidar unit
    lidar = Lidar(LIDAR_DEVICE)

    # Create an RMHC SLAM object with a laser model and optional robot model
    slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    # Set up a SLAM display
    viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

    # Initialize an empty trajectory
    trajectory = []

    # Initialize empty map
    mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    # Create an iterator to collect scan data from the RPLidar
    iterator = lidar.iter_scans()

    # We will use these to store previous scan in case current scan is inadequate
    previous_distances = None
    previous_angles = None

    # First scan is crap, so ignore it
    next(iterator)

    while True:

        # Extract (quality, angle, distance) triples from current scan
        items = [item for item in next(iterator)]

        # Extract distances and angles from triples
        distances = [item[2] for item in items]
        print(distances)
        angles = [item[1] for item in items]

        # Update SLAM with current Lidar scan and scan angles if adequate
        if len(distances) > MIN_SAMPLES:
            slam.update(distances, scan_angles_degrees=angles)
            previous_distances = distances.copy()
            previous_angles = angles.copy()

        # If not adequate, use previous
        elif previous_distances is not None:
            slam.update(previous_distances, scan_angles_degrees=previous_angles)

        # Get current robot position
        x, y, theta = slam.getpos()

        # Get current map bytes as grayscale
        slam.getmap(mapbytes)

        # Display map and robot pose, exiting gracefully if user closes it
        if not viz.display(x / 1000., y / 1000., theta, mapbytes):
            exit(0)

    # Shut down the lidar connection
    lidar.stop()
    lidar.disconnect()
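Most of the excerpts in this collection assume module-level imports and constants that were stripped out. The following preamble is a minimal sketch, modeled on BreezySLAM's rpslam.py example; the device path and size values are assumptions and vary per project.

# Assumed preamble for the RPLidar + BreezySLAM examples (values are illustrative).
from breezyslam.algorithms import RMHC_SLAM
from breezyslam.sensors import RPLidarA1 as LaserModel
from rplidar import RPLidar as Lidar
from roboviz import MapVisualizer

LIDAR_DEVICE = '/dev/ttyUSB0'   # serial port of the RPLidar (assumption)
MAP_SIZE_PIXELS = 500           # map resolution in pixels
MAP_SIZE_METERS = 10            # physical side length of the square map
MIN_SAMPLES = 200               # minimum scan points before updating SLAM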
def build(self, laserModel, map_size_pix, map_size_m, initial_position=(-1, -1)):
    # Build the SLAM object from the arguments instead of hard-coded attributes
    slam = RMHC_SLAM(laserModel, map_size_pix, map_size_m)

    # Optionally seed the pose; the caller passes centimeters, Position expects millimeters
    if initial_position != (-1, -1):
        slam.position = pybreezyslam.Position(initial_position[0] * 10,
                                              initial_position[1] * 10, 0)
    return slam
def __init__(self):
    self.flag = 0
    self.pause = 0
    self.lidar = Lidar(LIDAR_DEVICE)
    self.slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)
    #self.viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')
    self.trajectory = []
    self.mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)
    self.iterator = self.lidar.iter_scans()
    self.previous_distances = None
    self.x = 0.0
    self.y = 0.0
    self.theta = 0.0
def __init__(self):
    # Connect to Lidar unit
    self.lidar = lidarReader('/dev/mylidar', 115200)
    self.mover = MotorController()

    # Create an RMHC SLAM object with a laser model and optional robot model
    self.slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    # Set up a SLAM display
    self.viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

    # Initialize empty map
    self.mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    self.left_distance_table = np.zeros(80)
    self.right_distance_table = np.zeros(80)

    self.thread = threading.Thread(target=self.navigate, args=())
    self.thread.start()
def __init__(self):
    super().__init__('slam_node')

    # Create second subscription (SLAM)
    self.subscription2 = self.create_subscription(
        LidarData, 'lidar_data', self.listener_callback_lidar, 100)
    self.subscription2  # prevent unused variable warning

    # Create third subscription (Motor speeds to compute robot pose change)
    self.subscription3 = self.create_subscription(
        MotorsSpeed, 'motors_speed', self.listener_callback_motorsspeed, 1000)
    self.subscription3  # prevent unused variable warning

    # Create subscription to control map_quality
    self.slam_control_sub = self.create_subscription(
        String, 'slam_control', self.slam_control_callback, 1000)

    # Create publisher for the position of the robot, and for the map
    self.publisher_position = self.create_publisher(Position, 'robot_pos', 10)
    self.publisher_map = self.create_publisher(Map, 'world_map', 10)

    # Initialize parameters for SLAM
    laser = LaserModel(detection_margin=DETECTION_MARGIN, offset_mm=OFFSET_MM)
    self.slam = RMHC_SLAM(laser, MAP_SIZE_PIXELS, MAP_SIZE_METERS,
                          map_quality=MAP_QUALITY, hole_width_mm=OBSTACLE_WIDTH_MM,
                          x0_mm=X0, y0_mm=Y0, theta0=0, sigma_xy_mm=DEFAULT_SIGMA_XY_MM)
    self.trajectory = []
    self.mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)
    self.previous_distances = None
    self.previous_angles = None
    self.robot_pose_change = np.array([0.0, 0.0, 0.0])
    self.is_map_quality_high = True

    # DEBUG parameters
    self.map_index = 0
    self.previous_pos = (0, 0, 0)
    self.s = time.time()
    print("SLAM is starting")
def __init__(self, visualize):
    super().__init__('cont')
    self.visualize = visualize
    self.slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)
    if self.visualize:
        self.viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

    self.subscriber = self.create_subscription(LaserScan, '/scan', self.lidarCallback, 10)
    self.mapPublisher = self.create_publisher(OccupancyGrid, '/map', 1)
    self.locPublisher = self.create_publisher(Pose, '/pose', 1)
    print("Setup Finished")

    self.previous_distances = None
    self.previous_angles = None
    self.mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)
    self.i = True
def __init__(self, station, robot_pos):
    self.station = station
    self.robot_pos = robot_pos

    # SLAM
    self.MAP_SIZE_PIXELS = 500
    self.MAP_SIZE_METERS = 10
    self.MIN_SAMPLES = 200
    self.lidar = Lidar('/dev/ttyUSB0')
    self.initial_pos_cm = (0, 0)
    self.slam = SlamFactory.build(LaserModel(), self.MAP_SIZE_PIXELS,
                                  self.MAP_SIZE_METERS, self.initial_pos_cm)
    self.mapbytes = bytearray(self.MAP_SIZE_PIXELS * self.MAP_SIZE_PIXELS)
    self.iterator = self.lidar.iter_scans()
    self.previous_distances = None
    self.previous_angles = None

    # Discard the first scan, which is usually incomplete
    next(self.iterator)
def main():

    lidar = Lidar(device='/dev/ttyACM0')

    slam = RMHC_SLAM(LaserModel(), map_size_pixels=MAP_SIZE_PIXELS,
                     map_size_meters=MAP_SIZE_METERS)

    display = SlamShow(MAP_SIZE_PIXELS, MAP_SIZE_METERS * 1000 / MAP_SIZE_PIXELS, 'SLAM')

    #image_thread = threading.Thread(target=save_image, args=[display])
    #image_thread.start()

    mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    while True:
        slam.update(lidar.getScan())

        x, y, theta = slam.getpos()
        #print(x, y, theta)

        slam.getmap(mapbytes)

        display.displayMap(mapbytes)
        display.setPose(x, y, theta)
        #display.save_image()
        display.save_pgm(mapbytes)

        if not display.refresh():
            exit(0)
def __init__(self, messagequeue):
    # Connect to Lidar unit.
    # When initializing the robot object, we first open the lidar and start
    # reading data from it.
    self.lidar = lidarReader('/dev/mylidar', 115200)
    self.mover = MotorController()

    # Create an RMHC SLAM object with a laser model and optional robot model
    self.slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    # Set up a SLAM display
    self.viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

    # Initialize empty map
    self.mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    self.messages = messagequeue

    # All the functions should run in the same process so that the variables can be shared
    self.thread = threading.Thread(target=self.navigate, args=())

    # The main thread will not wait for this sub-thread at exit. With daemon=True,
    # the sub-thread ends together with the main thread, whether or not its work is finished.
    self.thread.daemon = True
    self.navigating = True
    self.thread.start()
def setup(self):
    GPIO.output(23, GPIO.HIGH)
    sleep(1)
    self.lidar = RPLidar(self.port)
    self.lidar.connect()
    self.lidar.start_motor()

    self.slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS,
                          map_quality=1, max_search_iter=50)
    self.mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    # Keep retrying until the lidar delivers a valid scan
    success = False
    while not success:
        try:
            self.iterator = self.lidar.iter_scans()
            next(self.iterator)  # first scan is unreliable, so discard it
            success = True
        except Exception as e:
            print('retrying')
            print(e)
            sleep(0.5)
async def main():

    async with websockets.connect(SERVER_URL, max_size=None, max_queue=None,
                                  ping_interval=None, ping_timeout=None) as websocket:

        # Connect to Lidar unit
        lidar = Lidar(LIDAR_DEVICE)

        # Create an RMHC SLAM object with a laser model and optional robot model
        slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

        # Initialize an empty trajectory
        trajectory = []

        # Initialize empty map
        mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

        # Create an iterator to collect scan data from the RPLidar
        iterator = lidar.iter_scans()

        # We will use these to store previous scan in case current scan is inadequate
        previous_distances = None
        previous_angles = None

        # First scan is crap, so ignore it
        next(iterator)

        while True:

            # Extract (quality, angle, distance) triples from current scan
            items = [item for item in next(iterator)]

            # Extract distances and angles from triples
            distances = [item[2] for item in items]
            angles = [item[1] for item in items]

            # Update SLAM with current Lidar scan and scan angles if adequate
            if len(distances) > MIN_SAMPLES:
                slam.update(distances, scan_angles_degrees=angles)
                previous_distances = distances.copy()
                previous_angles = angles.copy()

            # If not adequate, use previous
            elif previous_distances is not None:
                slam.update(previous_distances, scan_angles_degrees=previous_angles)

            # Get current robot position
            x, y, theta = slam.getpos()

            # Get current map bytes as grayscale
            slam.getmap(mapbytes)

            # Retry sending the pose and map until the send succeeds
            while True:
                try:
                    await websocket.send(struct.pack(STRUCT_FORMAT, x, y, theta, mapbytes))
                except Exception:
                    pass
                else:
                    break

        # Shut down the lidar connection
        lidar.stop()
        lidar.disconnect()
        # save to a png file
        fig.savefig(image_filename)

        return self._refresh()

# image file
image_filename = '/home/ubuntu/RCWeb/dash'

HOST, PORT = "192.168.0.18", 50007
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))

# Create an RMHC SLAM object with a laser model and optional robot model
slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

# Set up a SLAM display
viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

# Initialize an empty trajectory
trajectory = []

# Initialize empty map
mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

# We will use these to store previous scan in case current scan is inadequate
previous_distances = None
previous_angles = None

# Pin Definitions:
img_dict[0] = cv2.flip(cv2.imread('icons8-street-view-50.jpg'), 0)
img_dict[1] = cv2.flip(cv2.imread('icons8-staircase-50.jpg'), 0)
img_dict[2] = cv2.flip(cv2.imread('icons8-fire-extinguisher-50.jpg'), 0)
img_dict[3] = cv2.flip(cv2.imread('icons8-exit-sign-50.jpg'), 0)
img_dict[4] = cv2.flip(cv2.imread('icons8-circuit-50.jpg'), 0)
img_dict[5] = cv2.flip(cv2.imread('icons8-elevator-50.jpg'), 0)
img_dict[6] = cv2.flip(cv2.imread('icons8-test-tube-50.jpg'), 0)
img_dict[7] = cv2.flip(cv2.imread('icons8-biohazard-26.jpg'), 0)
img_dict[8] = cv2.flip(cv2.imread('icons8-radio-active-50.jpg'), 0)
img_dict[9] = cv2.flip(cv2.imread('icons8-door-opened-50.jpg'), 0)

temp = np.load('scan.npy')
print(temp)

# Connect to Lidar and initialize variables
lidar = Lidar(args.device)
slam = RMHC_SLAM(LaserModel(), args.pixelmapsize, args.metermapsize)
display = SlamShow(args.pixelmapsize, args.metermapsize * 1000 / args.pixelmapsize, 'SLAM')
trajectory = []
mapbytes = bytearray(args.pixelmapsize * args.pixelmapsize)
iterator = lidar.iter_scans()
previous_distances = None
previous_angles = None
next(iterator)

prev_items = None
labels_dict = {}
np.save("label.npy", np.array([]))

### Primary SLAM Loop
while True:
def init_slam(self):
    self.MAP_SIZE_PIXELS = cfg.MAP_SIZE_PIXELS
    self.MAP_SIZE_METERS = cfg.MAP_SIZE_METERS
    self.slam = RMHC_SLAM(LaserModel(), self.MAP_SIZE_PIXELS, self.MAP_SIZE_METERS)
    self.mapbytes = bytearray(self.MAP_SIZE_PIXELS * self.MAP_SIZE_PIXELS)
def __init__(self, mode='xbox',
             initialize_imu=True,
             imu_bus=0,
             initialize_camera=False,
             initialize_lidar=False,
             slam_map_meters=15,
             slam_map_pixels=1000,
             max_speed_limit=5,
             max_duty_cycle=0.2,
             rc_control='VESC_CONTROL',
             rc_communication='/dev/ttyTHS1'):
    """
    :param mode: Control mode for the vehicle. Possible values are:

    >- JetsonEV.xbox_mode
    >- JetsonEV.xbox_direct_mode
    >- JetsonEV.xbox_forwarding_mode

    >See [here](./modes.html) for more details on each mode.

    :param initialize_imu: Whether or not to initialize imu

    :param imu_bus: I2C bus number to communicate with the IMU

    :param initialize_camera: Whether or not to initialize camera

    :param initialize_lidar: Whether or not to initialize rplidar

    :param slam_map_meters: The distance in meters to use for the SLAM map size. A square map
                            is made using this distance for each side.

    :param slam_map_pixels: The SLAM square map size in pixels. This controls the resolution
                            of the map.

    :param max_speed_limit: Max speed in m/s when using speed controller.

    :param max_duty_cycle: Max duty cycle to send when using direct duty cycle control.

    :param rc_control: Type of hardware control for rc components (motor and steering servo).
                       Can be either JetsonEV.VESC_CONTROL or JetsonEV.ARDUINO_CONTROL.

    :param rc_communication: Port or bus to use for rc_control. If using VESC, this should
                             point to the UART port. If using USB for UART control, set
                             rc_communication = 'USB' (linux only). If rc_control=JetsonEV.ARDUINO,
                             this should be an integer for the I2C bus number.

    * * *
    """
    JetsonEV.__running_car__ = self
    self.rc_control = rc_control

    self._timed_tasks = []
    """private list to hold all existing TimedTask objects"""

    self._output_sockets = []
    """private list to hold all existing output TCP sockets objects"""

    self.max_speed_limit = max_speed_limit
    self.duty_cycle_limit = max_duty_cycle

    self._speed = [0]
    self._speed_lock = threading.Lock()

    '''************** Initiate Steering and Motor ****************'''
    try:
        self._initialize_rc_control(rc_communication)
    except OSError as e:
        print(e)
    '''*********************************************************'''

    '''****************** Initiate Camera **********************'''
    self.camera = None
    if initialize_camera:
        print('initiating Camera... ', end='')
        try:
            camera = Camera(flip=2)
            self._latest_frame = [0]
            self._latest_frame_lock = threading.Lock()

            self.camera_socket = SendSocket(tcp_port=JetsonEV.CAMERA_PORT, tcp_ip=ip_address)
            self._output_sockets.append(self.camera_socket)

            def update_frame(frame):
                self.latest_frame = frame
                self.camera_socket.send_data(frame)

            # wrap the object in a TimedTask set to update at fixed rate
            self.camera = self.add_timed_task(calling_function=camera.read,
                                              object_to_wrap=camera,
                                              forwarding_function=update_frame,
                                              sampling_rate=120)
        except Exception as e:
            print(e)
        print('success')
    '''*********************************************************'''

    '''******************* Initiate IMU ************************'''
    self.imu: TimedTask
    if initialize_imu:
        try:
            imu = MPU6050(bus=imu_bus, device_addr=0x68)  # create the imu device
            print('Successfully connected to IMU')
            self._imu_values = [0]
            self._imu_values_lock = threading.Lock()

            # wrap the object in a TimedTask set to update at fixed rate
            self.imu = self.add_timed_task(calling_function=self._get_imu_values,
                                           object_to_wrap=imu,
                                           sampling_rate=50)

            self.imu_socket = SendSocket(tcp_port=JetsonEV.IMU_PORT, tcp_ip=ip_address)
            self._output_sockets.append(self.imu_socket)

            imu_config_dict = get_calibration()
            self._imu_rotation = np.array(imu_config_dict['rotation_matrix'])
            self._gravity_offset = imu_config_dict['gravity']
            self._gyro_offset = np.array(imu_config_dict['gyro_offsets'])
        except Exception as e:
            print(e)
            print('ERROR: Could not connect to IMU')
            self.imu = None
            if hasattr(self, 'imu_socket'):
                self._output_sockets.remove(self.imu_socket)
    '''*********************************************************'''

    '''****************** Initiate Lidar ***********************'''
    self.lidar = None
    self._theta_lock = threading.Lock()
    self._theta = 0
    self._x_lock = threading.Lock()
    self._x = 0
    self._y_lock = threading.Lock()
    self._y = 0
    self._map_lock = threading.Lock()
    self._map = bytearray(slam_map_pixels * slam_map_pixels)
    self._breezyslam = RMHC_SLAM(LaserModel(),
                                 map_size_meters=slam_map_meters,
                                 map_size_pixels=slam_map_pixels,
                                 hole_width_mm=500)

    if initialize_lidar:
        started = threading.Event()  # threading event to watch if lidar started

        # This is done to prevent the script from locking up if the lidar needs to be restarted
        def check_lidar():
            start_time = time.time()
            while not started.isSet():
                time.sleep(0.2)
                if time.time() - start_time > 10:
                    os.kill(os.getpid(), signal.SIGTERM)
                    raise Exception('Unable to start Lidar.')

        initialize_thread = threading.Thread(target=check_lidar)
        initialize_thread.start()

        try:
            print('connecting to lidar...', end='')
            lidar = RPLidar(scan_mode=0)
            started.set()
            try:
                initialize_thread.join()
            except Exception as e:
                raise e
        except Exception as e:
            print(e)
            print('ERROR: Unable to connect to lidar sensor.')
            os.kill(os.getpid(), signal.SIGTERM)

        self._latest_lidar_scan = lidar.get_scan_as_xy(filter_quality=True)
        self._latest_lidar_scan_lock = threading.Lock()
        print('success')

        self.lidar_socket = SendSocket(tcp_port=JetsonEV.LIDAR_PORT, tcp_ip=ip_address)
        self._output_sockets.append(self.lidar_socket)

        self.slam_socket = SendSocket(tcp_port=JetsonEV.SLAM_PORT, tcp_ip=ip_address)
        self._output_sockets.append(self.slam_socket)

        self._update_map = [True]
        self._save_criteria_dist = 100   # must move ** mm before updating map
        self._save_criteria_angle = 20   # must rotate more than ** deg before updating map

        def update_scan(scan):
            scan = np.array(scan)
            self.latest_lidar_scan = scan

            self._breezyslam.update(scan[:, 1].tolist(),
                                    scan_angles_degrees=scan[:, 0].tolist(),
                                    should_update_map=self._update_map[0])

            x, y, theta = self._breezyslam.getpos()
            dist = np.sqrt((x - self.x)**2 + (y - self.y)**2)
            del_theta = abs(theta - self.theta)
            if dist > self._save_criteria_dist or del_theta > self._save_criteria_angle:
                self._update_map[0] = True
            else:
                self._update_map[0] = False

            self.x = x
            self.y = y
            self.theta = theta
            with self._map_lock:
                self._breezyslam.getmap(self._map)

            self.lidar_socket.send_data(scan)
            self.slam_socket.send_data({'x': self.x,
                                        'y': self.y,
                                        'theta': self.theta,
                                        'map': np.frombuffer(self.map, dtype=np.uint8),
                                        'meters': slam_map_meters,
                                        'pixels': slam_map_pixels})

        self.lidar = self.add_timed_task(calling_function=lidar.get_scan_as_vectors,
                                         calling_func_args=[True],
                                         object_to_wrap=lidar,
                                         forwarding_function=update_scan,
                                         sampling_rate=5)

        print("initializing SLAM map")
        for _ in range(10):
            scan = np.array(lidar.get_scan_as_vectors(True))
            self._breezyslam.update(scan[:, 1].tolist(),
                                    scan_angles_degrees=scan[:, 0].tolist(),
                                    should_update_map=True)
            time.sleep(0.05)
    '''*********************************************************'''

    '''************ Initiate XBOX Controller *******************'''
    self.xbox_controller: Xbox360Controller
    self._initialize_xbox_controller(mode)
    '''*********************************************************'''

    '''***************** Setup speed control *******************'''
    self._desired_speed = [0]
    self.last_error = 0
    self._current_duty = 0
    self._integrated_speed_error = 0
    self._desired_speed_lock = threading.Lock()
    self._speed_control_task = TimedTask(calling_function=self._speed_control,
                                         sampling_rate=20)

    if mode in [JetsonEV.xbox_mode]:
        # define an in between function to convert percentage from xbox controller
        # to speed using max speed limit.
        def percent_to_speed(percent):
            self.set_motor_speed(percent * self.max_speed_limit)

        self._xbox_speed_func = percent_to_speed
        self.duty_cycle_limit = 1
        self.start_speed_control()

    elif mode in [JetsonEV.xbox_direct_mode]:
        self._xbox_speed_func = self.set_motor_percent
    '''*********************************************************'''

    print('Starting Vehicle...', end='')
    self.start_sockets()
    self.start_all_tasks()
    print('running.')
import paho.mqtt.client as mqtt
import math
import matplotlib.pyplot as plt
import numpy as np
import time

from breezyslam.algorithms import RMHC_SLAM
from breezyslam.sensors import RPLidarA1 as LaserModel
from PIL import Image

MAP_SIZE_PIXELS = 300
MAP_SIZE_METERS = 50
MIN_SAMPLES = 200

slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS, random_seed=1)
mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)
Dist = {}


def mm2pix(mm):
    return int(mm / (MAP_SIZE_METERS * 1000. / MAP_SIZE_PIXELS))


def on_connect(self, client, userdata, rc):
    print("MQTT Connected.")
    self.subscribe("/ldb01/mapdata")


def on_message(client, userdata, msg):
    # print("Messsage")
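The mm2pix helper above is what several of the later snippets use to paint the robot pose or trajectory into the row-major grayscale map buffer. A minimal sketch of that pattern, using the constants and mapbytes defined in this excerpt; the pose values are purely illustrative.

# Mark a pose (in millimeters, as returned by slam.getpos()) as a black pixel in the map.
x_mm, y_mm = 12500.0, 30000.0            # illustrative pose, not from a real scan
x_pix, y_pix = mm2pix(x_mm), mm2pix(y_mm)
mapbytes[y_pix * MAP_SIZE_PIXELS + x_pix] = 0   # 0 = black, 255 = free space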
def start_simulation(self):
    self.set_motor_speed(0.8, 0)

    (errorCode, detectionState, detectedPoint, detectedObjectHandle,
     detectedSurfaceNormalVector) = vrep.simxReadProximitySensor(
        self.client_id, self.proximity_handle, vrep.simx_opmode_streaming)

    e, data = vrep.simxGetStringSignal(self.client_id, "lidarMeasuredData",
                                       vrep.simx_opmode_streaming)
    self.check_error_code(e, "simxGetStringSignal lidar error")

    # Create an RMHC SLAM object with a laser model and optional robot model
    slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    # Set up a SLAM display
    viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

    # Initialize empty map
    mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    res, v0 = vrep.simxGetObjectHandle(self.client_id, 'Vision_sensor',
                                       vrep.simx_opmode_oneshot_wait)
    res, resolution, image = vrep.simxGetVisionSensorImage(
        self.client_id, v0, 0, vrep.simx_opmode_streaming)

    while vrep.simxGetConnectionId(self.client_id) != -1:
        is_detected, distance, vector = self.get_proximity_data()
        if is_detected:
            print("distance: {} {}".format(distance, vector))

        e, lidar_data, dist_data = self.get_lidar_data()
        point_data_len = len(lidar_data)
        dist_data_len = len(dist_data)
        dist_data = dist_data[::-1]
        print("points len: {}; dist len: {}".format(point_data_len, dist_data_len))

        self.draw_lidar_data(dist_data[0:-2])

        # Update SLAM with current Lidar scan
        slam.update(dist_data[0:-2])

        # Get current robot position
        x, y, theta = slam.getpos()
        print("x: {}; y: {}; theta: {}".format(x, y, theta))

        # Get current map bytes as grayscale
        slam.getmap(mapbytes)

        # Display map and robot pose, exiting gracefully if user closes it
        if not viz.display(x / 1000., y / 1000., theta, mapbytes):
            exit(0)

        err, resolution, image = vrep.simxGetVisionSensorImage(
            self.client_id, v0, 0, vrep.simx_opmode_buffer)
        if err == vrep.simx_return_ok:
            img = np.array(image, dtype=np.uint8)
            img.resize([resolution[1], resolution[0], 3])
            fimg = cv2.flip(img, 0)
            cv2.imshow('vision sensor', fimg)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        elif err == vrep.simx_return_novalue_flag:
            print("no image yet")
        else:
            print(err)

        simulationTime = vrep.simxGetLastCmdTime(self.client_id)
        time.sleep(0.1)

    vrep.simxFinish(self.client_id)
def slam(currentProgram):

    trajectory = []

    # Connect to Lidar unit, probing which port it is attached to
    try:
        lidar = Lidar(PORT0)
        currentProgram.roombaPort = PORT1
        iterator = lidar.iter_scans(1000)
        lidar.stop()
        next(iterator)
        print("Lidar 0, Roomba 1")
    except:
        print("Roomba 0, Lidar 1")
        lidar.stop()
        lidar.disconnect()
        lidar = Lidar(PORT1)
        currentProgram.roombaPort = PORT0
        iterator = lidar.iter_scans(1000)
        lidar.stop()
        next(iterator)

    # Create an RMHC SLAM object with a laser model and optional robot model
    slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    trajectory = []
    previous_distances = None
    previous_angles = None

    # start time
    start_time = time.time()
    prevTime = start_time
    trigger_start = -100

    while not currentProgram.stop:
        SLAMvel = currentProgram.SLAMvals[0]
        SLAMrot = currentProgram.SLAMvals[1]

        # Extract (quality, angle, distance) triples from current scan
        items = [item for item in next(iterator)]

        # Extract distances and angles from triples
        distances = [item[2] for item in items]
        angles = [item[1] for item in items]
        l = list(zip(angles, distances))
        filtered = list(filter(lambda e: (e[0] >= 45 and e[0] <= 135) and e[1] < 300, l))
        # s = sorted(l, key=lambda e: e[0])

        if (len(filtered) > constants.POINTS_THRESHOLD) and (time.time() - trigger_start > 5):
            currentProgram.programStatus = constants.Status.LIDAR_OBSTACLE
            topleft = list(filter(lambda e: (e[0] >= 105 and e[0] <= 135), filtered))
            front = list(filter(lambda e: (e[0] >= 75 and e[0] <= 105), filtered))
            topright = list(filter(lambda e: (e[0] >= 45 and e[0] <= 75), filtered))
            if len(topleft) > 2:
                currentProgram.obstacleLocation[0] = 1
            if len(front) > 2:
                currentProgram.obstacleLocation[1] = 1
            if len(topright) > 2:
                currentProgram.obstacleLocation[2] = 1
            trigger_start = time.time()

        # Update SLAM with current Lidar scan and scan angles if adequate
        if len(distances) > MIN_SAMPLES:
            # print("using speeds ", SLAMvel, SLAMrot)
            dt = time.time() - prevTime
            slam.update(distances, pose_change=(SLAMvel * dt, SLAMrot * dt, dt),
                        scan_angles_degrees=angles)
            prevTime = time.time()
            previous_distances = copy.copy(distances)
            previous_angles = copy.copy(angles)
            # print("updated - if")

        # If not adequate, use previous
        elif previous_distances is not None:
            # print("using speeds ", SLAMvel, SLAMrot)
            dt = time.time() - prevTime
            slam.update(previous_distances, pose_change=(SLAMvel * dt, SLAMrot * dt, dt),
                        scan_angles_degrees=previous_angles)
            prevTime = time.time()
            # print("updated - else")

        # Get current robot position
        x, y, theta = slam.getpos()
        [x_pix, y_pix] = [mm2pix(x), mm2pix(y)]
        currentProgram.robot_pos = [y_pix // constants.CHUNK_SIZE, x_pix // constants.CHUNK_SIZE]
        # print("robot_pos - ", x_pix // constants.CHUNK_SIZE, y_pix // constants.CHUNK_SIZE, theta)

        # Get current map bytes as grayscale
        slam.getmap(currentProgram.mapbytes)

    # Save the map and shut down the lidar connection
    pgm_save('ok.pgm', currentProgram.mapbytes, (MAP_SIZE_PIXELS, MAP_SIZE_PIXELS))
    lidar.stop()
    lidar.disconnect()
async def main():

    #---------------------------------------
    # Setup Lidar unit and SLAM
    #---------------------------------------

    # Connect to Lidar unit
    lidar = nanoLidar.nanoLidar(host)

    # get firmware info
    print("Lidar Version:")
    print(lidar.getInfo())

    # Create an RMHC SLAM object with a laser model and optional robot model
    slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    # Set up a SLAM display
    display = SlamShow(MAP_SIZE_PIXELS, MAP_SIZE_METERS * 1000 / MAP_SIZE_PIXELS, 'SLAM')

    # Initialize an empty trajectory
    trajectory = []

    # Initialize empty map
    mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    #---------------------------------------
    # Setup Robot unit
    #---------------------------------------

    # Connect to robot unit
    robot = robotPosition.robotPosition(host)
    robot.powerOn()

    # get firmware info
    print("Chassis Version:")
    print(robot.getInfo())

    #---------------------------------------
    # main loop
    #---------------------------------------
    run = True
    while run:
        # Update SLAM with current Lidar scan, using first element of (scan, quality) pairs
        slam.update([pair[0] for pair in lidar.getScan()])

        # Get current robot position
        x, y, theta = slam.getpos()

        # Get current map bytes as grayscale
        slam.getmap(mapbytes)

        display.displayMap(mapbytes)
        display.setPose(x, y, theta)

        # Exit on ESCape
        key = display.refresh()
        if key is not None and (key & 0x1A):
            run = False

        # Movement control
        distance = int(input("Distance [mm]: "))
        angle = int(input("Turn angle [degrees]: "))
        (xp, yp, theta_deg, theta_rad) = robot.setPosition(distance, angle)
        if int(input("Carry on (1/0): ")) == 0:
            run = False

    # power off stepper motors
    robot.powerOff()
def slamThread():
    global SLAMrot
    global SLAMvel

    # Connect to Lidar unit
    lidar = Lidar(LIDAR_DEVICE)

    # Create an RMHC SLAM object with a laser model and optional robot model
    slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS)

    # Set up a SLAM display
    viz = MapVisualizer(MAP_SIZE_PIXELS, MAP_SIZE_METERS, 'SLAM')

    # Initialize an empty trajectory
    trajectory = []

    # Initialize empty map
    mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    # Create an iterator to collect scan data from the RPLidar
    iterator = lidar.iter_scans()

    # We will use these to store previous scan in case current scan is inadequate
    previous_distances = None
    previous_angles = None

    # First scan is crap, so ignore it
    next(iterator)

    # start time
    start_time = time.time()
    prevTime = start_time
    print("start")

    while True:

        # Extract (quality, angle, distance) triples from current scan
        items = [item for item in next(iterator)]

        # Extract distances and angles from triples
        distances = [item[2] for item in items]
        angles = [item[1] for item in items]

        # Update SLAM with current Lidar scan and scan angles if adequate
        if len(distances) > MIN_SAMPLES:
            slam.update(distances, pose_change=(SLAMvel, SLAMrot, time.time() - prevTime),
                        scan_angles_degrees=angles)
            prevTime = time.time()
            previous_distances = copy.copy(distances)
            previous_angles = copy.copy(angles)
            print("updated - if")
            print((SLAMvel, SLAMrot, time.time() - prevTime))

        # If not adequate, use previous
        elif previous_distances is not None:
            slam.update(previous_distances, pose_change=(SLAMvel, SLAMrot, time.time() - prevTime),
                        scan_angles_degrees=previous_angles)
            prevTime = time.time()
            print("updated - else")
            print((SLAMvel, SLAMrot, time.time() - prevTime))

        # Get current robot position
        x, y, theta = slam.getpos()

        # Add new position to trajectory
        trajectory.append((x, y))

        # Get current map bytes as grayscale
        slam.getmap(mapbytes)

        if time.time() - start_time > 30:
            # Put trajectory into map as black pixels
            for coords in trajectory:
                x_mm, y_mm = coords
                x_pix = mm2pix(x_mm)
                y_pix = mm2pix(y_mm)
                mapbytes[y_pix * MAP_SIZE_PIXELS + x_pix] = 0

            pgm_save('ok.pgm', mapbytes, (MAP_SIZE_PIXELS, MAP_SIZE_PIXELS))
            exit(0)
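This thread relies on two helpers that the excerpt does not show: mm2pix, defined the same way as in the MQTT example earlier in this collection, and pgm_save for writing the map to disk. The BreezySLAM examples provide a similar PGM helper; the version below is only a hedged sketch with the same call signature used here, not the library's own implementation.

# Assumed helpers for slamThread (signatures inferred from the calls above).
def mm2pix(mm):
    # Convert a millimeter coordinate into a map pixel index
    return int(mm / (MAP_SIZE_METERS * 1000. / MAP_SIZE_PIXELS))

def pgm_save(filename, imgbytes, imgsize):
    # Write the grayscale map as a binary PGM (P5) file
    width, height = imgsize
    with open(filename, 'wb') as f:
        f.write(('P5 %d %d 255\n' % (width, height)).encode())
        f.write(imgbytes)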
def run_slam(LIDAR_DEVICE, MAP_SIZE_PIXELS, MAP_SIZE_METERS, mapbytes, posbytes,
             odometry, command, state, STATES):

    MIN_SAMPLES = 200

    # Connect to Lidar unit
    lidar = start_lidar(LIDAR_DEVICE)

    # Create an RMHC SLAM object with a laser model and optional robot model
    slam = RMHC_SLAM(LaserModel(), MAP_SIZE_PIXELS, MAP_SIZE_METERS,
                     map_quality=_DEFAULT_MAP_QUALITY,
                     hole_width_mm=_DEFAULT_HOLE_WIDTH_MM,
                     random_seed=None,
                     sigma_xy_mm=_DEFAULT_SIGMA_XY_MM,
                     sigma_theta_degrees=_DEFAULT_SIGMA_THETA_DEGREES,
                     max_search_iter=_DEFAULT_MAX_SEARCH_ITER)

    # Initialize empty map
    mapbytes_ = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)

    # Send command to start scan
    lidar.start_scan()

    # We will use these to store previous scan in case current scan is inadequate
    previous_distances = None
    previous_angles = None

    pose_change = (0, 0, 0)
    pose_change_find = (0, 0, 0)
    iter_times = 1

    while command[0] != b'qa':

        # read a scan
        items = lidar.scan()

        # Extract distances and angles from triples
        distances = [item[2] for item in items]
        angles = [item[1] for item in items]

        if state[0] == STATES['stop']:
            if command[0] == b'lm':
                mapbytes_find = mapbytes
                pose_change_find = find_slam_pos(lidar, MAP_SIZE_PIXELS, MAP_SIZE_METERS,
                                                 mapbytes_find, pose_change_find,
                                                 iter_times / 2.0, slam)
                iter_times = iter_times + 1.0
                print(pose_change_find)

                pose_change = pose_change_find
                pose_change_find = (0, 0, 0)
                '''
                items = lidar.scan()

                # Extract distances and angles from triples
                distances = [item[2] for item in items]
                angles = [item[1] for item in items]
                slam.update(distances, pose_change, scan_angles_degrees=angles)
                '''
                if iter_times > 10:
                    command[0] = b'w'
                    print(slam)
                    slam.setmap(mapbytes_find)
                    pose_change = pose_change_find
                    pose_change_find = (0, 0, 0)
                    print('ennnnnnnnnnnnnnnnnnd')
                    print(pose_change)
                    iter_times = 1
        else:
            # Update SLAM with current Lidar scan and scan angles if adequate
            if len(distances) > MIN_SAMPLES:
                slam.update(distances, pose_change, scan_angles_degrees=angles)
                previous_distances = distances.copy()
                previous_angles = angles.copy()

            # If not adequate, use previous
            elif previous_distances is not None:
                slam.update(previous_distances, pose_change, scan_angles_degrees=previous_angles)

        # pass the value into point_cal function
        global point_a, point_b
        global current_theta_o

        # Get current robot position
        x, y, theta = slam.getpos()
        current_theta_o = theta
        point_a = x - 5000
        point_b = y - 5000

        # Calculate robot position in the map
        xbytes = int(x / 1000. / MAP_SIZE_METERS * MAP_SIZE_PIXELS)
        ybytes = int(y / 1000. / MAP_SIZE_METERS * MAP_SIZE_PIXELS)

        # Update robot position
        posbytes[xbytes + ybytes * MAP_SIZE_PIXELS] = 255

        #print('LIDAR::', command[0], x, y, theta)

        pose_change = (odometry[0], odometry[1] / np.pi * 180, odometry[2])

        # Get current map bytes as grayscale
        slam.getmap(mapbytes_)

        # Update map
        mapbytes[:] = mapbytes_

    # Shut down the lidar connection
    lidar.stop()
    lidar.disconnect()
    print('LIDAR::thread ends')