# Read in the sample camera image
image = mpimg.imread('sample.jpg')


def rover_coords(binary_img):
    """Convert nonzero pixels of a binary image to rover-centric coordinates.

    The rover is taken to be at the center bottom of the image, facing
    "up" the image: x points forward (toward the top of the image) and
    y points to the rover's left.

    Parameters
    ----------
    binary_img : 2-D array
        Thresholded (0/1) top-down view; nonzero pixels mark navigable terrain.

    Returns
    -------
    (x_pixel, y_pixel) : tuple of 1-D arrays
        Positions of the nonzero pixels in rover coordinates.
    """
    # Row/column indices of every nonzero (navigable) pixel.
    ypos, xpos = binary_img.nonzero()
    # Image rows increase downward, so negate and shift by the image height
    # to make x grow forward from the rover's position at the bottom edge.
    x_pixel = -(ypos - binary_img.shape[0])
    # Columns increase rightward; shift by half the width so y is measured
    # from the rover's centerline, positive to the rover's left.
    y_pixel = -(xpos - binary_img.shape[1]/2)
    return x_pixel, y_pixel


# Perform warping and color thresholding
warped = perspect_transform(image, source, destination)
colorsel = color_thresh(warped, rgb_threshold)

# Extract x and y positions of navigable terrain pixels
# and convert to rover coordinates
xpix, ypix = rover_coords(colorsel)

# Plot the map in rover-centric coords
fig = plt.figure(figsize=(5, 7.5))
plt.plot(xpix, ypix, '.')
plt.ylim(-160, 160)
plt.xlim(0, 160)
plt.title('Rover-Centric Map', fontsize=20)
plt.show()  # Uncomment if running on your local machine
# Gap (in pixels) between the warped grid square and the bottom of the image,
# since the camera cannot see the ground directly below the rover.
bottom_offset = 6

# Four corners of a 1-square-meter grid cell as seen in the camera image.
source = np.float32([[14, 140], [301, 140], [200, 96], [118, 96]])

# Where those corners should land in the top-down (warped) view: a square of
# side 2*dst_size pixels, centered horizontally, bottom_offset above the
# bottom edge. NOTE(review): dst_size is defined elsewhere — confirm its value.
destination = np.float32([
    [image.shape[1] / 2 - dst_size, image.shape[0] - bottom_offset],
    [image.shape[1] / 2 + dst_size, image.shape[0] - bottom_offset],
    [
        image.shape[1] / 2 + dst_size,
        image.shape[0] - 2 * dst_size - bottom_offset
    ],
    [
        image.shape[1] / 2 - dst_size,
        image.shape[0] - 2 * dst_size - bottom_offset
    ],
])

# Warp to a top-down view, then threshold for bright (navigable) terrain.
warped = perspect_transform(image, source, destination)
colorsel = color_thresh(warped, rgb_thresh=(160, 160, 160))

# Extract navigable terrain pixels in rover-centric coordinates.
xpix, ypix = rover_coords(colorsel)

# Generate 200x200 pixel worldmap
worldmap = np.zeros((200, 200))
scale = 10  # rover-coordinate pixels per worldmap pixel

# Get navigable pixel positions in world coords
x_world, y_world = pix_to_world(xpix, ypix, rover_xpos, rover_ypos,
                                rover_yaw, worldmap.shape[0], scale)

# Accumulate hit counts onto the worldmap (note: numpy index order is row=y).
worldmap[y_world, x_world] += 1
print('Xpos = ', rover_xpos, 'Ypos = ', rover_ypos, 'Yaw = ', rover_yaw)

# Plot the map in rover-centric coords
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(14, 7))
f.tight_layout()
# Destination points for the perspective transform: a square of side
# 2*dst_size pixels, centered horizontally, bottom_offset above the bottom
# edge. NOTE(review): dst_size and bottom_offset are defined elsewhere.
destination = np.float32([
    [image.shape[1] / 2 - dst_size, image.shape[0] - bottom_offset],
    [image.shape[1] / 2 + dst_size, image.shape[0] - bottom_offset],
    [
        image.shape[1] / 2 + dst_size,
        image.shape[0] - 2 * dst_size - bottom_offset
    ],
    [
        image.shape[1] / 2 - dst_size,
        image.shape[0] - 2 * dst_size - bottom_offset
    ],
])

warped = perspect_transform(image, source, destination)  # Perform perspective transform
colorsel = color_thresh(warped, rgb_thresh=(160, 160, 160))  # Threshold the warped image
xpix, ypix = rover_coords(colorsel)  # Convert to rover-centric coordinates
distances, angles = to_polar_coords(xpix, ypix)  # Convert to polar coordinates
avg_angle = np.mean(angles)  # Compute the average angle (a candidate steering direction)

# Plot the processing pipeline stages: raw image, warped view,
# thresholded view, and the rover-centric point cloud.
fig = plt.figure(figsize=(12, 9))
plt.subplot(221)
plt.imshow(image)
plt.subplot(222)
plt.imshow(warped)
plt.subplot(223)
plt.imshow(colorsel, cmap='gray')
plt.subplot(224)
plt.plot(xpix, ypix, '.')
plt.ylim(-160, 160)