def EKF_estimate2(self, imu_file=None, camera_file=None):
    import sensor_fusion as sf
    import robot_n_measurement_functions as rnmf
    # fall back to the task-6 recordings when no files are given
    if camera_file is None:
        camera_file = 'Datasets/data/task6/camera_tracking_task6.csv'
    if imu_file is None:
        imu_file = 'Datasets/data/task6/imu_tracking_task6.csv'
    IMU = pd.read_csv(imu_file, header=None)
    print('IMU ', np.shape(IMU))
    t_robot = IMU.iloc[:, 0]   # IMU timestamps
    GyZ = IMU.iloc[:, 8]       # z-axis gyro column
    w = np.array(GyZ).flatten()
    cam = pd.read_csv(camera_file, header=None)
    print('cam ', np.shape(cam))
    self.Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS,
                            meas_record_file=camera_file,
                            is_linear=False, start_index=0)
    self.x_true = np.array([15.7, 47.5, 90.0])
    self.x_init = self.x_true
    x = np.zeros((self.Camera.meas_record.shape[0] // 3, 3), dtype=float)
    print('x shape is ', np.shape(x))
    self.Camera.reset_sampling_index()
    i, vel = 0, 5.5
    landmarks, inputs = [], []
    while (self.Camera.current_sample_index < self.Camera.time.shape[0]
           and i < x.shape[0] - 1):
        i += 1
        y_ = self.Camera.get_measurement()
        if y_.shape[0] < 2:  # need at least two QR codes in view
            continue
        qr_row = y_[:, 0].astype('int')
        camera_time = self.Camera.current_time
        landmark = rnmf.QRCODE_LOCATIONS[qr_row, 1:]
        # nearest IMU sample (by timestamp) to this camera frame
        closest_index = IMU.iloc[:, 0].sub(camera_time).abs().idxmin()
        closest_u = np.array(IMU.iloc[closest_index, 8])
        # print('qr_row len:{}, qr_row:{}, time:{}, landmarks:{}, closest_u:{}'
        #       .format(np.shape(qr_row), qr_row, camera_time, np.shape(landmark), closest_u))
        u = [vel, closest_u]  # input pair (velocity, raw gyro-z rate)
        inputs.append(u)
        landmarks.append(landmark)
    print('landmarks:{}, inputs:{}'.format(np.shape(landmarks), np.shape(inputs)))
    '''[plt.scatter(landmark[:, 0], landmark[:, 1], marker='s', s=60) for landmark in landmarks]
    phi = np.deg2rad(self.x_init[-1])
    plt.quiver(self.x_init[0], self.x_init[1], np.cos(phi), np.sin(phi),
               linewidth=2., alpha=1.0, color='black', label='init-pose')
    plt.grid(True)
    plt.legend()
    plt.axis('equal')
    plt.show()'''
    return landmarks, inputs
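# Minimal sketch of the nearest-timestamp lookup used in EKF_estimate2, on
# made-up data. Column 0 holds the sensor timestamps, as in the task-6 CSVs;
# in the real file the gyro-z value sits in column 8.
import pandas as pd

imu = pd.DataFrame({'t': [0.00, 0.10, 0.21, 0.33],
                    'gyro_z': [1.2, 1.5, 0.9, 1.1]})
camera_time = 0.18

# index of the IMU row whose timestamp is closest to the camera frame
closest_index = imu.iloc[:, 0].sub(camera_time).abs().idxmin()
closest_gyro_z = imu.iloc[closest_index, 1]
print(closest_index, closest_gyro_z)  # -> 2 0.9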
def __init__(self):
    self.index = 0
    camera_file = 'Datasets/data/task6/camera_tracking_task6.csv'
    imu_file = 'Datasets/data/task6/imu_tracking_task6.csv'
    IMU = pd.read_csv(imu_file, header=None)
    print('IMU ', np.shape(IMU))
    self.Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS,
                            meas_record_file=camera_file,
                            is_linear=False, start_index=0)
    print('Camera ', self.Camera.meas_record.shape[0])
    x = np.zeros((self.Camera.meas_record.shape[0] // 3, 3), dtype=float)
    self.Camera.reset_sampling_index()
    i, vel = 0, 5.5
    vel /= 2.25  # empirically scaled forward velocity
    landmarks, inputs = [], []
    while (self.Camera.current_sample_index < self.Camera.time.shape[0]
           and i < x.shape[0] - 1):
        i += 1
        y_ = self.Camera.get_measurement()
        if y_.shape[0] < 1:  # no QR codes detected in this frame
            continue
        qr_row = y_[:, 0].astype('int')
        camera_time = self.Camera.current_time
        landmark = rnmf.QRCODE_LOCATIONS[qr_row, 1:]
        # nearest IMU sample (by timestamp) to this camera frame
        closest_index = IMU.iloc[:, 0].sub(camera_time).abs().idxmin()
        closest_u = np.array(IMU.iloc[closest_index, 8])
        # print('qr_row len:{}, qr_row:{}, time:{}, landmarks:{}, closest_u:{}'
        #       .format(np.shape(qr_row), qr_row, camera_time, np.shape(landmark), closest_u))
        w_gyro = np.deg2rad(closest_u)  # gyro-z: deg/s -> rad/s
        u = [vel, w_gyro]  # input pair (forward velocity, turn rate in rad/s)
        inputs.append(u)
        landmarks.append(landmark)
    self.DT = .55
    print('landmarks:{}, inputs:{}'.format(np.shape(landmarks), np.shape(inputs)))
    self.landmarks = landmarks
    self.inputs = np.array(inputs)
    self.N = len(inputs)
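# Dead-reckoning sketch of how the (vel, w_gyro) pairs collected in __init__
# could be integrated with the fixed step DT, assuming a standard unicycle
# model (x' = v cos(theta), y' = v sin(theta), theta' = omega); this is an
# illustration, not necessarily the exact model the EKF uses elsewhere.
import numpy as np

DT = 0.55
pose = np.array([15.7, 47.5, np.deg2rad(90.0)])          # x, y, theta
inputs = np.array([[2.4, 0.0], [2.4, 0.1], [2.4, 0.1]])  # made-up (v, omega [rad/s])

for v, omega in inputs:
    pose[0] += v * np.cos(pose[2]) * DT
    pose[1] += v * np.sin(pose[2]) * DT
    pose[2] += omega * DT
print(pose)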
import numpy as np
import matplotlib.pyplot as plt
import sensor_fusion as sf
import robot_n_measurement_functions as rnmf
import pathlib
import seaborn as sns
import matplotlib.patches as mpatches
from scipy.linalg import expm

sns.set()
#%%
n_qr_codes_min = 3
deviation_max = 5e2
parent_path = pathlib.Path.home()
parent_path = parent_path / 'Dropbox/09. Aalto Postdoc/DiddyBorg_experiment'
IMU = sf.Sensor('IMU', sf.IMU_COLUMNS, meas_record_file=parent_path / 'test-run-imu.csv', is_linear=True, start_index=3686)
IMU_static = sf.Sensor('IMU_static', sf.IMU_COLUMNS, meas_record_file=parent_path / 'static_position_IMU.csv', is_linear=True)
Motor_input = sf.Sensor('Motor_input', sf.MOTOR_COLUMNS, meas_record_file=parent_path / 'test-run-Motor-Control.csv', is_linear=True)
Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS, meas_record_file=parent_path / 'test-run-camera.csv', is_linear=False, start_index=154)
#%%
# gyro-z bias estimated from the static recording (column 7)
bias_omega_z = np.mean(IMU_static.meas_record[:, 7])
x_init = np.array([17, 60, 0.])
x = np.zeros((Motor_input.meas_record.shape[0] + IMU.meas_record.shape[0], 3), dtype=float)
x[0, :] = x_init
t = np.zeros(x.shape[0])
t[0] = min(IMU.time[0], Motor_input.time[0], Camera.time[0])
u_now = np.zeros(3)
# # initialize deltaT as IMU deltaT
# IMU_measurement = IMU.get_measurement()  # time change here
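# Sketch of the gyro-bias correction set up above: the mean of the z-gyro
# over a static recording estimates the constant bias, which is then
# subtracted from the driving run before integrating heading. Values made up.
import numpy as np

static_gyro_z = np.array([0.031, 0.029, 0.030, 0.032])  # robot at rest
bias_omega_z = np.mean(static_gyro_z)

run_gyro_z = np.array([0.50, 0.48, 0.03, -0.40])        # driving run
omega_z_corrected = run_gyro_z - bias_omega_z
print(bias_omega_z, omega_z_corrected)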
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd  # needed for pd.read_csv / pd.to_datetime below
import sensor_fusion as sf
import robot_n_measurement_functions as rnmf
import pathlib
import seaborn as sns
import matplotlib.patches as mpatches
from scipy.linalg import expm
import lsqSolve as lsqS

#%%
parent_path = pathlib.Path('/media/muhammad/Data/')
parent_path = parent_path / 'Dropbox/09. Aalto Postdoc/DiddyBorg_experiment'
Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS, meas_record_file=parent_path / 'test-run-camera.csv', is_linear=False, start_index=154)
#%%
df = pd.read_csv(parent_path / 'test-run-camera.csv', delimiter=',', index_col=0)
# interpret the index as UNIX seconds; unparseable stamps become NaT
df.index = pd.to_datetime(df.index, dayfirst=True, errors='coerce', unit='s', utc=True)
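# Sketch of the timestamp conversion above: with unit='s' pandas reads the
# values as UNIX seconds, and errors='coerce' turns unparseable stamps into
# NaT so they can be located and dropped afterwards.
import pandas as pd

idx = pd.Index([1566293000.0, 1566293000.1, float('nan')])
ts = pd.to_datetime(idx, errors='coerce', unit='s', utc=True)
print(ts)
print(ts.isna().sum())  # one bad stamp -> 1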
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import sensor_fusion as sf
import robot_n_measurement_functions as rnmf
import lsqSolve as lsqS

# camera columns: time, qr number, Cx, Cy, w, h, di, phi
camera_file = 'Datasets/data/task5/camera_localization_task5.csv'
Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS, meas_record_file=camera_file, is_linear=False, start_index=0)

# x_true = np.array([61.0, 33.9, 0.0])  # earlier guess, overridden below
x_true = np.array([61.0, 33.9, 90.0])
x_init = np.array([0, 0, 0])  # x_true + np.random.randn(3)
R_one_diag = np.array([2, 10])
# params = {'x_sensors': np.zeros((1, 2))}
I_max = 10
gamma = 1
params_LSQ = {
    'x_sensors': None,
    'R': None,
    # Cholesky factor of R; note that chol(a) in MATLAB returns an upper
    # triangular matrix, while np.linalg.cholesky(a) returns a lower one
    'LR': None,
    'Rinv': None,
    # remaining solver settings, completed to match the identical dicts in
    # localize() and Estimate_pose_based_on_camera() below
    'gamma': gamma,
    'I_max': I_max,
    'Line_search': False,
    'Line_search_n_points': 10,
    'Jwls': lsqS.Jwls,
}
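# Sketch of how the measurement covariance is assembled from R_one_diag:
# np.kron tiles one (range, bearing) variance pair per detected QR code,
# giving a diagonal R of size 2 * n_qr_codes (the same construction used in
# localize() further below).
import numpy as np

R_one_diag = np.array([2, 10])
n_qr_codes = 3
R = np.diag(np.kron(np.ones(n_qr_codes), R_one_diag))
print(R.shape)     # (6, 6)
print(np.diag(R))  # [ 2. 10.  2. 10.  2. 10.]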
'''Original hard-coded paths:
parent_path = pathlib.Path.home()
parent_path = parent_path/'Dropbox/09. Aalto Postdoc/DiddyBorg_experiment'
IMU = sf.Sensor('IMU', sf.IMU_COLUMNS, meas_record_file=parent_path/'test-run-imu.csv', is_linear=True, start_index=3686)
IMU_static = sf.Sensor('IMU_static', sf.IMU_COLUMNS, meas_record_file=parent_path/'static_position_IMU.csv', is_linear=True)
Motor_input = sf.Sensor('Motor_input', sf.MOTOR_COLUMNS, meas_record_file=parent_path/'test-run-Motor-Control.csv', is_linear=True)
Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS, meas_record_file=parent_path/'test-run-camera.csv', is_linear=False, start_index=154)
'''
camera_file = 'Datasets/data/task6/camera_tracking_task6.csv'
imu_file = 'Datasets/data/task6/imu_tracking_task6.csv'
imu_static = 'Datasets/data/task1/imu_reading_task1.csv'
motor_file = 'Datasets/data/task6/motor_control_tracking_task6.csv'

IMU = sf.Sensor('IMU', sf.IMU_COLUMNS, meas_record_file=imu_file, is_linear=True, start_index=3686)
IMU_static = sf.Sensor('IMU_static', sf.IMU_COLUMNS, meas_record_file=imu_static, is_linear=True)
Motor_input = sf.Sensor('Motor_input', sf.MOTOR_COLUMNS, meas_record_file=motor_file, is_linear=True)
Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS, meas_record_file=camera_file, is_linear=False, start_index=154)
def EKF_estimate(self, motor_file=None, camera_file=None):
    # camera record: time, qr number, Cx, Cy, w, h, di, phi
    cam = pd.read_csv(camera_file, header=None)
    print('cam ', np.shape(cam))
    # motor record: timestamp, v, w
    motor = pd.read_csv(motor_file, header=None)
    print('motor ', np.shape(motor))
    cam_time = cam.iloc[0, 0], cam.iloc[-1, 0]
    motor_time = motor.iloc[0, 0], motor.iloc[-1, 0]
    print('cam_time:{}, motor_time:{}'.format(cam_time, motor_time))
    # Plan:
    # - replace timestamps with auto-incrementing integers
    # - get data from the camera first -> indices of the detected QR codes
    # - based on the current timestamp, get the closest u from the motor log
    # - feed landmark positions and u to the EKF
    self.Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS,
                            meas_record_file=camera_file,
                            is_linear=False, start_index=0)
    self.x_true = np.array([15.7, 47.5, 90.0])
    self.x_init = self.x_true  # np.array([0, 0, 0])
    x = np.zeros((self.Camera.meas_record.shape[0] // 3, 3), dtype=float)
    print('x shape is ', np.shape(x))
    self.Camera.reset_sampling_index()
    i = 0
    dt = 6  # tuned step size (earlier trials: 1, 3.2)
    sigma_range = 0.05
    sigma_bearing = 0.05
    sigma_vel = 0.01
    sigma_steer = np.radians(1)
    ekf = RobotEKF(dt, wheelbase=0.12, sigma_vel=sigma_vel, sigma_steer=sigma_steer)
    ekf.x = np.array([[15.7, 47.5, np.deg2rad(90.0)]]).T
    ekf.P = np.diag([1, 1, 1])
    ekf.R = np.diag([sigma_range ** 2, sigma_bearing ** 2])
    while (self.Camera.current_sample_index < self.Camera.time.shape[0]
           and i < x.shape[0] - 1):
        i += 1
        y_ = self.Camera.get_measurement()
        if y_.shape[0] < 2:  # need at least two QR codes in view
            continue
        qr_row = y_[:, 0].astype('int')
        camera_time = self.Camera.current_time
        landmarks = rnmf.QRCODE_LOCATIONS[qr_row, 1:]
        # nearest motor command (by timestamp) to this camera frame
        closest_index = motor.iloc[:, 0].sub(camera_time).abs().idxmin()
        closest_u = np.array(motor.iloc[closest_index, 1:3])
        # print('qr_row len:{}, qr_row:{}, time:{}, landmarks:{}, closest_u:{}'
        #       .format(np.shape(qr_row), qr_row, camera_time, np.shape(landmarks), closest_u))
        sim_pos = ekf.x.copy()  # simulated position
        u = closest_u  # steering command (vel, steering angle in radians)
        # u[1] = np.deg2rad(-u[1]) * 3
        # u[0] *= 4
        u[1] = np.deg2rad(-u[1])
        plt.scatter(landmarks[:, 0], landmarks[:, 1], marker='s', s=60)
        sim_pos = ekf.move(sim_pos, u, dt)  # simulate robot motion
        plt.plot(sim_pos[0], sim_pos[1], '-ok', linewidth=1, markersize=1)
        phi = sim_pos[-1]
        plt.quiver(sim_pos[0], sim_pos[1], np.cos(phi), np.sin(phi),
                   linewidth=.1, alpha=.5, color='red')
        ekf.predict(u=u)
        p_x, p_y = sim_pos[0, 0], sim_pos[1, 0]
        for lmark in landmarks:
            # simulated range-bearing measurement to this landmark
            d = np.sqrt((lmark[0] - p_x) ** 2 + (lmark[1] - p_y) ** 2)
            a = np.arctan2(lmark[1] - p_y, lmark[0] - p_x) - sim_pos[2, 0]
            z = np.array([[d + np.random.randn() * sigma_range],
                          [a + np.random.randn() * sigma_bearing]])
            ekf.update(z, HJacobian=H_of, Hx=Hx, residual=residual,
                       args=(lmark,), hx_args=(lmark,))
    phi = np.deg2rad(self.x_init[-1])
    plt.quiver(self.x_init[0], self.x_init[1], np.cos(phi), np.sin(phi),
               linewidth=2., alpha=1.0, color='blue', label='init-pose')
    plt.grid(True)
    plt.legend()
    plt.axis('equal')
    plt.show()
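# Sketch of the range-bearing measurement model that the EKF update loop in
# EKF_estimate evaluates per landmark: for a landmark at (lx, ly) and pose
# (px, py, theta), the expected measurement is the Euclidean distance and the
# bearing relative to heading. The wrap step is an assumption here; in the
# real code the residual() helper may handle angle wrapping instead.
import numpy as np

def range_bearing(pose, landmark):
    px, py, theta = pose
    lx, ly = landmark
    d = np.hypot(lx - px, ly - py)
    a = np.arctan2(ly - py, lx - px) - theta
    a = (a + np.pi) % (2 * np.pi) - np.pi  # wrap to [-pi, pi)
    return np.array([d, a])

print(range_bearing((15.7, 47.5, np.deg2rad(90.0)), (20.0, 60.0)))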
def localize(self, csvFile=None):
    self.csvFile = csvFile
    self.Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS,
                            meas_record_file=csvFile,
                            is_linear=False, start_index=0)
    R_one_diag = np.array([2, 10])
    I_max, gamma, N = 20, 1, 10
    params_LSQ = {'x_sensors': None,
                  'R': None,
                  'LR': None,
                  'Rinv': None,
                  'gamma': gamma,
                  'I_max': I_max,
                  'Line_search': False,
                  'Line_search_n_points': N,
                  'Jwls': lsqS.Jwls}
    self.Camera.reset_sampling_index()
    self.Camera.get_measurement()  # skip the first record
    y_raw = self.Camera.get_measurement()
    w = y_raw[:, 3]
    h = y_raw[:, 4]
    C_x = y_raw[:, 1]
    di = self.h0 * self.f / h      # distance from QR height, per equation 9
    phi = np.arctan2(C_x, self.f)  # bearing from pixel offset, per equation 9
    angle_qr = np.arccos(np.minimum(w, h) / h)
    corrected_dist = di / np.cos(phi) + 0.5 * self.h0 * np.sin(angle_qr)
    y_raw[:, 5] = corrected_dist  # update the di column
    phi -= self.x_true[-1]        # update the angle
    y_raw[:, 6] = phi
    n_qr_codes = y_raw.shape[0]
    y_measurements = y_raw[:, 5:].flatten()  # take di and phi as measurements
    print('y_measurements ', np.shape(y_measurements))
    qr_pos = rnmf.QRCODE_LOCATIONS[y_raw[:, 0].astype('int'), 1:]
    params_LSQ['x_sensors'] = qr_pos
    # block-diagonal covariance: one (range, bearing) pair per QR code
    R = np.diag(np.kron(np.ones(n_qr_codes), R_one_diag))
    params_LSQ['R'] = R
    params_LSQ['LR'] = np.linalg.cholesky(R).T
    params_LSQ['Rinv'] = np.diag(1 / np.diag(R))
    g = rnmf.h_cam  # measurement model
    G = rnmf.H_cam  # Jacobian of the measurement model
    method = 'gauss-newton'
    x_ls, J_ = lsqS.lsqsolve(y=y_measurements, g=g, G=G, x_init=self.x_init,
                             params=params_LSQ, method=method)
    best_X = x_ls[:, -1].copy()
    best_X[-1] = np.rad2deg(best_X[-1])
    print('x_true ', self.x_true)
    print('best_X ', best_X)
    print('x_ls:{}'.format(np.shape(x_ls)))
    fig, axs = plt.subplots(2, 2, figsize=(8, 6))
    axs[0, 0].plot(x_ls[0, :], label='Px:{}'.format(np.round(best_X[0], 2)))
    axs[0, 0].set_title('X - position')
    axs[0, 0].set(xlabel='time', ylabel='X')
    axs[0, 0].grid(True)
    axs[0, 0].legend()
    axs[1, 0].plot(x_ls[1, :], label='Py:{}'.format(np.round(best_X[1], 2)))
    axs[1, 0].set_title('Y - position')
    axs[1, 0].set(xlabel='time', ylabel='Y')
    axs[1, 0].grid(True)
    axs[1, 0].legend()
    axs[0, 1].plot(x_ls[2, :] * rnmf.RAD_TO_DEG, label='Orientation:{}'.format(np.round(best_X[-1], 2)))
    axs[0, 1].set_title('Orientation')
    axs[0, 1].set(xlabel='time', ylabel='Orientation')
    axs[0, 1].grid(True)
    axs[0, 1].set_ylim([-180, 180])
    axs[0, 1].legend()
    phi = np.deg2rad(best_X[-1])
    print('phi ', phi)
    axs[1, 1].quiver(best_X[0], best_X[1], np.cos(phi), np.sin(phi),
                     linewidth=2.9, alpha=1.0, color='red', label='estimated-pose')
    axs[1, 1].set_title('Final: X, Y and phi')
    axs[1, 1].set(xlabel='X', ylabel='Y')
    axs[1, 1].grid(True)
    axs[1, 1].set_ylim([-5, 50])
    axs[1, 1].set_xlim([-5, 100])
    axs[1, 1].quiver(x_ls[0, :], x_ls[1, :],
                     np.cos(np.deg2rad(x_ls[2, :])), np.sin(np.deg2rad(x_ls[2, :])),
                     linewidth=0.5, alpha=0.5, color='green')
    axs[1, 1].quiver(self.x_init[0], self.x_init[1],
                     np.cos(np.deg2rad(self.x_init[-1])), np.sin(np.deg2rad(self.x_init[-1])),
                     linewidth=2.9, alpha=1.0, color='blue', label='init-pose')
    axs[1, 1].legend(loc='upper left')
    plt.show()
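# Worked sketch of the distance correction in localize() (equation 9):
# pinhole geometry gives di = h0 * f / h from the QR height in pixels, the
# bearing from the horizontal pixel offset, plus a skew term from the code's
# aspect ratio. h0 and f are made-up stand-ins for self.h0 and self.f.
import numpy as np

h0, f = 11.5, 1000.0           # QR height and focal length, assumed values
w, h, C_x = 40.0, 50.0, 120.0  # detected box width, height, center x [px]

di = h0 * f / h                             # raw pinhole distance
phi = np.arctan2(C_x, f)                    # bearing to the code
angle_qr = np.arccos(np.minimum(w, h) / h)  # skew angle of the code plane
corrected_dist = di / np.cos(phi) + 0.5 * h0 * np.sin(angle_qr)
print(di, np.rad2deg(phi), corrected_dist)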
def Estimate_pose_based_on_camera(self, cam_csv=None):
    self.Camera = sf.Sensor('Camera', sf.CAMERA_COLUMNS,
                            meas_record_file=cam_csv,
                            is_linear=False, start_index=0)
    self.x_true = np.array([15.7, 47.5, 90.0])
    self.x_init = self.x_true  # np.array([0, 0, 0])
    x = np.zeros((self.Camera.meas_record.shape[0] // 3, 3), dtype=float)
    x[0, :] = self.x_init
    t = np.zeros(x.shape[0])
    t[0] = self.Camera.time[0]
    R_one_diag = np.array([2, 20])
    I_max, gamma, N = 10, 1, 10
    params_LSQ = {'x_sensors': None,
                  'R': None,
                  'LR': None,
                  'Rinv': None,
                  'gamma': gamma,
                  'I_max': I_max,
                  'Line_search': False,
                  'Line_search_n_points': N,
                  'Jwls': lsqS.Jwls}
    self.Camera.reset_sampling_index()
    i = 0
    while (self.Camera.current_sample_index < self.Camera.time.shape[0]
           and i < x.shape[0] - 1):
        i += 1
        y_ = self.Camera.get_measurement()
        n_qr_codes = y_.shape[0]
        if n_qr_codes < 3:
            # too few QR codes in view: hold the previous estimate
            x[i, :] = x[i - 1, :]
            continue
        w, h, c_x = y_[:, 3], y_[:, 4], y_[:, 1]
        di = self.h0 * self.f / h
        phi = np.arctan2(c_x, self.f)
        angle_qr = np.arccos(np.minimum(w, h) / h)
        corrected_dist = di / np.cos(phi) + 0.5 * self.h0 * np.sin(angle_qr)
        y_[:, 5] = corrected_dist
        y = y_[:, 5:].flatten()
        qr_row = y_[:, 0].astype('int')
        qr_pos = rnmf.QRCODE_LOCATIONS[qr_row, 1:]
        params_LSQ['x_sensors'] = qr_pos
        R = np.diag(np.kron(np.ones(n_qr_codes), R_one_diag))
        params_LSQ['R'] = R
        params_LSQ['LR'] = np.linalg.cholesky(R).T
        params_LSQ['Rinv'] = np.diag(1 / np.diag(R))
        # warm-start each solve from the previous pose; keep the last iterate
        xhat_history_GN, J_history_GN = lsqS.lsqsolve(y, rnmf.h_cam, rnmf.H_cam,
                                                      x[i - 1, :], params_LSQ,
                                                      method='gauss-newton')
        x[i, :] = xhat_history_GN[:, -1]
    plt.figure()
    plt.title('Camera measurements & Jwls estimation')
    plt.plot(x[1:, 0], x[1:, 1], '-ok', linewidth=1, markersize=3, label='position')
    phi = x[1:, 2]
    # plt.quiver(x[1:, 0], x[1:, 1], np.cos(phi), np.sin(phi),
    #            linewidth=2.9, alpha=1.0, color='red', label='Orientation')
    plt.quiver(x[1:, 0], x[1:, 1], -np.sin(phi), np.cos(phi),
               linewidth=2.9, alpha=1.0, color='red', label='Orientation')
    plt.grid(True)
    plt.quiver(x[-1, 0], x[-1, 1], -np.sin(phi[-1]), np.cos(phi[-1]),
               linewidth=3., alpha=1.0, color='green', label='final-pose')
    plt.quiver(self.x_init[0], self.x_init[1],
               np.cos(np.deg2rad(self.x_init[-1])), np.sin(np.deg2rad(self.x_init[-1])),
               linewidth=3., alpha=1.0, color='blue', label='init-pose')
    plt.legend()
    plt.show()
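# Generic Gauss-Newton sketch for weighted least squares, illustrating the
# kind of iteration lsqS.lsqsolve performs with method='gauss-newton' (its
# actual internals are not shown in this file). One step:
# x <- x + gamma * (G^T Rinv G)^{-1} G^T Rinv (y - g(x)).
import numpy as np

def gauss_newton(y, g, G, x_init, Rinv, gamma=1.0, I_max=10):
    x = np.asarray(x_init, dtype=float)
    history = [x.copy()]
    for _ in range(I_max):
        r = y - g(x)   # residual of the measurement model
        J = G(x)       # Jacobian of g at the current estimate
        dx = np.linalg.solve(J.T @ Rinv @ J, J.T @ Rinv @ r)
        x = x + gamma * dx
        history.append(x.copy())
    return np.array(history).T  # shape (n_states, I_max + 1), like x_ls above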