def compute_state_space_trajectory_and_derivatives(p_body,p_look_at,y_axis_cam_hint,dt):
    """Compute the full state-space trajectory of a quadrotor camera and its derivatives.

    Given a body position curve and a look-at curve (both sampled at a fixed
    timestep ``dt``), this uses the differential-flatness property of the
    quadrotor to recover the body orientation (yaw ``psi``, pitch ``theta``,
    roll ``phi``), then computes the camera orientation relative to the body,
    and finally first and second time derivatives of all angle arrays.

    Parameters
    ----------
    p_body         : per-timestep body positions; indexed as ``p_body[:,0..2]``,
                     so an (N,3) array is assumed — TODO confirm against callers.
    p_look_at      : per-timestep look-at positions, same shape as ``p_body``.
    y_axis_cam_hint: per-timestep hint vectors used to disambiguate the camera
                     frame's up direction.
    dt             : scalar timestep used for all numerical gradients.

    Returns
    -------
    21-tuple: (p_body, p_body_dot, p_body_dot_dot) followed by
    (value, dot, dot_dot) triples for theta/psi/phi of the body and then of
    the camera.

    NOTE(review): relies on module-level names not visible in this chunk:
    ``m``, ``f_external_world``, ``sklearn``, ``transformations``,
    ``trigutils``, and numpy names (``c_``, ``gradient``, ``cross``, ...)
    presumably from a star-import.
    """
    num_timesteps = len(p_body)

    #
    # compute the yaw, roll, and pitch of the quad using differential flatness
    #

    # first and second time derivatives of position, per coordinate
    p_body_dot     = c_[ gradient(p_body[:,0],dt),     gradient(p_body[:,1],dt),     gradient(p_body[:,2],dt)]
    p_body_dot_dot = c_[ gradient(p_body_dot[:,0],dt), gradient(p_body_dot[:,1],dt), gradient(p_body_dot[:,2],dt)]

    # thrust must account for acceleration minus external forces (e.g. gravity);
    # its direction defines the body y axis (y is up in this file's convention)
    f_thrust_world            = m*p_body_dot_dot - f_external_world.T.A
    f_thrust_world_normalized = sklearn.preprocessing.normalize(f_thrust_world)

    # build an orthonormal body frame from the thrust direction and the look-at direction
    y_axis_body = f_thrust_world_normalized
    z_axis_body = sklearn.preprocessing.normalize(cross(y_axis_body, p_look_at - p_body))
    x_axis_body = sklearn.preprocessing.normalize(cross(z_axis_body, y_axis_body))

    R_world_from_body = zeros((num_timesteps,4,4))
    theta_body        = zeros(num_timesteps)
    psi_body          = zeros(num_timesteps)
    phi_body          = zeros(num_timesteps)

    for ti in range(num_timesteps):

        # assemble the homogeneous world-from-body rotation matrix with
        # columns ordered z, y, x (flashlight convention per the comments below)
        z_axis_body_ti        = c_[matrix(z_axis_body[ti]),0].T
        y_axis_body_ti        = c_[matrix(y_axis_body[ti]),0].T
        x_axis_body_ti        = c_[matrix(x_axis_body[ti]),0].T
        R_world_from_body_ti  = c_[z_axis_body_ti,y_axis_body_ti,x_axis_body_ti,[0,0,0,1]]

        # extract Euler angles in rotating y-x-z order; the assert round-trips
        # the angles back into a matrix to guard against a wrong convention
        psi_body_ti,theta_body_ti,phi_body_ti = transformations.euler_from_matrix(R_world_from_body_ti,"ryxz")
        assert allclose(R_world_from_body_ti, transformations.euler_matrix(psi_body_ti,theta_body_ti,phi_body_ti,"ryxz"))

        R_world_from_body[ti] = R_world_from_body_ti
        theta_body[ti]        = theta_body_ti
        psi_body[ti]          = psi_body_ti
        phi_body[ti]          = phi_body_ti

    # unwrap each angle array so it is continuous across +-pi boundaries
    psi_body   = trigutils.compute_continuous_angle_array(psi_body)
    theta_body = trigutils.compute_continuous_angle_array(theta_body)
    phi_body   = trigutils.compute_continuous_angle_array(phi_body)

    #
    # now that we have the full orientation of the quad, compute the full orientation of the camera relative to the quad
    #

    # camera x axis points at the look-at target; the hint disambiguates up
    x_axis_cam = sklearn.preprocessing.normalize( p_look_at - p_body )
    z_axis_cam = sklearn.preprocessing.normalize( cross(y_axis_cam_hint, x_axis_cam) )
    y_axis_cam = sklearn.preprocessing.normalize( cross(x_axis_cam, z_axis_cam) )

    theta_cam = zeros(num_timesteps)
    psi_cam   = zeros(num_timesteps)
    phi_cam   = zeros(num_timesteps)

    for ti in range(num_timesteps):

        z_axis_cam_ti        = c_[matrix(z_axis_cam[ti]),0].T
        y_axis_cam_ti        = c_[matrix(y_axis_cam[ti]),0].T
        x_axis_cam_ti        = c_[matrix(x_axis_cam[ti]),0].T
        R_world_from_cam_ti  = matrix(c_[z_axis_cam_ti, y_axis_cam_ti, x_axis_cam_ti, [0,0,0,1]])
        R_world_from_body_ti = matrix(R_world_from_body[ti])

        # camera orientation expressed in the body frame:
        # body_from_cam = world_from_body^T * world_from_cam
        R_body_from_cam_ti = R_world_from_body_ti.T*R_world_from_cam_ti

        psi_cam_ti, theta_cam_ti, phi_cam_ti = transformations.euler_from_matrix(R_body_from_cam_ti,"ryxz")

        #
        # sanity check that world-from-body rotation matrix we compute actually
        # maps the vector [0,0,1] to the quadrotor x axis
        #
        assert allclose(c_[matrix(x_axis_body[ti]),1].T, R_world_from_body_ti*matrix([0,0,1,1]).T)

        #
        # sanity check that the world-from-camera rotation matrix we compute actually
        # maps the vector [0,0,1] to the camera x axis
        #
        assert allclose(c_[matrix(x_axis_cam[ti]),1].T, R_world_from_cam_ti*matrix([0,0,1,1]).T)

        #
        # sanity check that the world-from-body and body-from-camera rotation matrices
        # we compute actually maps the vector [0,0,1] to the camera x axis
        #
        assert allclose(c_[matrix(x_axis_cam[ti]),1].T, R_world_from_body_ti*R_body_from_cam_ti*matrix([0,0,1,1]).T)

        theta_cam[ti] = theta_cam_ti
        psi_cam[ti]   = psi_cam_ti
        phi_cam[ti]   = phi_cam_ti

    theta_cam = trigutils.compute_continuous_angle_array(theta_cam)
    psi_cam   = trigutils.compute_continuous_angle_array(psi_cam)
    phi_cam   = trigutils.compute_continuous_angle_array(phi_cam)

    #
    # assert that we never need any camera yaw in the body frame of the quad
    #
    assert allclose(psi_cam, 0)

    #
    # compute derivatives
    #
    theta_body_dot = gradient(theta_body,dt)
    psi_body_dot   = gradient(psi_body,dt)
    phi_body_dot   = gradient(phi_body,dt)

    theta_cam_dot = gradient(theta_cam,dt)
    psi_cam_dot   = gradient(psi_cam,dt)
    phi_cam_dot   = gradient(phi_cam,dt)

    theta_body_dot_dot = gradient(theta_body_dot,dt)
    psi_body_dot_dot   = gradient(psi_body_dot,dt)
    phi_body_dot_dot   = gradient(phi_body_dot,dt)

    theta_cam_dot_dot = gradient(theta_cam_dot,dt)
    psi_cam_dot_dot   = gradient(psi_cam_dot,dt)
    phi_cam_dot_dot   = gradient(phi_cam_dot,dt)

    return p_body, p_body_dot, p_body_dot_dot, theta_body, theta_body_dot, theta_body_dot_dot, psi_body, psi_body_dot, psi_body_dot_dot, phi_body, phi_body_dot, phi_body_dot_dot, theta_cam, theta_cam_dot, theta_cam_dot_dot, psi_cam, psi_cam_dot, psi_cam_dot_dot, phi_cam, phi_cam_dot, phi_cam_dot_dot
def compute_manipulator_matrices(x):
    """Build the manipulator-equation matrices H, C, G, B for one state vector.

    The returned block matrices correspond to a second-order dynamics model of
    the form ``H*q_dot_dot + C*q_dot + G = B*u`` partitioned into three 3-DOF
    groups: body position, body orientation, and camera orientation
    (block structure visible in the ``bmat`` calls below).

    Parameters
    ----------
    x : packed state vector; decomposed via the module-level ``unpack_state``.

    Returns
    -------
    (H, C, G, B) : numpy matrices assembled with ``bmat``.

    NOTE(review): relies on module-level names not visible in this chunk:
    ``unpack_state``, ``transformations``, ``sympyutils``,
    ``A_anon_funcs_ufuncify``, ``A_dot_anon_funcs_ufuncify``, ``linalgutils``,
    ``I_body``, ``I_cam``, ``d``, ``m``, ``f_external_world``.
    """

    # geometric/torque constants for the prop layout; alpha/beta/gamma are
    # short aliases used in the torque mixing matrix below
    front_prop_and_quad_positive_x_axis_angle = pi/4
    rear_prop_and_quad_negative_x_axis_angle  = pi/4
    y_axis_torque_per_newton_per_prop         = 1

    alpha = front_prop_and_quad_positive_x_axis_angle
    beta  = rear_prop_and_quad_negative_x_axis_angle
    gamma = y_axis_torque_per_newton_per_prop

    p_body, theta_body, psi_body, phi_body, theta_cam, psi_cam, phi_cam, p_body_dot, theta_body_dot, psi_body_dot, phi_body_dot, theta_cam_dot, psi_cam_dot, phi_cam_dot, q, q_dot = unpack_state(x)

    # rotation matrices from the current Euler angles (ryxz convention,
    # consistent with the rest of this file); 3x3 upper-left blocks only
    R_world_from_body = matrix(transformations.euler_matrix(psi_body,theta_body,phi_body,"ryxz"))[0:3,0:3]
    R_body_from_world = R_world_from_body.T
    R_body_from_cam   = matrix(transformations.euler_matrix(psi_cam,theta_cam,phi_cam,"ryxz"))[0:3,0:3]
    R_cam_from_body   = R_body_from_cam.T

    # evaluate the precompiled symbolic Euler-rate-to-angular-velocity
    # matrices (A) and their time derivatives (A_dot) at the current state
    subs_body  = array( [ theta_body, psi_body, phi_body, theta_body_dot, psi_body_dot, phi_body_dot ] )
    A_body     = matrix(sympyutils.evaluate_matrix_anon_funcs( A_anon_funcs_ufuncify,     subs_body[newaxis,:] ))
    A_body_dot = matrix(sympyutils.evaluate_matrix_anon_funcs( A_dot_anon_funcs_ufuncify, subs_body[newaxis,:] ))

    subs_cam  = array( [ theta_cam, psi_cam, phi_cam, theta_cam_dot, psi_cam_dot, phi_cam_dot ] )
    A_cam     = matrix(sympyutils.evaluate_matrix_anon_funcs( A_anon_funcs_ufuncify,     subs_cam[newaxis,:] ))
    A_cam_dot = matrix(sympyutils.evaluate_matrix_anon_funcs( A_dot_anon_funcs_ufuncify, subs_cam[newaxis,:] ))

    # angular momentum cross-product terms used in the Coriolis block C_11
    euler_body_dot               = matrix([theta_body_dot,psi_body_dot,phi_body_dot]).T
    omega_body_in_body           = R_body_from_world*A_body*euler_body_dot
    I_body_omega_body_in_body_X  = linalgutils.cross_product_left_term_matrix_from_vector(I_body*omega_body_in_body)

    euler_cam_dot             = matrix([theta_cam_dot,psi_cam_dot,phi_cam_dot]).T
    omega_cam_in_cam          = R_cam_from_body*A_cam*euler_cam_dot
    I_cam_omega_cam_in_cam_X  = linalgutils.cross_product_left_term_matrix_from_vector(I_cam*omega_cam_in_cam)

    # control mixing: all four props thrust along body y; torque mixing
    # encodes the prop geometry (d = arm length, gamma = yaw torque per newton)
    M_thrust_body_from_control = matrix([[0,0,0,0],[1,1,1,1],[0,0,0,0]])
    M_torque_body_from_control = matrix([[-d*cos(alpha),d*cos(beta),d*cos(beta),-d*cos(alpha)],[gamma,-gamma,gamma,-gamma],[d*sin(alpha),d*sin(beta),-d*sin(beta),-d*sin(alpha)]])

    # H
    H_00    = matrix(m*identity(3))
    H_11    = I_body*R_body_from_world*A_body
    H_22    = A_cam
    H_zeros = matrix(zeros((3,3)))

    # C
    C_11    = I_body*R_body_from_world*A_body_dot - I_body_omega_body_in_body_X*R_body_from_world*A_body
    C_22    = A_cam_dot
    C_zeros = matrix(zeros((3,3)))

    # G
    G_0 = -f_external_world
    G_1 = matrix(zeros((3,1)))
    G_2 = matrix(zeros((3,1)))

    # B
    B_00 = R_world_from_body*M_thrust_body_from_control
    B_01 = matrix(zeros((3,3)))
    B_10 = M_torque_body_from_control
    B_11 = matrix(zeros((3,3)))
    B_20 = matrix(zeros((3,4)))
    B_21 = matrix(identity(3))

    H = bmat( [ [H_00,    H_zeros, H_zeros], [H_zeros, H_11,    H_zeros], [H_zeros, H_zeros, H_22 ] ] )
    C = bmat( [ [C_zeros, C_zeros, C_zeros], [C_zeros, C_11,    C_zeros], [C_zeros, C_zeros, C_22 ] ] )
    G = bmat( [ [G_0], [G_1],[G_2] ] )
    B = bmat( [ [B_00, B_01], [B_10, B_11], [B_20, B_21] ] )

    return H, C, G, B
def calculate_unoptimized_trajectory_ned(P_lookFrom_spline, T_lookFrom_spline, P_lookAt_spline, T_lookAt_spline, P_lookFrom_ease, T_lookFrom_ease, P_lookAt_ease, T_lookAt_ease, total_time, refLLH):
    """Compute the nominal (unoptimized) quadrotor-camera trajectory.

    Evaluates the user-authored look-from / look-at splines and easing curves,
    reparameterizes them by the easing curves, recovers body orientation via
    differential flatness, and numerically differentiates to produce the full
    nominal state and control trajectories. Results are also dumped to an
    ``.npz`` file for offline inspection.

    Returns
    -------
    (x_nominal, u_nominal, dt_nominal, t_nominal)

    NOTE(review): ``refLLH`` is accepted but never referenced in this body —
    presumably kept for interface compatibility with callers; confirm.
    NOTE(review): relies on module-level names not visible in this chunk:
    ``num_timesteps``, ``_evaluate_splines_and_convert_to_meters``,
    ``curveutils``, ``gradientutils``, ``quadrotor3d``, ``trigutils``,
    ``sklearn``, ``transformations``, ``np``, and the constraint bounds
    ``x_min_ti``/``x_max_ti``/``u_min_ti``/``u_max_ti``.
    """
    t_begin = 0.0
    t_end   = total_time

    t_nominal = linspace(t_begin, t_end, num_timesteps)
    # NOTE(review): linspace spacing is (t_end-t_begin)/(num_timesteps-1) but dt
    # divides by num_timesteps — slight mismatch, consistent across this file;
    # confirm it is intentional before changing.
    dt        = (t_end - t_begin) / num_timesteps

    look_from_eval, look_from_easing_eval = _evaluate_splines_and_convert_to_meters( P_lookFrom_spline, T_lookFrom_spline, P_lookFrom_ease, T_lookFrom_ease, num_samples=num_timesteps)
    look_at_eval,   look_at_easing_eval   = _evaluate_splines_and_convert_to_meters( P_lookAt_spline,   T_lookAt_spline,   P_lookAt_ease,   T_lookAt_ease,   num_samples=num_timesteps)

    #
    # adjust to match flashlight library convention of z,y,x axis ordering where y is up
    #
    look_from_eval = look_from_eval[:, [0, 2, 1]]
    look_at_eval   = look_at_eval[:, [0, 2, 1]]
    look_from_eval[:, 1] *= -1
    look_at_eval[:, 1]   *= -1

    # resample each curve according to its easing curve (user progress)
    look_from_user_progress, _, _, _ = curveutils.reparameterize_curve( look_from_eval, look_from_easing_eval)
    look_at_user_progress,   _, _, _ = curveutils.reparameterize_curve( look_at_eval,   look_at_easing_eval)

    #
    # use quadrotor camera algorithm to compute nominal trajectory
    #
    p_body    = look_from_user_progress
    p_look_at = look_at_user_progress

    # smoothed first and second derivatives of position
    p_body_dotN    = gradientutils.gradients_vector_wrt_scalar_smooth_boundaries( p_body, dt, max_gradient=2, poly_deg=5)
    p_body_dot     = p_body_dotN[1]
    p_body_dot_dot = p_body_dotN[2]

    # differential flatness: thrust direction gives the body y axis (y is up)
    f_thrust_world            = quadrotor3d.m * p_body_dot_dot - quadrotor3d.f_external_world.T.A
    f_thrust_world_normalized = sklearn.preprocessing.normalize(f_thrust_world)

    y_axis_body = f_thrust_world_normalized
    z_axis_body = sklearn.preprocessing.normalize( cross(y_axis_body, p_look_at - p_body))
    x_axis_body = sklearn.preprocessing.normalize( cross(z_axis_body, y_axis_body))

    theta_body = zeros(num_timesteps)
    psi_body   = zeros(num_timesteps)
    phi_body   = zeros(num_timesteps)

    for ti in range(num_timesteps):

        z_axis_body_ti       = c_[matrix(z_axis_body[ti]), 0].T
        y_axis_body_ti       = c_[matrix(y_axis_body[ti]), 0].T
        x_axis_body_ti       = c_[matrix(x_axis_body[ti]), 0].T
        R_world_from_body_ti = c_[z_axis_body_ti, y_axis_body_ti, x_axis_body_ti, [0, 0, 0, 1]]

        # extract Euler angles (ryxz) and round-trip to verify the convention
        psi_body_ti, theta_body_ti, phi_body_ti = transformations.euler_from_matrix( R_world_from_body_ti, "ryxz")
        assert allclose( R_world_from_body_ti, transformations.euler_matrix(psi_body_ti, theta_body_ti, phi_body_ti, "ryxz"))

        theta_body[ti] = theta_body_ti
        psi_body[ti]   = psi_body_ti
        phi_body[ti]   = phi_body_ti

    # unwrap each angle array so it is continuous across +-pi boundaries
    psi_body   = trigutils.compute_continuous_angle_array(psi_body)
    theta_body = trigutils.compute_continuous_angle_array(theta_body)
    phi_body   = trigutils.compute_continuous_angle_array(phi_body)

    # smoothed first and second derivatives of the body angles
    theta_body_dotN = gradientutils.gradients_scalar_wrt_scalar_smooth_boundaries( theta_body, dt, max_gradient=2, poly_deg=5)
    psi_body_dotN   = gradientutils.gradients_scalar_wrt_scalar_smooth_boundaries( psi_body,   dt, max_gradient=2, poly_deg=5)
    phi_body_dotN   = gradientutils.gradients_scalar_wrt_scalar_smooth_boundaries( phi_body,   dt, max_gradient=2, poly_deg=5)

    theta_body_dot = theta_body_dotN[1]
    psi_body_dot   = psi_body_dotN[1]
    phi_body_dot   = phi_body_dotN[1]

    theta_body_dot_dot = theta_body_dotN[2]
    psi_body_dot_dot   = psi_body_dotN[2]
    phi_body_dot_dot   = phi_body_dotN[2]

    q_q_dot_q_dot_dot_nominal = p_body, p_body_dot, p_body_dot_dot, theta_body, theta_body_dot, theta_body_dot_dot, psi_body, psi_body_dot, psi_body_dot_dot, phi_body, phi_body_dot, phi_body_dot_dot

    # nominal control trajectory implied by the nominal state trajectory
    u_nominal = quadrotor3d.compute_control_trajectory( q_q_dot_q_dot_dot_nominal)

    p_nominal, p_dot_nominal, p_dot_dot_nominal, theta_nominal, theta_dot_nominal, theta_dot_dot_nominal, psi_nominal, psi_dot_nominal, psi_dot_dot_nominal, phi_nominal, phi_dot_nominal, phi_dot_dot_nominal = q_q_dot_q_dot_dot_nominal

    # pack configuration and its derivative into the state trajectory x = [q, q_dot]
    q_nominal    = c_[p_nominal, theta_nominal, psi_nominal, phi_nominal]
    qdot_nominal = c_[p_dot_nominal, theta_dot_nominal, psi_dot_nominal, phi_dot_nominal]
    x_nominal    = c_[q_nominal, qdot_nominal]

    dt_nominal      = dt
    T_final_nominal = t_nominal[-1]

    #
    # color values
    #
    constraint_violation_score_norm_n1p1, constraint_violation_score_norm_01, constraint_violation_colors = \
        quadrotor3d.compute_normalized_constraint_violation_score(x_nominal,u_nominal,x_min_ti,x_max_ti,u_min_ti,u_max_ti)

    #
    # save_results
    #
    np.savez("tmp_calculate_unoptimized_trajectory_ned_result", \
        p_nominal=p_nominal, \
        psi_nominal=psi_nominal, \
        user_progress=look_from_easing_eval, \
        dt=array(dt), \
        x_nominal=x_nominal, \
        u_nominal=u_nominal, \
        dt_nominal=dt, \
        t_nominal=t_nominal, \
        T_final_nominal=T_final_nominal, constraint_violation_colors=constraint_violation_colors)

    return x_nominal, u_nominal, dt_nominal, t_nominal
def calculate_optimized_easing_curve_gaussian_ned( P_lookFrom_spline, T_lookFrom_spline, P_lookAt_spline, T_lookAt_spline, P_lookFrom_ease, T_lookFrom_ease, P_lookAt_ease, T_lookAt_ease, total_time, refLLH):
    """Optimize the easing curve with the Gaussian time-stretch algorithm.

    Like :func:`calculate_unoptimized_trajectory_ned`, but instead of
    reparameterizing the look-from curve, it resamples the look-at curve onto
    the look-from curve's original sampling, computes the required yaw
    (``psi``) trajectory via differential flatness, and then calls
    ``quadrotor3d_gaussian_time_stretch.optimize_feasible`` to stretch time
    until the trajectory is feasible. Results are also dumped to an ``.npz``
    file for offline inspection.

    Returns
    -------
    (s_opt, X_opt, U_opt, t_opt, constraint_violation_colors)

    NOTE(review): ``refLLH`` is accepted but never referenced in this body —
    presumably kept for interface compatibility; confirm.
    NOTE(review): ``const_vals_ti`` is computed but only persisted in the
    ``.npz`` output; it is not passed to the optimizer here.
    """
    t_begin = 0.0
    t_end   = total_time

    t_nominal = linspace(t_begin, t_end, num_timesteps)
    # NOTE(review): see the dt/linspace spacing mismatch noted elsewhere in
    # this file; consistent convention, confirm before changing.
    dt        = (t_end - t_begin) / num_timesteps

    look_from_eval, look_from_easing_eval = _evaluate_splines_and_convert_to_meters( P_lookFrom_spline, T_lookFrom_spline, P_lookFrom_ease, T_lookFrom_ease, num_samples=num_timesteps)
    look_at_eval,   look_at_easing_eval   = _evaluate_splines_and_convert_to_meters( P_lookAt_spline,   T_lookAt_spline,   P_lookAt_ease,   T_lookAt_ease,   num_samples=num_timesteps)

    #
    # adjust to match flashlight library convention of z,y,x axis ordering where y is up
    #
    look_from_eval = look_from_eval[:, [0, 2, 1]]
    look_at_eval   = look_at_eval[:, [0, 2, 1]]
    look_from_eval[:, 1] *= -1
    look_at_eval[:, 1]   *= -1

    #
    # new strategy: figure out the look-at values that correspond to the original sampling of look-from values
    #
    look_from_original = look_from_eval
    v_norm_eval        = linspace(0.0, 1.0, num_timesteps)

    look_from_user_progress, look_from_v_norm_user_progress, _, _ = curveutils.reparameterize_curve( look_from_eval, look_from_easing_eval)
    look_at_user_progress,   look_at_v_norm_user_progress,   _, _ = curveutils.reparameterize_curve( look_at_eval,   look_at_easing_eval)

    # map: normalized arc length of the original look-from samples -> time ->
    # normalized arc length on the look-at curve -> look-at positions
    t_look_from_original   = interpolateutils.resample_scalar_wrt_scalar( look_from_v_norm_user_progress, t_nominal, v_norm_eval)
    look_at_v_norm_original = interpolateutils.resample_scalar_wrt_scalar( t_nominal, look_at_v_norm_user_progress, t_look_from_original)
    look_at_original       = interpolateutils.resample_vector_wrt_scalar( v_norm_eval, look_at_eval, look_at_v_norm_original)

    #
    # use quadrotor camera algorithm to compute psi values
    #
    p_body    = look_from_original
    p_look_at = look_at_original

    # non-constant dt variant because t_look_from_original is unevenly spaced
    p_body_dotN    = gradientutils.gradients_vector_wrt_scalar_smooth_boundaries_nonconst_dt( p_body, t_look_from_original, max_gradient=2, poly_deg=5)
    p_body_dot     = p_body_dotN[1]
    p_body_dot_dot = p_body_dotN[2]

    # differential flatness: thrust direction gives the body y axis (y is up)
    f_thrust_world            = quadrotor3d.m * p_body_dot_dot - quadrotor3d.f_external_world.T.A
    f_thrust_world_normalized = sklearn.preprocessing.normalize(f_thrust_world)

    y_axis_body = f_thrust_world_normalized
    z_axis_body = sklearn.preprocessing.normalize( cross(y_axis_body, p_look_at - p_body))
    x_axis_body = sklearn.preprocessing.normalize( cross(z_axis_body, y_axis_body))

    psi_body = zeros(num_timesteps)

    for ti in range(num_timesteps):

        z_axis_body_ti       = c_[matrix(z_axis_body[ti]), 0].T
        y_axis_body_ti       = c_[matrix(y_axis_body[ti]), 0].T
        x_axis_body_ti       = c_[matrix(x_axis_body[ti]), 0].T
        R_world_from_body_ti = c_[z_axis_body_ti, y_axis_body_ti, x_axis_body_ti, [0, 0, 0, 1]]

        # only psi (yaw) is needed by the time-stretch optimizer below
        psi_body_ti, theta_body_ti, phi_body_ti = transformations.euler_from_matrix( R_world_from_body_ti, "ryxz")
        assert allclose( R_world_from_body_ti, transformations.euler_matrix(psi_body_ti, theta_body_ti, phi_body_ti, "ryxz"))

        psi_body[ti] = psi_body_ti

    psi_body = trigutils.compute_continuous_angle_array(psi_body)

    #
    # call the gaussian time stretching optimizer
    #
    p_eval        = p_body
    psi_eval      = psi_body
    user_progress = look_from_easing_eval

    const_vals_ti = hstack([ quadrotor3d.alpha, quadrotor3d.beta, quadrotor3d.gamma, quadrotor3d.d, quadrotor3d.m, quadrotor3d.I.A1, quadrotor3d.f_external_world.A1 ])

    # time-stretch tuning parameters (iterations, Gaussian kernel width/height
    # expressed in multiples of dt, extra polish iterations)
    max_stretch_iters_feasible          = 100
    gauss_width_in_terms_of_dt_feasible = 200.0
    gauss_max_in_terms_of_dt_feasible   = 0.2
    extra_iters_feasible                = 1

    #
    # gaussian time stretch
    #
    gaussian_time_stretch_optimized_trajectory = quadrotor3d_gaussian_time_stretch.optimize_feasible( p_eval,psi_eval, \
        t_nominal,user_progress,dt, \
        x_min_ti, x_max_ti, \
        u_min_ti, u_max_ti, \
        max_stretch_iters_feasible, \
        gauss_width_in_terms_of_dt_feasible, \
        gauss_max_in_terms_of_dt_feasible, \
        extra_iters_feasible )

    X_opt, U_opt, t_opt, user_progress_opt, dt_opt = gaussian_time_stretch_optimized_trajectory

    s_opt       = user_progress_opt
    T_final_opt = t_opt[-1]

    #
    # color values
    #
    constraint_violation_score_norm_n1p1, constraint_violation_score_norm_01, constraint_violation_colors = \
        quadrotor3d.compute_normalized_constraint_violation_score(X_opt,U_opt,x_min_ti,x_max_ti,u_min_ti,u_max_ti)

    #
    # save_results
    #
    np.savez("tmp_calculate_optimized_easing_curve_gaussian_ned_result", \
        p_eval=p_eval, \
        psi_eval=psi_eval, \
        t_nominal=t_nominal, \
        user_progress=user_progress, \
        dt=array(dt), \
        const_vals_ti=const_vals_ti, \
        x_min_ti=x_min_ti, \
        x_max_ti=x_max_ti, \
        u_min_ti=u_min_ti, \
        u_max_ti=u_max_ti, \
        s_opt=s_opt, \
        X_opt=X_opt, \
        U_opt=U_opt, \
        t_opt=t_opt, \
        T_final_opt=T_final_opt, \
        constraint_violation_colors=constraint_violation_colors)

    return s_opt, X_opt, U_opt, t_opt, constraint_violation_colors
def calculate_optimized_easing_curve_ned(P_lookFrom_spline, T_lookFrom_spline, P_lookAt_spline, T_lookAt_spline, P_lookFrom_ease, T_lookFrom_ease, P_lookAt_ease, T_lookAt_ease, total_time, refLLH):
    """Optimize the easing curve with the fixed-path trajectory optimizer.

    Same preprocessing as
    :func:`calculate_optimized_easing_curve_gaussian_ned` (resample look-at
    onto the look-from curve's original sampling, compute the yaw trajectory
    via differential flatness), but the optimization is done by
    ``quadrotor3d_fixed_path.optimize`` in "track" mode. Results are also
    dumped to an ``.npz`` file for offline inspection.

    Returns
    -------
    (s_opt, X_opt, U_opt, t_opt, s_lookat_opt, p_lookat_opt,
    constraint_violation_colors)

    NOTE(review): ``refLLH`` is accepted but never referenced in this body —
    presumably kept for interface compatibility; confirm.
    """
    t_begin = 0.0
    t_end   = total_time

    t_nominal = linspace(t_begin, t_end, num_timesteps)
    # NOTE(review): see the dt/linspace spacing mismatch noted elsewhere in
    # this file; consistent convention, confirm before changing.
    dt        = (t_end - t_begin) / num_timesteps

    look_from_eval, look_from_easing_eval = _evaluate_splines_and_convert_to_meters( P_lookFrom_spline, T_lookFrom_spline, P_lookFrom_ease, T_lookFrom_ease, num_samples=num_timesteps)
    look_at_eval,   look_at_easing_eval   = _evaluate_splines_and_convert_to_meters( P_lookAt_spline,   T_lookAt_spline,   P_lookAt_ease,   T_lookAt_ease,   num_samples=num_timesteps)

    #
    # adjust to match flashlight library convention of z,y,x axis ordering where y is up
    #
    look_from_eval = look_from_eval[:, [0, 2, 1]]
    look_at_eval   = look_at_eval[:, [0, 2, 1]]
    look_from_eval[:, 1] *= -1
    look_at_eval[:, 1]   *= -1

    #
    # old strategy: use reparameterized curve to compute yaw values
    #
    # look_from_user_progress, look_from_v_norm_user_progress, _, _ = reparameterize_curve(look_from_eval,look_from_easing_eval)
    # look_at_user_progress,   look_at_v_norm_user_progress,   _, _ = reparameterize_curve(look_at_eval,look_at_easing_eval)
    # y_axis_cam_hint_nominal = c_[ zeros_like(t_nominal), ones_like(t_nominal), zeros_like(t_nominal) ]
    # q_q_dot_q_dot_dot_nominal = compute_state_space_trajectory_and_derivatives(look_from_user_progress,look_at_user_progress,y_axis_cam_hint_nominal,dt)
    # p_body_nominal, p_body_dot_nominal, p_body_dot_dot_nominal, theta_body_nominal, theta_body_dot_nominal, theta_body_dot_dot_nominal, psi_body_nominal, psi_body_dot_nominal, psi_body_dot_dot_nominal, phi_body_nominal, phi_body_dot_nominal, phi_body_dot_dot_nominal, theta_cam_nominal, theta_cam_dot_nominal, theta_cam_dot_dot_nominal, psi_cam_nominal, psi_cam_dot_nominal, psi_cam_dot_dot_nominal, phi_cam_nominal, phi_cam_dot_nominal, phi_cam_dot_dot_nominal = q_q_dot_q_dot_dot_nominal
    # assert allclose(look_from_user_progress,p_body_nominal)
    # p_body   = p_body_nominal
    # psi_body = psi_body_nominal

    #
    # new strategy: figure out the look-at values that correspond to the original sampling of look-from values
    #
    look_from_original = look_from_eval
    v_norm_eval        = linspace(0.0, 1.0, num_timesteps)

    look_from_user_progress, look_from_v_norm_user_progress, _, _ = curveutils.reparameterize_curve( look_from_eval, look_from_easing_eval)
    look_at_user_progress,   look_at_v_norm_user_progress,   _, _ = curveutils.reparameterize_curve( look_at_eval,   look_at_easing_eval)

    # map: normalized arc length of the original look-from samples -> time ->
    # normalized arc length on the look-at curve -> look-at positions
    t_look_from_original    = interpolateutils.resample_scalar_wrt_scalar( look_from_v_norm_user_progress, t_nominal, v_norm_eval)
    look_at_v_norm_original = interpolateutils.resample_scalar_wrt_scalar( t_nominal, look_at_v_norm_user_progress, t_look_from_original)
    look_at_original        = interpolateutils.resample_vector_wrt_scalar( v_norm_eval, look_at_eval, look_at_v_norm_original)

    #
    # use quadrotor camera algorithm to compute psi values
    #
    p_body    = look_from_original
    p_look_at = look_at_original

    # non-constant dt variant because t_look_from_original is unevenly spaced
    p_body_dotN    = gradientutils.gradients_vector_wrt_scalar_smooth_boundaries_nonconst_dt( p_body, t_look_from_original, max_gradient=2, poly_deg=5)
    p_body_dot     = p_body_dotN[1]
    p_body_dot_dot = p_body_dotN[2]

    # differential flatness: thrust direction gives the body y axis (y is up)
    f_thrust_world            = quadrotor3d.m * p_body_dot_dot - quadrotor3d.f_external_world.T.A
    f_thrust_world_normalized = sklearn.preprocessing.normalize(f_thrust_world)

    y_axis_body = f_thrust_world_normalized
    z_axis_body = sklearn.preprocessing.normalize( cross(y_axis_body, p_look_at - p_body))
    x_axis_body = sklearn.preprocessing.normalize( cross(z_axis_body, y_axis_body))

    psi_body = zeros(num_timesteps)

    for ti in range(num_timesteps):

        z_axis_body_ti       = c_[matrix(z_axis_body[ti]), 0].T
        y_axis_body_ti       = c_[matrix(y_axis_body[ti]), 0].T
        x_axis_body_ti       = c_[matrix(x_axis_body[ti]), 0].T
        R_world_from_body_ti = c_[z_axis_body_ti, y_axis_body_ti, x_axis_body_ti, [0, 0, 0, 1]]

        # only psi (yaw) is needed by the fixed-path optimizer below
        psi_body_ti, theta_body_ti, phi_body_ti = transformations.euler_from_matrix( R_world_from_body_ti, "ryxz")
        assert allclose( R_world_from_body_ti, transformations.euler_matrix(psi_body_ti, theta_body_ti, phi_body_ti, "ryxz"))

        psi_body[ti] = psi_body_ti

    psi_body = trigutils.compute_continuous_angle_array(psi_body)

    #
    # call the fixed path optimizer
    #
    p_eval        = p_body
    psi_eval      = psi_body
    user_progress = look_from_easing_eval

    const_vals_ti = hstack([ quadrotor3d.alpha, quadrotor3d.beta, quadrotor3d.gamma, quadrotor3d.d, quadrotor3d.m, quadrotor3d.I.A1, quadrotor3d.f_external_world.A1 ])

    opt_problem_type = "track"

    fixed_path_optimized_trajectory = quadrotor3d_fixed_path.optimize( p_eval,psi_eval, \
        t_nominal,user_progress,dt, \
        const_vals_ti, \
        x_min_ti, x_max_ti, \
        u_min_ti, u_max_ti, \
        opt_problem_type )

    s_opt, Beta_opt, Gamma_opt, X_opt, U_opt, t_opt, T_final_opt, solver_time, obj_vals = fixed_path_optimized_trajectory

    #
    # compute look-at position and easing curve
    #
    s_lookat_opt = look_at_easing_eval
    p_lookat_opt = look_at_user_progress

    #
    # color values
    #
    constraint_violation_score_norm_n1p1, constraint_violation_score_norm_01, constraint_violation_colors = \
        quadrotor3d.compute_normalized_constraint_violation_score(X_opt,U_opt,x_min_ti,x_max_ti,u_min_ti,u_max_ti)

    #
    # save_results
    #
    np.savez("tmp_calculate_optimized_easing_curve_ned_result", \
        p_eval=p_eval, \
        psi_eval=psi_eval, \
        t_nominal=t_nominal, \
        user_progress=user_progress, \
        dt=array(dt), \
        const_vals_ti=const_vals_ti, \
        x_min_ti=x_min_ti, \
        x_max_ti=x_max_ti, \
        u_min_ti=u_min_ti, \
        u_max_ti=u_max_ti, \
        opt_problem_type=array(opt_problem_type), \
        s_opt=s_opt, \
        Beta_opt=Beta_opt, \
        Gamma_opt=Gamma_opt, \
        X_opt=X_opt, \
        U_opt=U_opt, \
        t_opt=t_opt, \
        T_final_opt=T_final_opt, \
        solver_time=solver_time, \
        obj_vals=obj_vals, \
        constraint_violation_colors=constraint_violation_colors)

    return s_opt, X_opt, U_opt, t_opt, s_lookat_opt, p_lookat_opt, constraint_violation_colors