# Image processing, compute optical flow
frame_num += 1
frame_gray = picam.get_frame()
next = optflow.undistort(frame_gray)
flow = cv2.calcOpticalFlowFarneback(prvs, next, None, 0.5, 3, 15, 3, 5, 1.1, 0)

# speed
elapsed_time = time.time() - start_time
start_time = time.time()
sl, sr = optflow.get_speed(flow, left_filter, right_filter, elapsed_time)

# update CX neurons from the optical-flow speeds
drone_heading = drone.heading / 180.0 * np.pi
velocity = np.array([sl, sr])
__, __, tb1_optical, __, __, memory_optical, cpu4_optical, __, motor_optical = \
    update_cells(heading=drone_heading, velocity=velocity, tb1=tb1_optical,
                 memory=memory_optical, cx=cx_optical)

# project the GPS velocity onto the heading -/+ 45 degree axes
# (see the standalone projection sketch below)
velocity = drone.velocity
if velocity[0]:
    left_real = (velocity[0]*np.cos(drone_heading-np.pi/4) +
                 velocity[1]*np.cos(drone_heading-np.pi/4-np.pi/2))
    right_real = (velocity[0]*np.cos(drone_heading+np.pi/4) +
                  velocity[1]*np.cos(drone_heading+np.pi/4-np.pi/2))
    velocity_gps = np.array([left_real, right_real]) / 4.0  # normalize velocity to [-1, 1]
    __, __, tb1_gps, __, __, memory_gps, cpu4_gps, __, motor_gps = \
        update_cells(heading=drone_heading, velocity=velocity_gps,
                     tb1=tb1_gps, memory=memory_gps, cx=cx_gps)

# write the frame for later recheck
if RECORDING == 'true':
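# --- Illustrative sketch (not part of the original script) ---------------
# The GPS branch above resolves the drone's world-frame velocity onto two
# axes rotated -45 and +45 degrees from the current heading, i.e. the
# assumed preferred directions of the left/right speed (TN) inputs to the
# CX model. A minimal standalone version of that projection; the function
# name is chosen here for illustration only.
import numpy as np

def project_velocity(vx, vy, heading_rad):
    # cos(a - pi/2) == sin(a), so this matches the arithmetic above
    left = vx * np.cos(heading_rad - np.pi / 4) + vy * np.sin(heading_rad - np.pi / 4)
    right = vx * np.cos(heading_rad + np.pi / 4) + vy * np.sin(heading_rad + np.pi / 4)
    return left, right

# Example: vx=1, vy=0 with heading 0 gives equal left/right components of
# cos(45 deg) ~= 0.707.
print(project_velocity(1.0, 0.0, 0.0))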
speed_right[-1] = sr
ax1.clear()
ax2.clear()
ax3.clear()
ax4.clear()
ax1.plot(x_axis, speed_left, 'r-')
ax2.plot(x_axis, speed_right, 'b-')
ax3.plot(x_axis, speed_left, 'r-')
ax3.plot(x_axis, speed_right, 'b-')
#ax4.bar(index, optical_direction, color='r', label='degree')
plt.draw()

# update cx_neurons
velocity = np.array([sl, sr])
tl2, cl1, tb1, tn1, tn2, memory, cpu4, cpu1, motor = update_cells(
    heading=np.pi, velocity=velocity, tb1=tb1, memory=memory, cx=cx)
angle, distance = cx.decode_cpu4(cpu4)
angle_list[frame_num] = angle / np.pi * 180.0
distance_list[frame_num] = distance

# show frames
cv2.imshow('video', cv2.resize(draw_flow(next, flow), (0, 0), fx=3.0, fy=3.0))
#print('Frame number: ', frame_num)
ch = cv2.waitKey(5) & 0xFF
if ch == ord('q'):
    break
elif ch == ord(' '):
    # pause on space, resume on space, quit on 'q'
    while True:
        ch = cv2.waitKey(5) & 0xFF
        if ch == ord(' ') or ch == ord('q'):
            break
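# --- Illustrative sketch (not part of the original script) ---------------
# cx.decode_cpu4 above turns the CPU4 population into a home-vector
# estimate. The project's actual decoder may differ; one assumed way to
# read out such a population is a population-vector (first Fourier
# component) estimate: treat each CPU4 activation as a sample of a cosine
# bump around the circle and recover its phase (homing angle) and
# amplitude (accumulated distance).
import numpy as np

def decode_home_vector(cpu4):
    cpu4 = np.asarray(cpu4, dtype=float)
    n = len(cpu4)
    prefs = np.linspace(0.0, 2.0 * np.pi, n, endpoint=False)  # preferred angles
    vec = np.sum(cpu4 * np.exp(1j * prefs))                   # complex population vector
    angle = np.angle(vec)        # direction of the stored home vector
    distance = np.abs(vec) / n   # amplitude grows with path-integrated distance
    return angle, distance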
start_time = time.time()
print("Start to update CX model, switch mode to end")
while drone.mode.name == "AUTO":
    frame_num += 1

    # update CX neurons from the GPS velocity projected onto the
    # heading -/+ 45 degree axes
    drone_heading = drone.heading / 180.0 * np.pi
    velocity = drone.velocity
    if velocity[0]:
        left_real = (velocity[0]*np.cos(drone_heading-np.pi/4) +
                     velocity[1]*np.cos(drone_heading-np.pi/4-np.pi/2))
        right_real = (velocity[0]*np.cos(drone_heading+np.pi/4) +
                      velocity[1]*np.cos(drone_heading+np.pi/4-np.pi/2))
        velocity_gps = np.array([left_real, right_real]) / 4.0  # normalize velocity to [-1, 1]
        __, __, tb1_gps, __, __, memory_gps, cpu4_gps, __, motor_gps = \
            update_cells(heading=drone_heading, velocity=velocity_gps,
                         tb1=tb1_gps, memory=memory_gps, cx=cx_gps)

    elapsed_time = time.time() - start_time
    start_time = time.time()

    # logging
    logging.info('sl:{} sr:{} heading:{} velocity:{} position:{}'.format(
        sl, sr, drone.heading, drone.velocity, drone.location.global_relative_frame))
    angle_gps, distance_gps = cx_gps.decode_cpu4(cpu4_gps)
    logging.info('Angle_optical:{} Distance_optical:{} Angle_gps:{} Distance_gps:{} '
                 'elapsed_time:{}'.format((angle_optical/np.pi)*180.0, distance_optical,
                                          (angle_gps/np.pi)*180.0, distance_gps, elapsed_time))

    # monitor the mission
    if nextwaypoint < len(drone.commands):
        if frame_num % 20 == 0:
            display_seq = drone.commands.next
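# --- Illustrative sketch (not part of the original script) ---------------
# The second logging call above records the optical-flow and GPS home-vector
# estimates side by side. An assumed helper for comparing them offline: the
# angular disagreement is wrapped to [-180, 180) degrees so that, e.g.,
# 350 vs 10 degrees counts as a 20-degree error rather than 340.
import numpy as np

def heading_error_deg(angle_optical_rad, angle_gps_rad):
    diff = np.degrees(angle_optical_rad - angle_gps_rad)
    return (diff + 180.0) % 360.0 - 180.0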
speed_right[-1] = sr
ax1.clear()
ax2.clear()
ax3.clear()
ax4.clear()
ax1.plot(x_axis, speed_left, 'r-')
ax2.plot(x_axis, speed_right, 'b-')
ax3.plot(x_axis, speed_left, 'r-')
ax3.plot(x_axis, speed_right, 'b-')
#ax4.bar(index, optical_direction, color='r', label='degree')
plt.draw()

# update cx_neurons, replaying the recorded heading for this frame
velocity = np.array([sl, sr])
tl2, cl1, tb1, tn1, tn2, memory, cpu4, cpu1, motor = update_cells(
    heading=heading_list[frame_num] / 180.0 * np.pi, velocity=velocity,
    tb1=tb1, memory=memory, cx=cx)
angle, distance = cx.decode_cpu4(cpu4)
angle_list[frame_num] = angle / np.pi * 180.0
distance_list[frame_num] = distance

# show frames
cv2.imshow('video', cv2.resize(draw_flow(next, flow), (0, 0), fx=3.0, fy=3.0))
#print('Frame number: ', frame_num)
ch = cv2.waitKey(5) & 0xFF
if ch == ord('q'):
    break
elif ch == ord(' '):
    # pause on space, resume on space, quit on 'q'
    while True:
        ch = cv2.waitKey(5) & 0xFF
        if ch == ord(' ') or ch == ord('q'):
            break
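# --- Illustrative sketch (not part of the original script) ---------------
# Once the replay loop above finishes, the decoded estimates collected in
# angle_list and distance_list can be summarised over the frame index. This
# post-run plot is an assumed addition, not part of the original analysis.
import matplotlib.pyplot as plt

def plot_home_vector(angle_deg, distance):
    """Plot decoded homing angle (degrees) and distance against frame index."""
    fig, (ax_angle, ax_dist) = plt.subplots(2, 1, sharex=True)
    ax_angle.plot(angle_deg, 'g-')
    ax_angle.set_ylabel('decoded angle (deg)')
    ax_dist.plot(distance, 'm-')
    ax_dist.set_ylabel('decoded distance')
    ax_dist.set_xlabel('frame')
    plt.show()

# e.g. plot_home_vector(angle_list, distance_list) after the loop ends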