def publish_desired_mode(self, mode):
    """Publish the desired Mode message.

    mode : a string that is either 'ARMED', 'DISARMED', or 'FLYING'
    """
    desired_mode_msg = Mode()
    desired_mode_msg.mode = mode
    self.desired_mode_pub.publish(desired_mode_msg)
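# --- Usage sketch (not from the original source): one way publish_desired_mode()
# could be wired up. The node name and the 'desired/mode' topic are assumptions
# borrowed from the keyboard/joystick controller further below; treat them as
# illustrative only.
import rospy
from pidrone_pkg.msg import Mode


class ModePublisherExample(object):
    def __init__(self):
        # publisher assumed by publish_desired_mode()
        self.desired_mode_pub = rospy.Publisher('desired/mode', Mode, queue_size=1)

    def publish_desired_mode(self, mode):
        desired_mode_msg = Mode()
        desired_mode_msg.mode = mode
        self.desired_mode_pub.publish(desired_mode_msg)


if __name__ == '__main__':
    rospy.init_node('mode_publisher_example')
    ModePublisherExample().publish_desired_mode('DISARMED')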
def img_callback(data):
    curr_time = rospy.get_time()
    img = bridge.imgmsg_to_cv2(data, 'mono8')
    # img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    global first_img
    global first_kp
    global first_des
    global prev_time
    global lr_err, fb_err, lr_pid, fb_pid
    global frame_counter
    global smoothed_pos
    alpha = 0.6
    if first_img is None:
        first_img = deepcopy(img)
        first_kp, first_des = orb.detectAndCompute(first_img, None)
        print 'Found {} features for the first img!'.format(len(first_kp))
    else:
        H = get_H(img)
        if H is not None:
            R, t, n = get_RT(H)
            RT = np.identity(4)
            RT[0:3, 0:3] = R[0:3, 0:3]
            if np.linalg.norm(t) < 50:
                RT[0:3, 3] = t.T[0]
            new_pos = compose_pose(RT)
            # exponentially smooth the estimated position
            smoothed_pos.header = new_pos.header
            smoothed_pos.pose.position.x = (1. - alpha) * smoothed_pos.pose.position.x + alpha * new_pos.pose.position.x
            smoothed_pos.pose.position.y = (1. - alpha) * smoothed_pos.pose.position.y + alpha * new_pos.pose.position.y
            mode = Mode()
            mode.mode = 5
            lr_err.err = smoothed_pos.pose.position.x
            fb_err.err = smoothed_pos.pose.position.y
            mode.x_velocity = lr_pid.step(lr_err.err, prev_time - curr_time)
            mode.y_velocity = fb_pid.step(fb_err.err, prev_time - curr_time)
            print 'lr', mode.x_velocity, 'fb', mode.y_velocity
            prev_time = curr_time
            # mode.header.stamp = rospy.get_rostime()
            pospub.publish(mode)
        else:
            mode = Mode()
            mode.mode = 5
            mode.x_velocity = 0
            mode.y_velocity = 0
            print "LOST"
            prev_time = curr_time
            pospub.publish(mode)
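# --- Hypothetical sketch (not the original source) of the get_H() helper
# referenced above: estimate a homography between the first frame and the
# current frame from ORB matches, using the same FLANN/LSH matcher settings
# that appear elsewhere in this codebase. The function and variable names are
# assumptions; only the cv2 calls are real APIs.
import cv2
import numpy as np

orb = cv2.ORB(nfeatures=120, scoreType=cv2.ORB_FAST_SCORE)
index_params = dict(algorithm=6, table_number=6, key_size=12, multi_probe_level=1)
matcher = cv2.FlannBasedMatcher(index_params, dict(checks=50))


def get_H(curr_img):
    curr_kp, curr_des = orb.detectAndCompute(curr_img, None)
    if curr_des is None or first_des is None:
        return None
    # Lowe ratio test on k-nearest-neighbour matches
    matches = matcher.knnMatch(first_des, curr_des, k=2)
    good = [m[0] for m in matches
            if len(m) == 2 and m[0].distance < 0.7 * m[1].distance]
    if len(good) < 10:
        return None
    src = np.float32([first_kp[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
    dst = np.float32([curr_kp[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
    H, mask = cv2.findHomography(src, dst, cv2.RANSAC, 5.0)
    return H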
def __init__(self, camera, bridge):
    picamera.array.PiMotionAnalysis.__init__(self, camera)
    self.bridge = bridge
    self.br = tf.TransformBroadcaster()

    rospy.Subscriber("/pidrone/set_mode", Mode, self.mode_callback)
    rospy.Subscriber("/pidrone/reset_transform", Empty, self.reset_callback)
    rospy.Subscriber("/pidrone/toggle_transform", Empty, self.toggle_callback)
    rospy.Subscriber("/pidrone/infrared", Range, self.range_callback)
    self.pospub = rospy.Publisher('/pidrone/set_mode_vel', Mode, queue_size=1)
    self.first_image_pub = rospy.Publisher("/pidrone/picamera/first_image", Image, queue_size=1, latch=True)

    self.lr_pid = PIDaxis(4.00, -0.00000, 0.000, midpoint=0, control_range=(-10.0, 10.0))
    self.fb_pid = PIDaxis(4.00, 0.0000, -0.000, midpoint=0, control_range=(-10.0, 10.0))

    self.prev_img = None
    self.detector = cv2.ORB(nfeatures=120, scoreType=cv2.ORB_FAST_SCORE)
    self.index_params = dict(algorithm=6, table_number=6, key_size=12, multi_probe_level=1)
    self.search_params = dict(checks=50)
    self.matcher = cv2.FlannBasedMatcher(self.index_params, self.search_params)
    self.first_kp = None
    self.first_des = None
    self.prev_kp = None
    self.prev_des = None
    self.first = True
    self.lr_err = ERR()
    self.fb_err = ERR()
    self.prev_time = None
    self.prev_rostime = None
    self.pos = [0, 0]
    self.z = 0.075
    self.smoothed_yaw = 0.0
    self.yaw_observed = 0.0
    self.iacc_yaw = 0.0
    self.transforming = False
    self.last_first_time = None
    self.target_x = 0
    self.target_y = 0
    self.first_counter = 0
    self.max_first_counter = 0
    self.mode = Mode()

    # constants
    self.kp_yaw = 80.0
    self.ki_yaw = 0.1
    self.kpi_yaw = 15.0
    self.kpi_max_yaw = 0.01
    self.alpha_yaw = 0.1  # perceived yaw smoothing alpha
    self.hybrid_alpha = 0.1  # blend position with first frame and int
def __init__(self, camera, bridge):
    picamera.array.PiMotionAnalysis.__init__(self, camera)
    self.bridge = bridge
    self.br = tf.TransformBroadcaster()

    rospy.Subscriber("/pidrone/set_mode", Mode, self.mode_callback)
    rospy.Subscriber("/pidrone/reset_transform", Empty, self.reset_callback)
    rospy.Subscriber("/pidrone/toggle_transform", Empty, self.toggle_callback)
    rospy.Subscriber("/pidrone/infrared", Range, self.range_callback)
    self.pospub = rospy.Publisher('/pidrone/set_mode_vel', Mode, queue_size=1)
    self.first_image_pub = rospy.Publisher("/pidrone/picamera/first_image", Image, queue_size=1, latch=True)

    self.prev_img = None
    self.prev_kp = None
    self.prev_des = None
    self.first = True
    self.lr_err = ERR()
    self.fb_err = ERR()
    self.prev_time = None
    self.prev_rostime = None
    self.pos = [0, 0]
    self.z = 0.075
    self.smoothed_yaw = 0.0
    self.yaw_observed = 0.0
    self.iacc_yaw = 0.0
    self.transforming = False
    self.last_first_time = None
    self.target_x = 0
    self.target_y = 0
    self.first_counter = 0
    self.max_first_counter = 0
    self.mode = Mode()
    self.i = 1
    self.board = MultiWii("/dev/ttyUSB0")
def write(self, data):
    img = np.reshape(np.fromstring(data, dtype=np.uint8), (240, 320, 3))
    # cv2.imshow("img", img)
    # cv2.waitKey(1)
    if self.first:
        self.first = False
        self.first_img = cv2.cvtColor(np.float32(img), cv2.COLOR_RGB2GRAY)
        self.prev_img = deepcopy(self.first_img)
        self.prev_time = rospy.get_time()
    else:
        curr_img = cv2.cvtColor(np.float32(img), cv2.COLOR_RGB2GRAY)
        curr_time = rospy.get_time()
        # phase-correlate against both the first frame and the previous frame
        corr_first = cv2.phaseCorrelate(self.first_img, curr_img)
        corr_int = cv2.phaseCorrelate(self.prev_img, curr_img)
        self.prev_img = curr_img
        print 'first', corr_first
        print 'int', corr_int
        if corr_first[1] > self.threshold:
            # not lost: use the absolute shift relative to the first frame
            print "first"
            self.pos = [corr_first[0][0] * self.z / 320.,
                        corr_first[0][1] * self.z / 240.]
        else:
            # lost the first frame: integrate the frame-to-frame shift
            print "integrated"
            self.pos[0] += corr_int[0][0] * self.z / 320.
            self.pos[1] += corr_int[0][1] * self.z / 240.
        self.lr_err.err = self.pos[0]
        self.fb_err.err = self.pos[1]
        mode = Mode()
        mode.x_i += self.lr_pid.step(self.lr_err.err, self.prev_time - curr_time)
        mode.y_i += self.fb_pid.step(self.fb_err.err, self.prev_time - curr_time)
        self.pospub.publish(mode)
        self.prev_time = curr_time
import rospy
from pidrone_pkg.msg import Mode

if __name__ == '__main__':
    rospy.init_node('terminal_mode_selector')
    modepub = rospy.Publisher('/pidrone/set_mode', Mode, queue_size=1)
    valid_modes = [0, 1, 3, 4, 5]
    mode_msg = Mode()
    print 'Valid modes are', valid_modes
    while not rospy.is_shutdown():
        raw_mode = raw_input('Type a mode number and press enter:\t')
        try:
            des_mode = int(raw_mode.strip())
            if des_mode in valid_modes:
                mode_msg.mode = des_mode
                print 'Sending mode {}!'.format(des_mode)
                modepub.publish(mode_msg)
            else:
                print "You entered an integer, but mode {} isn't supported... yet...".format(des_mode)
        except ValueError:
            print '{} is not an integer'.format(raw_mode)
def __init__(self):
    # Initialize the current and desired modes
    self.current_mode = Mode('DISARMED')
    self.desired_mode = Mode('DISARMED')

    # Initialize in velocity control
    self.position_control = False

    # Initialize the current and desired positions
    self.current_position = Position()
    self.desired_position = Position(z=0.3)
    self.last_desired_position = Position(z=0.3)

    # Initialize the position error
    self.position_error = Error()

    # Initialize the current and desired velocities
    self.current_velocity = Velocity()
    self.desired_velocity = Velocity()

    # Initialize the velocity error
    self.velocity_error = Error()

    # Set the distance that a velocity command will move the drone (m)
    self.desired_velocity_travel_distance = 0.1

    # Set a static duration that a velocity command will be held
    self.desired_velocity_travel_time = 0.1

    # Set a static duration that a yaw velocity command will be held
    self.desired_yaw_velocity_travel_time = 0.25

    # Store the start time of the desired velocities
    self.desired_velocity_start_time = None
    self.desired_yaw_velocity_start_time = None

    # Initialize the primary PID
    self.pid = PID()

    # Initialize the error used for the PID, which is (vx, vy, z), where vx and
    # vy are velocities and z is the error in the altitude of the drone
    self.pid_error = Error()

    # Initialize the 'position error to velocity error' PIDs:
    # left/right (roll) pid
    self.lr_pid = PIDaxis(kp=10.0, ki=0.5, kd=5.0, midpoint=0, control_range=(-10.0, 10.0))
    # front/back (pitch) pid
    self.fb_pid = PIDaxis(kp=10.0, ki=0.5, kd=5.0, midpoint=0, control_range=(-10.0, 10.0))

    # Initialize the pose callback time
    self.last_pose_time = None

    # Initialize the desired yaw velocity
    self.desired_yaw_velocity = 0

    # Initialize the current and previous roll, pitch, yaw values
    self.current_rpy = RPY()
    self.previous_rpy = RPY()

    # Initialize the current and previous states
    self.current_state = State()
    self.previous_state = State()

    # Store the command publisher
    self.cmdpub = None

    # A variable used to determine if the drone is moving between desired
    # positions
    self.moving = False

    # The maximum magnitude of the position error. Any greater position error
    # will override the drone into velocity control
    self.safety_threshold = 1.5

    # Determines if the position of the drone is known
    self.lost = False

    # Determines if the desired poses are absolute or relative to the drone
    self.absolute_desired_position = False

    # Determines whether to use open-loop velocity path planning, which is
    # accomplished by calculate_travel_time
    self.path_planning = True
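# --- Hypothetical sketch (not the author's implementation): how the
# safety_threshold above could be used to drop back from position control to
# velocity control when the position error grows too large. It assumes Error()
# exposes x/y/z fields; the method name is made up.
import numpy as np


def check_safety(self):
    err = np.array([self.position_error.x,
                    self.position_error.y,
                    self.position_error.z])
    if np.linalg.norm(err) > self.safety_threshold:
        # position estimate has diverged; override into velocity control
        self.position_control = False
        self.lost = True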
def __init__(self):
    self.bridge = CvBridge()
    self.br = tf.TransformBroadcaster()

    # self.lr_pid = PIDaxis(11.0, 0.001, 0.001, midpoint=0, control_range=(-5.0, 5.0))
    # self.fb_pid = PIDaxis(11.0, 0.001, 0.001, midpoint=0, control_range=(-5.0, 5.0))
    self.lr_pid = PIDaxis(9.0, 0.001, 0.001, midpoint=0, control_range=(-4.0, 4.0))
    self.fb_pid = PIDaxis(9.0, 0.001, 0.001, midpoint=0, control_range=(-4.0, 4.0))

    self.detector = cv2.ORB(nfeatures=200, scoreType=cv2.ORB_FAST_SCORE)  # FAST_SCORE is a little faster to compute

    map_grid_kp, map_grid_des = create_map('big_map.jpg')
    self.estimator = LocalizationParticleFilter(map_grid_kp, map_grid_des)

    self.first_locate = True
    self.first_hold = True
    self.prev_img = None
    self.prev_kp = None
    self.prev_des = None
    self.locate_position = False
    self.prev_time = None
    self.prev_rostime = None
    self.pos = [0, 0, 0]
    self.yaw = 0.0
    self.z = 0.075
    self.set_z = INIZ_Z
    self.iacc_yaw = 0.0
    self.hold_position = False
    self.target_pos = [0, 0, self.set_z]
    self.target_point = Point()
    self.target_yaw = 0.0
    self.map_counter = 0
    self.max_map_counter = 0
    self.mode = Mode()
    self.mode.mode = 5

    # constants
    self.kp_yaw = 40.0
    self.ki_yaw = 0.1
    self.alpha_yaw = 0.1  # perceived yaw smoothing alpha
    self.hybrid_alpha = 0.75  # blend position with first frame and int

    # angle
    self.angle_x = 0.0  # the hz of state_controller is different
    self.angle_y = 0.0

    # TODO: current z position is controlled by state_controller
    self.increment_z = 0

    # path from MDP
    self.path = []
    self.path_index = -1

    rospy.Subscriber("/pidrone/set_mode", Mode, self.mode_callback)
    rospy.Subscriber('/pidrone/path', Float32MultiArray, self.path_callback)
    rospy.Subscriber('/hololens/path', String, self.hololens_path_callback)
    rospy.Subscriber("/pidrone/reset_transform", Empty, self.reset_callback)
    rospy.Subscriber("/pidrone/toggle_transform", Empty, self.toggle_callback)
    rospy.Subscriber("/pidrone/capture_photo", Empty, self.capture_callback)
    rospy.Subscriber("/pidrone/infrared", Range, self.range_callback)
    rospy.Subscriber('/pidrone/angle', TwistStamped, self.angle_callback)
    rospy.Subscriber('/pidrone/picamera/image_raw', Image, self.image_callback)

    self.pospub = rospy.Publisher('/pidrone/set_mode_vel', Mode, queue_size=1)
    self.target_pos_pub = rospy.Publisher('/pidrone/drone_position', Point, queue_size=1)
    self.first_image_pub = rospy.Publisher("/pidrone/picamera/first_image", Image, queue_size=1, latch=True)
    self.captured_image_pub = rospy.Publisher("/pidrone/picamera/captured_image", CompressedImage, queue_size=1, latch=True)
def write(self, data):
    global first_counter
    global max_first_counter
    global replan_period
    global replan_last_reset
    global replan_next_deadline
    global replan_until_deadline
    global replan_vel_x
    global replan_vel_y
    global replan_scale
    global vel_average
    global vel_alpha
    global vel_average_time

    img = np.reshape(np.fromstring(data, dtype=np.uint8), (240, 320, 3))
    # cv2.imshow("img", img)
    # cv2.waitKey(1)
    curr_rostime = rospy.Time.now()
    curr_time = curr_rostime.to_sec()

    shouldi_set_velocity = 1  # 0
    if np.abs(curr_time - replan_last_reset) > replan_period:
        replan_last_reset = curr_time
        replan_next_deadline = replan_last_reset + replan_period
        shouldi_set_velocity = 1
    replan_until_deadline = replan_next_deadline - curr_time
    # print curr_time, replan_last_reset, replan_next_deadline, replan_until_deadline, replan_vel_x, replan_vel_y

    if self.first:
        print "taking new first"
        self.first = False
        # self.first_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        self.first_img = img
        cv2.imwrite("first_img" + str(self.i) + ".jpg", self.first_img)
        # self.prev_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        self.prev_img = img
        self.prev_rostime = curr_rostime
        self.prev_time = curr_time
        self.i += 1
        image_message = self.bridge.cv2_to_imgmsg(img, encoding="bgr8")
        self.first_image_pub.publish(image_message)
    elif self.transforming:
        # curr_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        curr_img = img
        corr_first = cv2.estimateRigidTransform(self.first_img, curr_img, False)
        corr_int = cv2.estimateRigidTransform(self.prev_img, curr_img, False)
        self.prev_img = curr_img
        self.prev_rostime = curr_rostime
        self.prev_time = curr_time
        if corr_first is not None:
            self.last_first_time = curr_time
            if curr_time - self.last_first_time > 2:
                self.first = True
            first_displacement = [corr_first[0, 2] / 320., corr_first[1, 2] / 240.]
            scalex = np.linalg.norm(corr_first[:, 0])
            scalez = np.linalg.norm(corr_first[:, 1])
            corr_first[:, 0] /= scalex
            corr_first[:, 1] /= scalez
            self.yaw_observed = math.atan2(corr_first[1, 0], corr_first[0, 0])
            # print first_displacement, yaw_observed
            # yaw = yaw_observed
            self.smoothed_yaw = (1.0 - self.alpha_yaw) * self.smoothed_yaw + self.alpha_yaw * self.yaw_observed
            yaw = self.smoothed_yaw
            vel_average[0] = (1.0 - vel_alpha) * vel_average[0] + vel_alpha * self.pos[0]
            # jgo XXX see what happens if we dont reset upon seeing first
            # self.pos = [first_displacement[0] * self.z, first_displacement[1] * self.z / 240., yaw]
            # jgo XXX see what happens if we use alpha blending
            hybrid_alpha = 0.1  # needs to be between 0 and 1.0
            self.pos[0:2] = [hybrid_alpha * first_displacement[0] * self.z + (1.0 - hybrid_alpha) * self.pos[0],
                             hybrid_alpha * first_displacement[1] * self.z + (1.0 - hybrid_alpha) * self.pos[1]]
            vel_average[0] = (1.0 - vel_alpha) * vel_average[0] + vel_alpha * self.pos[0]
            vel_average[1] = (1.0 - vel_alpha) * vel_average[1] + vel_alpha * self.pos[1]
            vel_average_time = (1.0 - vel_alpha) * vel_average_time + vel_alpha * curr_time
            # print "times: ", vel_average_time, curr_time, curr_time - vel_average_time
            # self.lr_err.err = vel_average[0] + self.target_x
            # self.fb_err.err = vel_average[1] + self.target_y
            self.lr_err.err = self.pos[0] + self.target_x
            self.fb_err.err = self.pos[1] + self.target_y
            print "ERR", self.lr_err.err, self.fb_err.err
            mode = Mode()
            mode.mode = 5
            mode.x_i += self.lr_pid.step(self.lr_err.err, self.prev_time - curr_time)
            mode.y_i += self.fb_pid.step(self.fb_err.err, self.prev_time - curr_time)
            # mode.x_velocity = mode.x_i
            # mode.y_velocity = mode.y_i
            # jgo XXX LOLOL constant velocity controller
            first_counter = first_counter + 1
            max_first_counter = max(first_counter, max_first_counter)
            cvc_norm = np.sqrt(mode.x_i * mode.x_i + mode.y_i * mode.y_i)
            if cvc_norm <= 0.01:
                cvc_norm = 1.0
            # XXX
            cvc_vel = 1.00  # 0.15 # 0.25
            if shouldi_set_velocity:
                replan_vel_x = mode.x_i * replan_scale / max(replan_until_deadline, 0.1)
                replan_vel_y = mode.y_i * replan_scale / max(replan_until_deadline, 0.1)
                replan_vel_x = min(replan_vel_x, 1.0)
                replan_vel_y = min(replan_vel_y, 1.0)
            # XXX coast if first frame found but still do PID update to
            # integrate!
            # mode.x_velocity = 0
            # mode.y_velocity = 0
            mode.x_velocity = cvc_vel * mode.x_i
            mode.y_velocity = cvc_vel * mode.y_i
            # mode.x_velocity = cvc_vel * mode.x_i / cvc_norm
            # mode.y_velocity = cvc_vel * mode.y_i / cvc_norm
            # mode.x_velocity = replan_vel_x
            # mode.y_velocity = replan_vel_y
            self.iacc_yaw += yaw * self.ki_yaw
            yaw_kpi_term = np.sign(yaw) * yaw * yaw * self.kpi_yaw
            if abs(yaw_kpi_term) < self.kpi_max_yaw:
                self.iacc_yaw += yaw_kpi_term
            else:
                self.iacc_yaw += np.sign(yaw) * self.kpi_max_yaw
            mode.yaw_velocity = yaw * self.kp_yaw + self.iacc_yaw
            print "yaw iacc: ", self.iacc_yaw
            self.pospub.publish(mode)
            print "first", max_first_counter, first_counter
        elif corr_int is not None:
            time_since_first = curr_time - self.last_first_time
            print "integrated", time_since_first
            print "max_first_counter: ", max_first_counter
            int_displacement = [corr_int[0, 2] / 320., corr_int[1, 2] / 240.]
            scalex = np.linalg.norm(corr_int[:, 0])
            scalez = np.linalg.norm(corr_int[:, 1])
            corr_int[:, 0] /= scalex
            corr_int[:, 1] /= scalez
            yaw = math.atan2(corr_int[1, 0], corr_int[0, 0])
            print int_displacement, yaw
            self.pos[0] += int_displacement[0] * self.z
            self.pos[1] += int_displacement[1] * self.z
            # self.pos[2] = yaw
            # vel_average = (1.0 - vel_alpha) * vel_average[0] + (vel_alpha) * self.pos[0]
            # vel_average = (1.0 - vel_alpha) * vel_average[1] + (vel_alpha) * self.pos[1]
            vel_average_time = (1.0 - vel_alpha) * vel_average_time + vel_alpha * curr_time
            # print "times: ", vel_average_time, curr_time, curr_time - vel_average_time
            # self.lr_err.err = vel_average[0] + self.target_x
            # self.fb_err.err = vel_average[1] + self.target_y
            self.lr_err.err = self.pos[0] + self.target_x
            self.fb_err.err = self.pos[1] + self.target_y
            print "ERR", self.lr_err.err, self.fb_err.err
            mode = Mode()
            mode.mode = 5
            mode.x_i += self.lr_pid.step(self.lr_err.err, self.prev_time - curr_time)
            mode.y_i += self.fb_pid.step(self.fb_err.err, self.prev_time - curr_time)
            # jgo XXX LOLOL constant velocity controller
            first_counter = 0
            cvc_norm = np.sqrt(mode.x_i * mode.x_i + mode.y_i * mode.y_i)
            if cvc_norm <= 0.01:
                cvc_norm = 1.0
            cvc_vel = 3.0  # 0.25 # 1.0
            if shouldi_set_velocity:
                # replan_vel_x = mode.x_i * replan_scale  # * cvc_vel
                # replan_vel_y = mode.y_i * replan_scale  # * cvc_vel
                replan_vel_x = mode.x_i * replan_scale / max(replan_until_deadline, 0.1)
                replan_vel_y = mode.y_i * replan_scale / max(replan_until_deadline, 0.1)
                replan_vel_x = min(replan_vel_x, 1.0)
                replan_vel_y = min(replan_vel_y, 1.0)
            # cvc_vel = max(min(time_since_first * 0.1, 1.0), 0.0)
            mode.x_velocity = cvc_vel * mode.x_i
            mode.y_velocity = cvc_vel * mode.y_i
            # mode.x_velocity = cvc_vel * mode.x_i / cvc_norm
            # mode.y_velocity = cvc_vel * mode.y_i / cvc_norm
            # mode.x_velocity = replan_vel_x
            # mode.y_velocity = replan_vel_y
            # mode.yaw_velocity = yaw * self.kp_yaw
            # yaw i term only
            mode.yaw_velocity = self.iacc_yaw
            print "yaw iacc: ", self.iacc_yaw
            self.pospub.publish(mode)
        else:
            print "LOST"
    else:
        # print "Not transforming"
        # curr_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        curr_img = img
        corr_first = cv2.estimateRigidTransform(self.first_img, curr_img, False)
        if corr_first is not None:
            # print "first"
            self.last_first_time = rospy.get_time()
            if curr_time - self.last_first_time > 2:
                self.first = True
        else:
            print "no first", curr_time - self.last_first_time
        self.prev_img = curr_img
        self.prev_rostime = curr_rostime
        self.prev_time = curr_time

    prev_time = curr_time
    # print "smoothed yaw: ", self.smoothed_yaw
    self.br.sendTransform((self.pos[0] * 0.01, self.pos[1] * 0.01, self.pos[2] * 0.01),
                          tf.transformations.quaternion_from_euler(0, 0, self.yaw_observed),
                          rospy.Time.now(),
                          "base",
                          "world")
def disarming(self):
    mode = Mode()
    mode.mode = "DISARMED"
    return mode


def takeoff(self):
    mode = Mode()
    mode.mode = "FLYING"
    return mode
def disarming():
    print "disarm"
    mode = Mode()
    mode.mode = "DISARMED"
    return mode


def arm():
    print "arm"
    mode = Mode()
    mode.mode = "ARMED"
    return mode
import rospy
from pidrone_pkg.msg import Mode
from sensor_msgs.msg import Joy
from std_msgs.msg import Empty
import numpy as np
import os

z_total_steps = 24
# z_counter = (z_total_steps / 4) - 1
z_counter = -1
z_step = 5  # cm
scalar = 15

mode = Mode()
mode.mode = 4
modepub = rospy.Publisher('/pidrone/set_mode', Mode, queue_size=1)
resetpub = rospy.Publisher('/pidrone/reset_transform', Empty, queue_size=1)
togglepub = rospy.Publisher('/pidrone/toggle_transform', Empty, queue_size=1)


def joy_callback(data):
    global scalar
    global modepub
    global mode
    global resetpub
    global z_counter
    global z_step
    global z_total_steps
    print "callback"
    if data.buttons[3] == 1:
        z_counter = (z_counter + 1) % z_total_steps
        print 3, "Z Stepping", z_counter
        mode.mode = 5
def __init__(self, camera, bridge):
    picamera.array.PiMotionAnalysis.__init__(self, camera)
    self.bridge = bridge
    self.br = tf.TransformBroadcaster()

    rospy.Subscriber("/pidrone/set_mode", Mode, self.mode_callback)
    rospy.Subscriber("/pidrone/reset_transform", Empty, self.reset_callback)
    rospy.Subscriber("/pidrone/toggle_transform", Empty, self.toggle_callback)
    rospy.Subscriber("/pidrone/infrared", Range, self.range_callback)
    rospy.Subscriber('/pidrone/angle_and_velocity', Twist, self.angle_and_velocity_callback)
    self.pospub = rospy.Publisher('/pidrone/set_mode_vel', Mode, queue_size=1)
    self.first_image_pub = rospy.Publisher("/pidrone/picamera/first_image", Image, queue_size=1, latch=True)

    self.lr_pid = PIDaxis(10.0, 0.000, 1.0, midpoint=0, control_range=(-10.0, 10.0))
    self.fb_pid = PIDaxis(10.0, 0.000, 1.0, midpoint=0, control_range=(-10.0, 10.0))

    self.detector = cv2.ORB(nfeatures=120, scoreType=cv2.ORB_FAST_SCORE)  # FAST_SCORE is a little faster to compute

    map_imgs = [cv2.imread('map1.jpg'), cv2.imread('map2.jpg'),
                cv2.imread('map3.jpg'), cv2.imread('map4.jpg')]
    map_detector = cv2.ORB(nfeatures=500, scoreType=cv2.ORB_FAST_SCORE)
    # TODO: use a particle filter so we can use more features, which makes
    # GridAdaptedFeatureDetector work better
    # map_detector = cv2.GridAdaptedFeatureDetector(self.detector, maxTotalKeypoints=500)  # find features evenly
    self.map_index = 0
    self.map_features = []

    # save the features for each map in an array
    for map_img in map_imgs:
        map_kp = map_detector.detect(map_img, None)
        map_kp, map_des = self.detector.compute(map_img, map_kp)
        if map_kp is None or map_des is None:
            print "Map features could not be detected and computed!"
            sys.exit()
        else:
            self.map_features.append((map_kp, map_des))
    self.map_kp, self.map_des = self.map_features[0]

    self.prev_img = None

    index_params = dict(algorithm=6, table_number=6, key_size=12, multi_probe_level=1)
    search_params = dict(checks=50)
    self.matcher = cv2.FlannBasedMatcher(index_params, search_params)

    self.prev_kp = None
    self.prev_des = None
    self.first = True
    self.prev_time = None
    self.prev_rostime = None
    self.pos = [0, 0]
    self.z = 0.075
    self.angle_x = 0.0  # the hz of state_controller is different
    self.angle_y = 0.0
    self.smoothed_yaw = 0.0
    self.yaw_observed = 0.0
    self.iacc_yaw = 0.0
    self.transforming = False
    self.target_pos = [0, 0]
    self.map_counter = 0
    self.max_map_counter = 0
    self.map_missing_counter = 0
    self.mode = Mode()
    self.mode.mode = 5

    # constants
    self.kp_yaw = 100.0
    self.ki_yaw = 0.1
    self.alpha_yaw = 0.1  # perceived yaw smoothing alpha
    self.hybrid_alpha = 0.2  # blend position with first frame and int
def ctrl_c_handler(self, signal, frame):
    """Disarm the drone and quit the flight controller node."""
    print("\nCaught ctrl-c! About to disarm!")
    self.desired_mode_callback(Mode('DISARMED'))
    rospy.sleep(1)
    sys.exit()
import rospy
from pidrone_pkg.msg import Mode
from geometry_msgs.msg import PoseStamped
import numpy as np
from copy import deepcopy

mode = Mode()
mode.mode = 5
modepub = rospy.Publisher('/pidrone/set_mode', Mode, queue_size=1)
max_accel = np.array([25., 25., 1, 1])  # cm/s/s
movements = []
pub_frequency = 10.


def sp_callback(data):
    global movements
    movement = np.array([data.pose.position.x, data.pose.position.y, 0, 0])
    movements.append(movement)


if __name__ == "__main__":
    rospy.init_node("acceleration_controller")
    rospy.Subscriber("/pidrone/pos_setpoint", PoseStamped, sp_callback)
    global max_accel, movements, pub_frequency
    while not rospy.is_shutdown():
        if len(movements) > 0:
            # movement is the desired displacement vector
            movement = movements.pop(0)
            movement /= 2.
            travel_time = np.max(np.sqrt(2. * np.abs(movement) / max_accel))
            # the desired acceleration in each axis to ramp smoothly
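# --- Hypothetical sketch (not the original code, which is cut off above): one
# way the loop could turn travel_time into a smooth velocity ramp. It assumes
# the constant-acceleration relation d = 0.5*a*t^2 and reuses the module-level
# mode, modepub, and pub_frequency from above; the helper name is made up.
def publish_velocity_ramp(movement, travel_time):
    if travel_time <= 0.:
        return
    accel = 2. * movement / (travel_time ** 2)  # per-axis acceleration, cm/s/s
    rate = rospy.Rate(pub_frequency)
    start = rospy.get_time()
    while not rospy.is_shutdown() and rospy.get_time() - start < 2. * travel_time:
        t = rospy.get_time() - start
        # accelerate for the first half of the move, decelerate for the second
        vel = accel * t if t < travel_time else accel * (2. * travel_time - t)
        mode.x_velocity = vel[0]
        mode.y_velocity = vel[1]
        modepub.publish(mode)
        rate.sleep()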
def write(self, data):
    img = np.reshape(np.fromstring(data, dtype=np.uint8), (240, 320, 3))
    # cv2.imshow("img", img)
    # cv2.waitKey(1)
    if self.first:
        self.first = False
        self.first_kp, self.first_des = self.orb.detectAndCompute(img, None)
        print len(self.first_kp), "features for first"
        self.prev_kp = deepcopy(self.first_kp)
        self.prev_des = deepcopy(self.first_des)
        self.prev_time = rospy.get_time()
    else:
        curr_time = rospy.get_time()
        H, int_H = self.get_H(img)
        if H is not None:
            print "first"
            R, t, n = self.get_RT(H)
            RT = np.identity(4)
            RT[0:3, 0:3] = R[0:3, 0:3]
            if np.linalg.norm(t) < 50:
                RT[0:3, 3] = t.T[0]
            self.est_RT = RT
            new_pos = self.compose_pose(self.est_RT)
            self.smoothed_pos.header = new_pos.header
            self.smoothed_pos.pose.position.x = (1. - self.alpha) * self.smoothed_pos.pose.position.x + self.alpha * new_pos.pose.position.x
            self.smoothed_pos.pose.position.y = (1. - self.alpha) * self.smoothed_pos.pose.position.y + self.alpha * new_pos.pose.position.y
            mode = Mode()
            mode.mode = 5
            self.lr_err.err = self.smoothed_pos.pose.position.x
            self.fb_err.err = self.smoothed_pos.pose.position.y
            mode.x_i += self.lr_pid.step(self.lr_err.err, self.prev_time - curr_time)
            mode.y_i += self.fb_pid.step(self.fb_err.err, self.prev_time - curr_time)
            self.prev_time = curr_time
            mode.featureset = 1.
            self.pospub.publish(mode)
        elif int_H is not None:
            print "only integrated"
            R, t, n = self.get_RT(int_H)
            RT = np.identity(4)
            RT[0:3, 0:3] = R[0:3, 0:3]
            if np.linalg.norm(t) < 50:
                RT[0:3, 3] = t.T[0]
            self.est_RT = np.dot(RT, self.est_RT)
            new_pos = self.compose_pose(self.est_RT)
            self.smoothed_pos.header = new_pos.header
            self.smoothed_pos.pose.position.x = (1. - self.alpha) * self.smoothed_pos.pose.position.x + self.alpha * new_pos.pose.position.x
            self.smoothed_pos.pose.position.y = (1. - self.alpha) * self.smoothed_pos.pose.position.y + self.alpha * new_pos.pose.position.y
            mode = Mode()
            mode.mode = 5
            self.lr_err.err = self.smoothed_pos.pose.position.x
            self.fb_err.err = self.smoothed_pos.pose.position.y
            mode.x_i += self.lr_pid.step(self.lr_err.err, self.prev_time - curr_time)
            mode.y_i += self.fb_pid.step(self.fb_err.err, self.prev_time - curr_time)
            self.prev_time = curr_time
            mode.featureset = -1.
            self.pospub.publish(mode)
        else:
            print "LOST"
            self.prev_time = curr_time
def __init__(self):
    self.bridge = CvBridge()
    self.br = tf.TransformBroadcaster()

    self.lr_pid = PIDaxis(10.0, 0.000, 0.0, midpoint=0, control_range=(-5.0, 5.0))
    self.fb_pid = PIDaxis(10.0, 0.000, 0.0, midpoint=0, control_range=(-5.0, 5.0))

    self.detector = cv2.ORB(nfeatures=NUM_FEATURES, scoreType=cv2.ORB_FAST_SCORE)
    self.estimator = FastSLAM()

    self.first_locate = True
    self.first_hold = True
    self.prev_img = None
    self.prev_kp = None
    self.prev_des = None
    self.locate_position = False
    self.prev_time = None
    self.prev_rostime = None
    self.pos = [0, 0, 0]
    self.yaw = 0.0
    self.z = 0.075
    self.iacc_yaw = 0.0
    self.hold_position = False
    self.target_pos = [0, 0, 0]
    self.target_yaw = 0.0
    self.map_counter = 0
    self.max_map_counter = 0
    self.mode = Mode()
    self.mode.mode = 5

    # constants
    self.kp_yaw = 50.0
    self.ki_yaw = 0.1
    self.alpha_yaw = 0.1  # perceived yaw smoothing alpha
    self.hybrid_alpha = 0.3  # blend position with first frame and int

    # angle
    self.angle_x = 0.0  # the hz of state_controller is different
    self.angle_y = 0.0

    rospy.Subscriber("/pidrone/set_mode", Mode, self.mode_callback)
    rospy.Subscriber("/pidrone/reset_transform", Empty, self.reset_callback)
    rospy.Subscriber("/pidrone/toggle_transform", Empty, self.toggle_callback)
    rospy.Subscriber("/pidrone/infrared", Range, self.range_callback)
    rospy.Subscriber('/pidrone/angle', TwistStamped, self.angle_callback)
    rospy.Subscriber('/pidrone/picamera/image_raw', Image, self.image_callback)

    self.pospub = rospy.Publisher('/pidrone/set_mode_vel', Mode, queue_size=1)
    self.first_image_pub = rospy.Publisher("/pidrone/picamera/first_image", Image, queue_size=1, latch=True)
prev_angx = 0
prev_angy = 0
prev_angt = time.time()

# stefie10: Use labeled constants for the four different modes. I
# should never have to remember that mode 0 is disarmed or whatever it
# really is. http://wiki.ros.org/msg#Constants. It is okay if they are
# integer constants, but they should be referenced by name (e.g.,
# Mode.DISARMED) in the code. I had to inspect the code to figure out
# which mode was which, and still got it wrong when writing the
# keyboard controller - it added a whole bunch of extra time and
# needless errors. Even if ROS didn't support enums, this code could
# have used them by defining them as constant variables. See the
# sketch after this block.
mode_to_pub = Mode()
while not rospy.is_shutdown():
    mode_to_pub.mode = current_mode
    modepub.publish(mode_to_pub)
    errpub.publish(error)
    mw_data = board.getData(MultiWii.ATTITUDE)
    analog_data = board.getData(MultiWii.ANALOG)
    publishRos(board, imupub, markerpub, markerarraypub, statepub)
    if current_mode != 4:  # stefie10: ENUM ENUM ENUM!
        # angle compensation calculations
        try:
            # stefie10: Put this code inside of a method or several methods.
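# --- Hypothetical sketch of the change stefie10 asks for (not the package's
# actual Mode.msg). If Mode.msg declared constants such as "uint8 DISARMED=4"
# and "uint8 VELOCITY_CONTROL=5", genmsg would expose them as class attributes
# on the generated Python message, so the loop above could compare against
# names instead of bare numbers. The constant names and values here are
# assumptions.
from pidrone_pkg.msg import Mode

mode_to_pub = Mode()
mode_to_pub.mode = current_mode
if current_mode != Mode.DISARMED:  # instead of: current_mode != 4
    pass  # angle compensation calculations would go here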
def takeoff():
    print "takeoff"
    mode = Mode()
    mode.mode = "FLYING"
    return mode
def __init__(self, camera, bridge):
    picamera.array.PiMotionAnalysis.__init__(self, camera)
    self.bridge = bridge
    self.br = tf.TransformBroadcaster()

    # bind method calls to subscribed topics
    rospy.Subscriber("/pidrone/set_mode", Mode, self.mode_callback)
    rospy.Subscriber("/pidrone/reset_transform", Empty, self.reset_callback)
    rospy.Subscriber("/pidrone/toggle_transform", Empty, self.toggle_callback)
    rospy.Subscriber("/pidrone/infrared", Range, self.range_callback)
    rospy.Subscriber('/pidrone/angle', TwistStamped, self.angle_callback)
    self.pospub = rospy.Publisher('/pidrone/set_mode_vel', Mode, queue_size=1)
    self.first_image_pub = rospy.Publisher("/pidrone/picamera/first_image", Image, queue_size=1, latch=True)

    self.lr_pid = PIDaxis(10.0, 0.000, 0.0, midpoint=0, control_range=(-5.0, 5.0))
    self.fb_pid = PIDaxis(10.0, 0.000, 0.0, midpoint=0, control_range=(-5.0, 5.0))

    self.detector = cv2.ORB(nfeatures=NUM_FEATURES, scoreType=cv2.ORB_FAST_SCORE)  # FAST_SCORE is a little faster to compute

    map_grid_kp, map_grid_des = create_map('map.jpg')
    self.estimator = LocalizationParticleFilter(map_grid_kp, map_grid_des)

    self.first_locate = True
    self.first_hold = True
    self.prev_img = None
    self.prev_kp = None
    self.prev_des = None
    self.locate_position = False
    self.prev_time = None
    self.prev_rostime = None
    self.pos = [0, 0, 0]
    self.yaw = 0.0
    self.z = 0.075
    self.iacc_yaw = 0.0
    self.hold_position = False
    self.target_pos = [0, 0, 0, 0]
    self.target_yaw = 0.0
    self.map_counter = 0
    self.max_map_counter = 0
    self.mode = Mode()
    self.mode.mode = 5

    # constants
    self.kp_yaw = 50.0
    self.ki_yaw = 0.1
    self.alpha_yaw = 0.1  # perceived yaw smoothing alpha
    self.hybrid_alpha = 0.3  # blend position with first frame and int

    # angle
    self.angle_x = 0.0  # the hz of state_controller is different
    self.angle_y = 0.0
#!/usr/bin/env python
import rospy
from sensor_msgs.msg import Joy
import numpy as np
import os
from geometry_msgs.msg import Pose, Twist
from pidrone_pkg.msg import Mode, RC
from std_msgs.msg import Float32, Empty, Bool

z_total_steps = 24
# z_counter = (z_total_steps / 4) - 1
z_counter = -1
z_step = 5  # cm
scalar = 15

modeMsg = Mode()
modeMsg.mode = 'DISARMED'
positionMsg = Bool()
positionMsg.data = True
poseMsg = Pose()
twistMsg = Twist()

modepub = rospy.Publisher('desired/mode', Mode, queue_size=1)
# positionPub = rospy.Publisher('position_control', Bool, queue_size=1)
# positionControlPub = rospy.Publisher('desired/pose', Pose, queue_size=1)
# velocityControlPub = rospy.Publisher('desired/twist', Twist, queue_size=1)
# mappub = rospy.Publisher('map', Empty, queue_size=1)
def main():
    rospy.init_node("key_translation")
    import sys, tty, termios
    fd = sys.stdin.fileno()
    # attr = termios.tcgetattr(sys.stdin.fileno())
    # tty.setraw(sys.stdin.fileno())
    mode = Mode()
    mode.mode = 4
    modepub = rospy.Publisher('/pidrone/set_mode', Mode, queue_size=1)
    rate = rospy.Rate(10)
    msg = """
    Commands:
    ;: arm
    ' ' (spacebar): disarm
    t: takeoff
    y: land
    a: yaw left
    d: yaw right
    w: up
    s: down
    i: forward
    k: backward
    j: left
    l: right
    h: hover
    q: quit
    """
    try:
        print msg
        while not rospy.is_shutdown():
            ch = getch.getch(0.1)
            if ch is None:
                continue
            if ord(ch) == 32:  # spacebar: disarm
                print "disarming"
                mode.mode = 4
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == ";":  # arm
                print "arm"
                mode.mode = 0
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "y":  # land
                print "land"
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                mode.mode = 3
                modepub.publish(mode)
            elif ch == "h":  # hover
                print "hover"
                mode.mode = 5
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "t":  # takeoff
                print "takeoff"
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                mode.mode = 5
                modepub.publish(mode)
            elif ch == "j":  # left
                print "left"
                mode.mode = 5
                mode.x_velocity = -3
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "l":  # right
                print "right"
                mode.mode = 5
                mode.x_velocity = 3
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "k":  # backward
                print "backward"
                mode.mode = 5
                mode.x_velocity = 0
                mode.y_velocity = -3
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "i":  # forward
                print "forward"
                mode.mode = 5
                mode.x_velocity = 0
                mode.y_velocity = 3
                mode.z_velocity = 0
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "a":  # yaw left
                print "yaw left"
                mode.mode = 5
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = -50
                modepub.publish(mode)
            elif ch == "d":  # yaw right
                print "yaw right"
                mode.mode = 5
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 0
                mode.yaw_velocity = 50
                modepub.publish(mode)
            elif ch == "w":  # up
                print "up"
                mode.mode = 5
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = 1
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "s":  # down
                print "down"
                mode.mode = 5
                mode.x_velocity = 0
                mode.y_velocity = 0
                mode.z_velocity = -1
                mode.yaw_velocity = 0
                modepub.publish(mode)
            elif ch == "q":
                break
            elif ord(ch) == 3:  # Ctrl-C
                break
            else:
                print "unknown character: '%d'" % ord(ch)
            print msg
            rate.sleep()
    finally:
        print "sending disarm"
        mode.mode = 4
        modepub.publish(mode)
        time.sleep(0.25)
def arm(self):
    mode = Mode()
    mode.mode = "ARMED"
    return mode