Example 1
class UInode(QtGui.QMainWindow):
    def __init__(self):
        super(UInode, self).__init__()

        self._ap_topic = rospy.Publisher('/apctrl', Empty,
                                         queue_size=5)

        self.swap_red_blue = rospy.get_param('~swap_red_blue', False)

        self.controller = DroneController(
            offline_timeout=rospy.get_param('~connection_check_period', 500))

        self.keymap = self.gen_keymap()

        self.messages = Messages(
            rospy.get_param('~message_display_time', 5000), *grid)

        self.messages_named_template = re.compile(
            r'((?P<name>[a-zA-Z0-9_-]+)::)?(?P<message>.*)')

        self.setWindowTitle('ARdrone camera')
        self.image_box = QtGui.QLabel(self)
        self.setCentralWidget(self.image_box)

        self.image = None
        self.image_lock = Lock()

        fps = rospy.get_param('~fps', 50)

        self.redraw_timer = QtCore.QTimer(self)
        self.redraw_timer.timeout.connect(self.on_redraw)
        self.redraw_timer.start(1000 / fps)

        rospy.Subscriber('/ui/message', String, self.on_ui_request)
        rospy.Subscriber('/in/image', Image, self.on_video_update)

    def on_ui_request(self, message):
        """Process the message show request

        We have spetial `ui/message` topic where any node can send
        any message and that message will be displayed.

        By default, messages are displayed for a while and than hidden.

        Messages which match the mask `([a-zA-Z0-9_-])::(.*)` will be displayed
        permanently. Newer messages will overwrite older messages
        with the same name.

        """
        match = self.messages_named_template.match(message.data)
        self.messages.message_put(**match.groupdict())

    def on_video_update(self, data):
        """On each frame we save new picture for future rendering"""
        self.communication_since_timer = True

        image = QtGui.QImage(data.data,
                             data.width,
                             data.height,
                             QtGui.QImage.Format_RGB888)
        if self.swap_red_blue:
            image = image.rgbSwapped()

        with self.image_lock:
            self.image = image

    def on_redraw(self):
        """Redraw interface"""
        image = None
        with self.image_lock:
            if self.controller.is_online and self.image is not None:
                image = QtGui.QPixmap.fromImage(self.image)
            else:
                image = QtGui.QPixmap(640, 360)
                image.fill(QtGui.QColor(50, 50, 50))

        self.messages.messages_put((
            (self.controller.status.readable(), 'drone.status'),
            (self.controller.battery, 'drone.battery'),
        ))

        self.messages.render(image)

        self.resize(image.width(), image.height())
        self.image_box.setPixmap(image)

    def gen_keymap(self):
        return {
            QtCore.Qt.Key.Key_R: lambda ax, e: self.controller.reset(),
            QtCore.Qt.Key.Key_T: lambda ax, e: self.controller.takeoff(),
            QtCore.Qt.Key.Key_L: lambda ax, e: self.controller.land(),
            QtCore.Qt.Key.Key_H: lambda ax, e: self.controller.hover(),
            QtCore.Qt.Key.Key_A: lambda ax, e: self.controller.send_vel(y=ax),
            QtCore.Qt.Key.Key_D: lambda ax, e: self.controller.send_vel(y=-ax),
            QtCore.Qt.Key.Key_W: lambda ax, e: self.controller.send_vel(x=ax),
            QtCore.Qt.Key.Key_S: lambda ax, e: self.controller.send_vel(x=-ax),
            QtCore.Qt.Key.Key_Q: lambda ax, e: self.controller.send_vel(yaw=ax),
            QtCore.Qt.Key.Key_E: lambda ax, e: self.controller.send_vel(yaw=-ax),
            QtCore.Qt.Key.Key_BracketRight: lambda ax, e: self.controller.send_vel(z=ax),
            QtCore.Qt.Key.Key_BracketLeft: lambda ax, e: self.controller.send_vel(z=-ax),
            QtCore.Qt.Key.Key_Y: lambda ax, e: self._ap_topic.publish(Empty()) if ax != 0 else None,
        }

    def keyPressEvent(self, event):
        key = event.key()

        if event.isAutoRepeat() or self.controller is None:
            return

        if key in self.keymap:
            self.keymap[key](1, event)

    def keyReleaseEvent(self, event):
        key = event.key()

        if event.isAutoRepeat() or self.controller is None:
            return

        if key in self.keymap:
            self.keymap[key](0, event)
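
The `on_ui_request` docstring above describes a small convention on the `/ui/message` topic: plain text is shown temporarily, while text of the form `name::message` is pinned and replaced by later messages with the same name. Below is a minimal publisher sketch (the node name and the message contents are made up for illustration; the topic and the std_msgs String type come from the example itself).

import rospy
from std_msgs.msg import String

rospy.init_node('ui_message_demo')  # hypothetical node name
pub = rospy.Publisher('/ui/message', String, queue_size=5)
rospy.sleep(0.5)  # give the UI subscriber time to connect

# Transient message: shown for ~message_display_time ms, then hidden.
pub.publish(String('Calibration complete'))

# Named message: matches the `name::message` mask, so it stays visible
# until another message with the same name replaces it.
pub.publish(String('battery::72%'))
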
Example 2
class UInode(QtGui.QMainWindow):
    def __init__(self):
        super(UInode, self).__init__()

        self._ap_topic = rospy.Publisher('/apctrl', Empty, queue_size=5)
        self.webcam_shutdown_pub = rospy.Publisher('/webcam/shutdown',
                                                   Empty,
                                                   queue_size=5)

        self.swap_red_blue = rospy.get_param('~swap_red_blue', False)

        self.controller = DroneController(
            offline_timeout=rospy.get_param('~connection_check_period', 500))

        self.keymap = self.gen_keymap()
        # Without release action
        self.keymap2 = self.gen_keymap2()

        self.messages = Messages(
            rospy.get_param('~message_display_time', 5000), *grid)

        self.messages_named_template = re.compile(
            r'((?P<name>[a-zA-Z0-9_-]+)::)?(?P<message>.*)')

        self.setWindowTitle('ARdrone camera')
        self.image_box = QtGui.QLabel(self)
        self.setCentralWidget(self.image_box)

        self.image = None
        self.image_lock = Lock()

        fps = rospy.get_param('~fps', 50)

        self.redraw_timer = QtCore.QTimer(self)
        self.redraw_timer.timeout.connect(self.on_redraw)
        self.redraw_timer.start(1000 / fps)

        rospy.Subscriber('/ui/message', String, self.on_ui_request)
        rospy.Subscriber('/out/image', Image, self.on_video_update)

    def on_ui_request(self, message):
        """Process the message show request

        We have spetial `ui/message` topic where any node can send
        any message and that message will be displayed.

        By default, messages are displayed for a while and than hidden.

        Messages which match the mask `([a-zA-Z0-9_-])::(.*)` will be displayed
        permanently. Newer messages will overwrite older messages
        with the same name.

        """
        match = self.messages_named_template.match(message.data)
        self.messages.message_put(**match.groupdict())

    def on_video_update(self, data):
        """On each frame we save new picture for future rendering"""
        self.communication_since_timer = True

        image = QtGui.QImage(data.data, data.width, data.height,
                             QtGui.QImage.Format_RGB888)
        if self.swap_red_blue:
            image = image.rgbSwapped()

        with self.image_lock:
            self.image = image

    def on_redraw(self):
        """Redraw interface"""
        image = None
        with self.image_lock:
            if self.image is not None:
                image = QtGui.QPixmap.fromImage(self.image)
            else:
                image = QtGui.QPixmap(640, 360)
                image.fill(QtGui.QColor(50, 50, 50))

        self.messages.messages_put((
            (self.controller.status.readable(), 'drone.status'),
            (self.controller.battery, 'drone.battery'),
        ))

        self.messages.render(image)

        self.resize(image.width(), image.height())
        self.image_box.setPixmap(image)

    def gen_keymap2(self):
        return {
            QtCore.Qt.Key.Key_M: lambda ax, e: self.controller.enable_cv(),
            QtCore.Qt.Key.Key_N:
            lambda ax, e: self.controller.enable_controller(),
        }

    def gen_keymap(self):
        return {
            QtCore.Qt.Key.Key_R:
            lambda ax, e: self.controller.reset(),
            QtCore.Qt.Key.Key_T:
            lambda ax, e: self.controller.takeoff(),
            QtCore.Qt.Key.Key_L:
            lambda ax, e: self.controller.land(),
            QtCore.Qt.Key.Key_C:
            lambda ax, e: self.controller.change_camera(),
            QtCore.Qt.Key.Key_M:
            lambda ax, e: self.controller.enable_cv(),
            QtCore.Qt.Key.Key_N:
            lambda ax, e: self.controller.enable_controller(),
            QtCore.Qt.Key.Key_F1:
            lambda ax, e: self.controller.increaseP(),
            QtCore.Qt.Key.Key_F2:
            lambda ax, e: self.controller.decreaseP(),
            QtCore.Qt.Key.Key_F3:
            lambda ax, e: self.controller.increaseI(),
            QtCore.Qt.Key.Key_F4:
            lambda ax, e: self.controller.decreaseI(),
            QtCore.Qt.Key.Key_F5:
            lambda ax, e: self.controller.increaseD(),
            QtCore.Qt.Key.Key_F6:
            lambda ax, e: self.controller.decreaseD(),
            QtCore.Qt.Key.Key_H:
            lambda ax, e: self.controller.hover(),
            QtCore.Qt.Key.Key_A:
            lambda ax, e: self.controller.send_vel(y=ax),
            QtCore.Qt.Key.Key_D:
            lambda ax, e: self.controller.send_vel(y=-ax),
            QtCore.Qt.Key.Key_W:
            lambda ax, e: self.controller.send_vel(x=ax),
            QtCore.Qt.Key.Key_S:
            lambda ax, e: self.controller.send_vel(x=-ax),
            QtCore.Qt.Key.Key_Q:
            lambda ax, e: self.controller.send_vel(yaw=ax),
            QtCore.Qt.Key.Key_E:
            lambda ax, e: self.controller.send_vel(yaw=-ax),
            QtCore.Qt.Key.Key_BracketRight:
            lambda ax, e: self.controller.send_vel(z=ax),
            QtCore.Qt.Key.Key_BracketLeft:
            lambda ax, e: self.controller.send_vel(z=-ax),
            QtCore.Qt.Key.Key_Y:
            lambda ax, e: self._ap_topic.publish(Empty()) if ax != 0 else None,
        }

    def keyPressEvent(self, event):
        key = event.key()

        if event.isAutoRepeat() or self.controller is None:
            return

        if key in self.keymap:
            self.keymap[key](1, event)

    def keyReleaseEvent(self, event):
        key = event.key()

        if event.isAutoRepeat() or self.controller is None:
            return

        if key in self.keymap and key not in self.keymap2:
            self.keymap[key](0, event)
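
None of the examples show how such a node is started. Below is a minimal launcher sketch, assuming the PyQt4/rospy combination these snippets use (the module path `ui_node` and the node name are made up): rospy delivers subscriber callbacks on background threads, so the Qt event loop and the redraw QTimer can own the main thread.

import sys

import rospy
from PyQt4 import QtGui

from ui_node import UInode  # assumed import path for the class above

if __name__ == '__main__':
    rospy.init_node('ui_node')          # hypothetical node name
    app = QtGui.QApplication(sys.argv)
    ui = UInode()
    ui.show()
    sys.exit(app.exec_())
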
Example 3
class ControllerNode(object):
    def __init__(self):
        self._b = False
        self.last_cmd_sent = datetime.now()
        self.controller = DroneController()
        self.min_points = 15
        self.filter = MovingAverage()
        self.tf = tf.TransformListener()
        self._ap_ctrl = rospy.Subscriber('/apctrl',
                                         Empty,
                                         self.on_toggle,
                                         queue_size=1)
        self.is_active = False
        self.is_webcam = rospy.get_param('~is_webcam', False)

    def send_vel(self, x=0, y=0):
        if self.is_active:
            if x == 0 and y == 0:
                self.controller.hover()
            else:
                self.controller.send_vel(x, y)
            print(x, y, str(datetime.now() - self.last_cmd_sent))
            self.last_cmd_sent = datetime.now()

    def on_toggle(self, e):
        self.is_active = not self.is_active
        print(self.is_active)

    def get_rot_matrix(self, rev=False):
        frm, to = 'ardrone/odom', 'ardrone/ardrone_base_bottomcam'
        if rev:
            frm, to = to, frm

        self.tf.waitForTransform(frm, to, rospy.Time(0), rospy.Duration(3))
        trans, rot = self.tf.lookupTransform(frm, to, rospy.Time(0))

        x, y, z = euler_from_quaternion(rot)
        yaw = euler_from_quaternion(rot, axes='szxy')[0]

        return yaw, np.array(euler_matrix(-x, -y, z))

    def process_data(self, local_coordinates):
        coordinates = self.filter(local_coordinates)
        if coordinates is None:
            if datetime.now() - self.last_cmd_sent > timedelta(seconds=0.3):
                self.send_vel(0, 0)
            return

        coordinates.resize(1, 4)

        yaw, rot_matrix = self.get_rot_matrix(rev=True)

        point = coordinates.dot(rot_matrix)

        if any(point[0]) and point[0][2] < 5000:
            x, y, z = point[0][0], point[0][1], point[0][2]
            x, y = x * cos(yaw) - y * sin(yaw), x * sin(yaw) + y * cos(yaw)

            y -= 50

            left = (min(max(x, -700), 700) / 10000)
            forward = -(min(max(y, -700), 700) / 10000)

            self.send_vel(forward, left)

        if datetime.now() - self.last_cmd_sent > timedelta(seconds=0.3):
            self.send_vel(0, 0)
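
In `process_data` above, the marker offset is rotated into the drone's heading by the yaw angle, shifted, clamped to +/-700 and scaled by 1/10000 to produce a small velocity command. Below is a standalone numeric sketch of that mapping (the clamp, scale and -50 shift are taken from the code; the sample offset and yaw are made up):

from math import cos, sin, radians

def offset_to_velocity(x, y, yaw):
    """Mirror the offset-to-command mapping used in process_data above."""
    x, y = x * cos(yaw) - y * sin(yaw), x * sin(yaw) + y * cos(yaw)
    y -= 50  # the constant shift applied in process_data
    left = min(max(x, -700), 700) / 10000.0
    forward = -(min(max(y, -700), 700) / 10000.0)
    return forward, left

# Made-up example: a marker 1200 units ahead and 300 to the side, with the
# drone yawed 30 degrees, maps to a clamped command of roughly (-0.07, -0.034).
print(offset_to_velocity(300, 1200, radians(30)))
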
Example 4
class UInode(QtGui.QMainWindow):
    keypressChecker = 0

    def __init__(self):
        super(UInode, self).__init__()

        self._ap_topic = rospy.Publisher('/apctrl', Empty, queue_size=5)
        self.webcam_shutdown_pub = rospy.Publisher('/webcam/shutdown',
                                                   Empty,
                                                   queue_size=5)

        self.swap_red_blue = rospy.get_param('~swap_red_blue', False)

        self.controller = DroneController(
            offline_timeout=rospy.get_param('~connection_check_period', 500))

        self.keymap = self.gen_keymap()
        # Without release action
        self.keymap2 = self.gen_keymap2()

        self.messages = Messages(
            rospy.get_param('~message_display_time', 5000), *grid)

        self.messages_named_template = re.compile(
            r'((?P<name>[a-zA-Z0-9_-]+)::)?(?P<message>.*)')

        self.setWindowTitle('ARdrone camera')
        self.image_box = QtGui.QLabel(self)
        self.setCentralWidget(self.image_box)

        self.image = None
        self.image_lock = Lock()

        fps = rospy.get_param('~fps', 50)

        self.redraw_timer = QtCore.QTimer(self)
        self.redraw_timer.timeout.connect(self.on_redraw)
        self.redraw_timer.start(1000 / fps)

        rospy.Subscriber('/ui/message', String, self.on_ui_request)
        rospy.Subscriber('/out/image', Image, self.on_video_update)

    def on_ui_request(self, message):
        """Process the message show request

        We have spetial `ui/message` topic where any node can send
        any message and that message will be displayed.

        By default, messages are displayed for a while and than hidden.

        Messages which match the mask `([a-zA-Z0-9_-])::(.*)` will be displayed
        permanently. Newer messages will overwrite older messages
        with the same name.

        """
        match = self.messages_named_template.match(message.data)
        self.messages.message_put(**match.groupdict())

    def on_video_update(self, data):
        """On each frame we save new picture for future rendering"""
        self.communication_since_timer = True

        image = QtGui.QImage(data.data, data.width, data.height,
                             QtGui.QImage.Format_RGB888)
        if self.swap_red_blue:
            image = image.rgbSwapped()

        with self.image_lock:
            self.image = image

    def on_redraw(self):
        """Redraw interface"""
        image = None
        with self.image_lock:
            if self.image is not None:
                image = QtGui.QPixmap.fromImage(self.image)
            else:
                image = QtGui.QPixmap(640, 360)
                image.fill(QtGui.QColor(50, 50, 50))

        self.messages.messages_put((
            (self.controller.status.readable(), 'drone.status'),
            (self.controller.battery, 'drone.battery'),
        ))

        self.messages.render(image)

        self.resize(image.width(), image.height())
        self.image_box.setPixmap(image)

    """Homing function that checks continuously if the Euclidiean distance of marker to drone is greater or less than the setError. If greater, moves the drone closer to the marker position"""

    def Homing(self):
        """keypressChecker is used to assure that the function only happens once on a keypress. A keypress always yields two calls normally."""
        global keypressChecker
        keypressChecker = keypressChecker + 1
        if (keypressChecker == 2):
            global xdistance
            global ydistance
            """ setError is the maximum distance the drone can be away from the marker to be considered at the marker """
            setError = 0.05
            while (CheckHypo() > setError):
                if (xdistance > 0 and ydistance > 0):
                    time.sleep(.3)
                    self.controller.send_vel(-.0005, -.0005, 0, 0)
                    print("GO RIGHT FORWARD")
                if (xdistance > 0 and ydistance < 0):
                    time.sleep(.3)
                    self.controller.send_vel(-.0005, .0005, 0, 0)
                    print("GO RIGHT BACKWARD")
                if (xdistance < 0 and ydistance > 0):
                    time.sleep(.3)
                    self.controller.send_vel(.0005, -.0005, 0, 0)
                    print("GO LEFT FORWARD")
                if (xdistance < 0 and ydistance < 0):
                    time.sleep(.3)
                    self.controller.send_vel(.0005, .0005, 0, 0)
                    print("GO LEFT BACKWARD")
            keypressChecker = 0
            self.controller.send_vel(0, 0, 0, 0)
            self.controller.land()

    def gen_keymap2(self):
        return {
            QtCore.Qt.Key.Key_M: lambda ax, e: self.controller.enable_cv(),
            QtCore.Qt.Key.Key_N:
            lambda ax, e: self.controller.enable_controller(),
        }

    def gen_keymap(self):
        return {
            QtCore.Qt.Key.Key_R:
            lambda ax, e: self.controller.reset(),
            QtCore.Qt.Key.Key_T:
            lambda ax, e: self.controller.takeoff(),
            QtCore.Qt.Key.Key_L:
            lambda ax, e: self.controller.land(),
            QtCore.Qt.Key.Key_C:
            lambda ax, e: self.controller.change_camera(),
            QtCore.Qt.Key.Key_M:
            lambda ax, e: self.controller.enable_cv(),
            QtCore.Qt.Key.Key_N:
            lambda ax, e: self.controller.enable_controller(),
            QtCore.Qt.Key.Key_F1:
            lambda ax, e: self.controller.increaseP(),
            QtCore.Qt.Key.Key_F2:
            lambda ax, e: self.controller.decreaseP(),
            QtCore.Qt.Key.Key_F3:
            lambda ax, e: self.controller.increaseI(),
            QtCore.Qt.Key.Key_F4:
            lambda ax, e: self.controller.decreaseI(),
            QtCore.Qt.Key.Key_F5:
            lambda ax, e: self.controller.increaseD(),
            QtCore.Qt.Key.Key_F6:
            lambda ax, e: self.controller.decreaseD(),
            QtCore.Qt.Key.Key_H:
            lambda ax, e: self.controller.hover(),
            QtCore.Qt.Key.Key_A:
            lambda ax, e: self.controller.send_vel(y=ax),
            QtCore.Qt.Key.Key_D:
            lambda ax, e: self.controller.send_vel(y=-ax),
            QtCore.Qt.Key.Key_W:
            lambda ax, e: self.controller.send_vel(x=ax),
            QtCore.Qt.Key.Key_S:
            lambda ax, e: self.controller.send_vel(x=-ax),
            QtCore.Qt.Key.Key_Q:
            lambda ax, e: self.controller.send_vel(yaw=ax),
            QtCore.Qt.Key.Key_E:
            lambda ax, e: self.controller.send_vel(yaw=-ax),
            QtCore.Qt.Key.Key_BracketRight:
            lambda ax, e: self.controller.send_vel(z=ax),
            QtCore.Qt.Key.Key_BracketLeft:
            lambda ax, e: self.controller.send_vel(z=-ax),
            QtCore.Qt.Key.Key_Y:
            lambda ax, e: self._ap_topic.publish(Empty()) if ax != 0 else None,
            QtCore.Qt.Key.Key_J:
            lambda ax, e: self.Homing(),
        }

    def markerListener(self):
        print("This is going on:")
        rospy.Subscriber("visualization_marker", Marker, markerCallback)

    def droneListener(self):
        print("This is going on too!:")
        rospy.Subscriber("/orb/pose_scaled", PoseStamped, droneCallback)

    def keyPressEvent(self, event):
        key = event.key()

        if event.isAutoRepeat() or self.controller is None:
            return

        if key in self.keymap:
            self.keymap[key](1, event)

    def keyReleaseEvent(self, event):
        key = event.key()

        if event.isAutoRepeat() or self.controller is None:
            return

        if key in self.keymap and key not in self.keymap2:
            self.keymap[key](0, event)
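
`Homing` relies on module-level state that the example does not show: `xdistance` and `ydistance` (presumably updated by the `markerCallback`/`droneCallback` subscribers) and a `CheckHypo` helper. The following is purely an assumed sketch of that helper, based on the docstring's Euclidean-distance check.

from math import hypot

# Assumed module-level marker-to-drone offsets, updated elsewhere
# (e.g. by the markerCallback/droneCallback subscriber callbacks).
xdistance = 0.0
ydistance = 0.0

def CheckHypo():
    """Euclidean (hypotenuse) distance of the marker from the drone."""
    return hypot(xdistance, ydistance)
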
Example 5
class ControllerNode(object):
    def __init__(self):
        self.last_cmd_sent = datetime.now()

        self.controller = DroneController()

        self.pub = rospy.Publisher('/out/image', Image, queue_size=5)

        self.bridge = CvBridge()
        self.encoding = rospy.get_param('~encoding', 'bgr8')

        self.info = None

        rospy.Subscriber('/in/info', CameraInfo, self.on_info, queue_size=1)
        rospy.Subscriber('/in/image', Image, self.on_image, queue_size=1)

        path = join(dirname(dirname(__file__)), 'data/target.jpg')
        path = rospy.get_param('~path', path)

        self.detector = cv2.ORB()
        self.pattern = cv2.imread(path, 0)
        self.pattern_detect = self.detect(self.pattern)

        self.min_points = 15

        self.matcher = cv2.FlannBasedMatcher(
            dict(
                algorithm=6,
                table_number=6,
                key_size=12,
                multi_probe_level=1
            ),
            dict(
                checks=100
            )
        )

        h, w = self.pattern.shape

        self.target_points_2d = np.float32(
            [[0, 0], [0, h-1], [w-1, h-1], [w-1, 0]]).reshape(-1, 1, 2)

        h_, w_ = h // 2, w // 2

        self.target_points_3d = np.float32(
            [[-w_+1, -h_+1, 0],
             [-w_+1, h_-1, 0],
             [w_-1, h_-1, 0],
             [w_-1, -h_+1, 0]])

        self.x_ratio = 0.7
        self.y_ratio = 0.7

        self.match_test_ratio = 0.8

        self.knn_parameter = 2

        self.filter = MovingAverage()

        self.tf = tf.TransformListener()

        self.camera_model = image_geometry.PinholeCameraModel()

        self._ap_ctrl = rospy.Subscriber('/apctrl', Empty, self.on_toggle,
                                         queue_size=1)

        self.is_active = False

    def send_vel(self, x=0, y=0):
        if self.is_active:
            if x == 0 and y == 0:
                self.controller.hover()
            else:
                self.controller.send_vel(x, y)
            print(x, y, str(datetime.now() - self.last_cmd_sent))
            self.last_cmd_sent = datetime.now()

    def on_toggle(self, e):
        self.is_active = not self.is_active
        print(self.is_active)

    def on_info(self, data):
        self.info = data

    frame = 0

    def on_image(self, data):
        if self.info is None:
            return

        if self.frame < 2:
            self.frame += 1
            return
        else:
            self.frame = 0

        img = self.bridge.imgmsg_to_cv2(data, self.encoding)

        # cv2.imshow('Raw', img)

        img_out = self.process_image(img)

        # cv2.imshow('Result', img_out)

        img_msg = self.bridge.cv2_to_imgmsg(img_out, self.encoding)
        self.pub.publish(img_msg)

        # cv2.cv.WaitKey(1)

    def process_image(self, img):
        if self.info is None:
            self.process_data(None)
            return

        image_detect = self.detect(img)

        if len(image_detect.kp) < self.min_points:
            self.process_data(None)
            return img

        matches = self.match(image_detect)
        matched_image_detect = self.filter_train_kp(
            image_detect, matches)
        matched_pattern_detect = self.filter_query_kp(
            self.pattern_detect, matches)

        if len(matches) < self.min_points:
            self.process_data(None)
            return self.draw_match(img, image_detect, matched_image_detect,
                                   success=False)

        transform, mask = cv2.findHomography(
            np.float32(map(attrgetter('pt'), matched_pattern_detect.kp)),
            np.float32(map(attrgetter('pt'), matched_image_detect.kp)),
            cv2.RANSAC, 5.0)

        bbox = cv2.perspectiveTransform(self.target_points_2d, transform)

        if not self.check_match(bbox):
            self.process_data(None)
            return self.draw_match(img, image_detect, matched_image_detect,
                                   bbox, success=False)

        camera_matrix = np.float32(self.info.K).reshape(3, 3)
        camera_distortion = np.float32(self.info.D)

        rot, trans, inl = cv2.solvePnPRansac(
            self.target_points_3d, np.float32(bbox),
            camera_matrix, camera_distortion,
            iterationsCount=5,
            flags=cv2.ITERATIVE,
            reprojectionError=20
        )

        self.process_data(trans, img)
        return self.draw_match(img, image_detect, matched_image_detect, bbox,
                               success=True)

    def detect(self, img, *args, **kwargs):
        """Detects keypoints and descriptors of the image

        Returns a `Detect` class instance.

        """
        kp, desc = self.detector.detectAndCompute(img, None, *args, **kwargs)
        return Detect(kp, desc)

    def match(self, image_detect):
        """Matches the detected features against the pattern features"""
        matches = self.matcher.knnMatch(self.pattern_detect.desc,
                                        image_detect.desc,
                                        k=self.knn_parameter)
        good_matches = [i for i in matches if len(i) == 2]
        ratio_matches = [m for m, n in good_matches
                         if m.distance < self.match_test_ratio * n.distance]
        return ratio_matches

    def draw_match(self, img, image_detect, good_image_detect,
                   bbox=None, success=True):
        """Draws the found and matched keypoints and the target rectangle"""
        if success:
            color = (0, 255, 0)
        else:
            color = (0, 0, 255)

        img = cv2.drawKeypoints(img, image_detect.kp,
                                color=(0, 20, 20), flags=0)
        img = cv2.drawKeypoints(img, good_image_detect.kp,
                                color=color, flags=0)

        if bbox is not None:
            cv2.polylines(img, [np.int32(bbox)], True, color, 3)

        return img

    def check_match(self, points):
        """Returns True if the given vertexes for a valid rectandle"""
        if len(points) != 4:
            return False

        a = self.distance(points[0][0], points[1][0])
        b = self.distance(points[1][0], points[2][0])
        c = self.distance(points[2][0], points[3][0])
        d = self.distance(points[3][0], points[0][0])

        if (a / c < self.x_ratio or c / a < self.x_ratio or
                b / d < self.y_ratio or d / b < self.y_ratio):
            return False

        return True

    @staticmethod
    def filter_query_kp(query_detect, matches):
        """Filters the matched `query_detect` keypoints

        Returns a `Detect` class instance with filtered keypoints iterable
        and no descriptors.

        """
        return Detect([query_detect.kp[m.queryIdx] for m in matches])

    @staticmethod
    def filter_train_kp(train_detect, matches):
        """Filters the matched `train_detect` keypoints

        Returns a `Detect` class instance with filtered keypoints iterable
        and no descriptors.

        """
        return Detect([train_detect.kp[m.trainIdx] for m in matches])

    @staticmethod
    def distance(a, b):
        return ((a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2) ** 0.5

    def get_rot_matrix(self, rev=False):
        frm, to = 'ardrone/odom', 'ardrone/ardrone_base_bottomcam'
        if rev:
            frm, to = to, frm

        self.tf.waitForTransform(frm, to, rospy.Time(0), rospy.Duration(3))
        trans, rot = self.tf.lookupTransform(frm, to, rospy.Time(0))

        x, y, z = euler_from_quaternion(rot)
        yaw = euler_from_quaternion(rot, axes='szxy')[0]

        return yaw, np.array(euler_matrix(-x, -y, z))

    def process_data(self, local_coordinates, img=None):
        coordinates = self.filter(local_coordinates)
        if coordinates is None:
            if datetime.now() - self.last_cmd_sent > timedelta(seconds=0.3):
                self.send_vel(0, 0)
            return

        coordinates.resize(1, 4)

        yaw, rot_matrix = self.get_rot_matrix(rev=True)

        point = coordinates.dot(rot_matrix)

        if any(point[0]) and point[0][2] < 5000:
            x, y, z = point[0][0], point[0][1], point[0][2]
            x, y = x * cos(yaw) - y * sin(yaw), x * sin(yaw) + y * cos(yaw)

            y -= 50

            left = (min(max(x, -700), 700) / 10000)
            forward = -(min(max(y, -700), 700) / 10000)

            self.send_vel(forward, left)

        if datetime.now() - self.last_cmd_sent > timedelta(seconds=0.3):
            self.send_vel(0, 0)
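
The `detect`/`match` pair above implements ORB feature detection with a FLANN LSH matcher (`algorithm=6` is FLANN_INDEX_LSH) followed by Lowe's ratio test. Below is a stripped-down, standalone sketch of the same pipeline on two image files (the file paths are made up; the OpenCV 2.4-style `cv2.ORB()` constructor matches the API the example uses):

import cv2

# Load the pattern and a camera frame as grayscale (paths are made up).
pattern = cv2.imread('data/target.jpg', 0)
frame = cv2.imread('frame.jpg', 0)

orb = cv2.ORB()  # OpenCV 2.4 API, as in the example above
p_kp, p_desc = orb.detectAndCompute(pattern, None)
f_kp, f_desc = orb.detectAndCompute(frame, None)

# FLANN with LSH indexing (algorithm=6), same parameters as the example.
matcher = cv2.FlannBasedMatcher(
    dict(algorithm=6, table_number=6, key_size=12, multi_probe_level=1),
    dict(checks=100))

# k-NN match plus Lowe's ratio test (0.8, as in match() above).
matches = matcher.knnMatch(p_desc, f_desc, k=2)
good = []
for pair in matches:
    if len(pair) == 2 and pair[0].distance < 0.8 * pair[1].distance:
        good.append(pair[0])

print('%d good matches' % len(good))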