# Example #1
class VisualCompassDummyHandler():
    """
    Implements a ROS independent handler for a Visual Compass worker.

    Loads the YAML config, opens the configured video source and runs the
    processing loop until the video ends or the user quits.
    """
    def __init__(self):
        # Resolve the config file relative to this module's location
        dirname = os.path.dirname(__file__)
        relative_path = "../config/config.yaml"
        config_path = os.path.join(dirname, relative_path)

        # safe_load avoids arbitrary object construction from the YAML file
        with open(config_path, 'r') as stream:
            config = yaml.safe_load(stream)

        source = config['dummy_handler_input']

        # A string source is a relative video file path; anything else
        # (e.g. a device index) is passed to Videocv unchanged.
        # Bug fix: the original checked `isinstance(source, string)`,
        # which raises NameError (`string` is undefined).
        if isinstance(source, str):
            root_folder = os.curdir
            source = root_folder + source

        self.video_getter = Videocv(source)
        self.video_getter.run()

        self.vc = VisualCompass(config)

        self.loop(config)

    def debug_image_callback(self, debug_image):
        """
        param debug_image: shows this debug image
        """
        cv2.imshow("Video", debug_image)

    def data_callback(self, angle, confidence):
        """
        Get the data call back of the visual compass
        param angle: angle calculated by the visual compass
        param confidence: confidence of that angle
        """
        print("Angle: {} | Confidence: {}".format(angle, confidence))

    def loop(self, config):
        """
        Processes stuff, feeds the compass with images, and sets the map
        param config: config dict
        """
        side = 0
        while True:
            image = self.video_getter.frame

            k = cv2.waitKey(1)

            # Number of ground truth sides to record before processing starts
            sides = config['compass_multiple_map_image_count'] if config[
                'compass_type'] == 'multiple' else 2
            if side < sides:
                self.debug_image_callback(image)
                # SPACE pressed: record the current frame as ground truth
                if k % 256 == 32:
                    angle = float(side) / sides * math.pi * 2
                    self.vc.set_truth(angle, image)
                    side += 1
            else:
                self.vc.process_image(image,
                                      resultCB=self.data_callback,
                                      debugCB=self.debug_image_callback)

            # Quit on ESC or 'q', or when the video source has ended.
            # Bug fix: the original compared `0xFF == ord('q')`, which is
            # constant False; the key code `k` was never tested.
            if k % 256 == 27 or k & 0xFF == ord('q') or self.video_getter.ended:
                break
        self.video_getter.stop()
        cv2.destroyAllWindows()
# Example #2
class VisualCompassSetup():
    # type: () -> None
    """
    ROS node that records ground truth images for the visual compass.

    Subscribes to the raw image topic. On every message on the
    'trigger_visual_compass' trigger topic (e.g. while the robot looks
    at a goal side), the latest image and the camera yaw from TF are
    stored as ground truth. Once all configured images have been set,
    the extracted features are pickled to disk and the node shuts down.
    """
    def __init__(self):
        # type: () -> None
        """
        Initiate VisualCompassHandler

        return: None
        """
        # Init ROS package
        rospack = rospkg.RosPack()
        self.package_path = rospack.get_path('bitbots_visual_compass')

        rospy.init_node('bitbots_visual_compass_setup')
        rospy.loginfo('Initializing visual compass setup')

        self.bridge = CvBridge()

        self.config = {}
        self.image_msg = None
        self.compass = None
        self.hostname = socket.gethostname()

        # TF setup to look up the camera orientation relative to the body
        self.base_frame = 'base_footprint'
        self.camera_frame = 'camera_optical_frame'
        self.tf_buffer = tf2.Buffer(cache_time=rospy.Duration(50))
        self.listener = tf2.TransformListener(self.tf_buffer)

        self.pub_head_mode = rospy.Publisher('head_mode',
                                             HeadMode,
                                             queue_size=1)

        # Register VisualCompassConfig server for dynamic reconfigure and set callback
        Server(VisualCompassConfig, self.dynamic_reconfigure_callback)

        rospy.logwarn("------------------------------------------------")
        rospy.logwarn("||                 WARNING                    ||")
        rospy.logwarn("||Please remove the LAN cable from the Robot, ||")
        rospy.logwarn("||after pressing 'YES' you have 10 Seconds    ||")
        rospy.logwarn("||until the head moves OVER the LAN port!!!   ||")
        rospy.logwarn("------------------------------------------------\n\n")

        # Python 2/3 compatible prompt.
        # Bug fix: the original bound `input = raw_input` inside a try,
        # which makes `input` a function-local name; on Python 3 the
        # NameError path left it unbound and the call below crashed with
        # UnboundLocalError.
        prompt = "Do you REALLY want to start? (YES/n)"
        try:
            accept = raw_input(prompt)
        except NameError:
            accept = input(prompt)

        if accept == "YES":
            rospy.logwarn("REMOVE THE LAN CABLE NOW!!!!!")

            # Grace period for the operator to unplug the cable
            rospy.sleep(10)

            # NOTE(review): head mode 10 presumably starts the recording
            # head motion -- confirm against the HeadMode message definition
            head_mode = HeadMode()
            head_mode.headMode = 10
            self.pub_head_mode.publish(head_mode)
            rospy.loginfo("Head mode has been set!")

            rospy.spin()
        else:
            rospy.signal_shutdown(
                "You aborted the process! Shuting down correctly.")

    def dynamic_reconfigure_callback(self, config, level):
        # type: (dict, int) -> dict
        """
        Applies a new dynamic reconfigure configuration.

        Recreates the compass, resets the ground truth progress when
        compass-related parameters changed, and (re)subscribes to the
        image and trigger topics if their parameters changed.

        param config: new configuration dict
        param level: dynamic reconfigure change level (unused here)
        return: the accepted configuration
        """
        self.compass = VisualCompass(config)

        if self.changed_config_param(config, 'compass_type') or \
            self.changed_config_param(config, 'compass_matcher') or \
            self.changed_config_param(config, 'compass_multiple_ground_truth_images_count'):

            # Compass parameters changed: collect ground truth from scratch
            self.ground_truth_images_count = 0
            self.processed_set_all_ground_truth_images = False

            rospy.loginfo(
                'Loaded configuration: compass type: %(type)s | matcher type: %(matcher)s | ground truth images: %(ground_truth_count)d'
                % {
                    'type':
                    config['compass_type'],
                    'matcher':
                    config['compass_matcher'],
                    'ground_truth_count':
                    config['compass_multiple_ground_truth_images_count']
                })

        # Subscribe to Image-message
        if self.changed_config_param(config, 'img_msg_topic') or \
            self.changed_config_param(config, 'img_msg_queue_size'):
            if hasattr(self, 'sub_image_msg'):
                self.sub_image_msg.unregister()
            self.sub_image_msg = rospy.Subscriber(
                config['img_msg_topic'],
                Image,
                self.image_callback,
                queue_size=config['img_msg_queue_size'],
                tcp_nodelay=True,
                buff_size=60000000)
            # https://github.com/ros/ros_comm/issues/536

        # Register message server to call set truth callback
        if self.changed_config_param(config, 'ground_truth_trigger_topic') or \
            self.changed_config_param(config, 'ground_truth_trigger_queue_size'):
            if hasattr(self, 'sub_trigger_set_ground_truth'):
                # Bug fix: the original unregistered sub_image_msg here
                # (copy-paste), leaking the old trigger subscription.
                self.sub_trigger_set_ground_truth.unregister()
            self.sub_trigger_set_ground_truth = rospy.Subscriber(
                config['ground_truth_trigger_topic'],
                Header,
                self.set_truth_callback,
                queue_size=config['ground_truth_trigger_queue_size'])

        self.config = config

        self.check_ground_truth_images_count()

        return self.config

    def set_truth_callback(self, request):
        """
        Trigger callback: records the current image with the camera yaw
        (looked up via TF at the image timestamp) as ground truth.

        param request: trigger message (Header); payload is unused
        """
        if self.image_msg:
            # TODO: check timestamps

            orientation = self.tf_buffer.lookup_transform(
                self.base_frame,
                self.camera_frame,
                self.image_msg.header.stamp,
                timeout=rospy.Duration(0.5)).transform.rotation
            # Yaw from the quaternion, shifted by 90 degrees and wrapped
            # to [0, 2*pi)
            yaw_angle = (euler_from_quaternion(
                (orientation.x, orientation.y, orientation.z,
                 orientation.w))[2] + 0.5 * math.pi) % (2 * math.pi)

            image = self.bridge.imgmsg_to_cv2(self.image_msg, 'bgr8')

            self.compass.set_truth(yaw_angle, image)
            self.ground_truth_images_count += 1
            self.check_ground_truth_images_count()

        else:
            rospy.logwarn('No image received yet.')

    def image_callback(self, image_msg):
        # type: (Image) -> None
        """
        Stores the newest image message for the next trigger.
        """
        # Drops old images
        # TODO: fix
        # image_age = rospy.get_rostime() - image_msg.header.stamp
        # if image_age.to_sec() > 0.1:
        #     print("Visual Compass: Dropped Image-message")  # TODO debug printer
        #     return

        self.image_msg = image_msg

    def check_ground_truth_images_count(self):
        # type: () -> None
        """
        Compares the recorded ground truth image count with the configured
        target and saves the ground truth once all images are set.
        """
        config_ground_truth_images_count = self.config[
            'compass_multiple_ground_truth_images_count']
        if self.ground_truth_images_count != config_ground_truth_images_count:
            rospy.loginfo(
                'Visual compass: %(var)d of %(config)d ground truth images set. More images are needed.'
                % {
                    'var': self.ground_truth_images_count,
                    'config': config_ground_truth_images_count
                })
            self.processed_set_all_ground_truth_images = False
        else:
            # Save only once per completed set
            if not (self.processed_set_all_ground_truth_images):
                rospy.loginfo(
                    'Visual compass: All ground truth images have been processed.'
                )
                self.save_ground_truth(self.config['ground_truth_file_path'])
            self.processed_set_all_ground_truth_images = True

    def save_ground_truth(self, ground_truth_file_path):
        # type (str) -> None
        """
        Pickles the compass ground truth features (keypoints, descriptors,
        metadata) to the given path inside the package and shuts the node
        down afterwards.

        param ground_truth_file_path: path relative to the package root
        """
        converter = KeyPointConverter()

        # get keypoints
        features = self.compass.get_ground_truth_features()

        # convert keypoints to basic values (cv2.KeyPoint is not picklable)
        keypoints = features[0]
        keypoint_values = [converter.key_point2values(kp) for kp in keypoints]

        descriptors = features[1]

        meta = {
            'field':
            self.config['ground_truth_field'],
            'date':
            datetime.now(),
            'device':
            self.hostname,
            'compass_type':
            self.config['compass_type'],
            'compass_matcher':
            self.config['compass_matcher'],
            'compass_multiple_ground_truth_images_count':
            self.config['compass_multiple_ground_truth_images_count'],
            'keypoint_count':
            len(keypoint_values),
            'descriptor_count':
            len(descriptors)
        }

        dump_features = {
            'keypoint_values': keypoint_values,
            'descriptors': descriptors,
            'meta': meta
        }

        # generate file path
        file_path = self.package_path + ground_truth_file_path
        # warn, if file does exist allready
        if path.isfile(file_path):
            rospy.logwarn(
                'Ground truth file at: %(path)s does ALLREADY EXIST. This will be overwritten.'
                % {'path': file_path})
        # save keypoints in pickle file
        with open(file_path, 'wb') as f:
            pickle.dump(dump_features, f)
        info_str = "\n\t-----------------------------------------------------------------------------------------------------------------\n" + \
        "\tSaved ground truth file at: %(path)s\n" % {'path': file_path} + \
        "\tRUN the following command on your system (NOT THE ROBOT) to save the ground truth file in your current directory:\n" + \
        "\n\tscp bitbots@%(host)s:%(path)s .\n" % {'path': file_path, 'host': self.hostname} + \
        "\t-----------------------------------------------------------------------------------------------------------------"
        rospy.loginfo(info_str)

        # shutdown setup process
        rospy.signal_shutdown('Visual compass setup finished cleanly.')

    def changed_config_param(self, config, param_name):
        # type: (dict, str) -> bool
        """
        Returns True if the given parameter is new or differs from the
        currently stored configuration.
        """
        return param_name not in self.config or config[
            param_name] != self.config[param_name]
class MultipleEvaluator(object):
    """
    Evaluates the visual compass on a recorded grid of field positions and
    saves quiver plots of the detected angle/confidence per position.
    """
    def __init__(self, dimensions, angle_steps):
        """
        param dimensions: grid dimensions of the recorded positions
        param angle_steps: number of discrete view angles per position
        """
        self.dimensions = dimensions
        self.angle_steps = angle_steps

        # Resolve the config file relative to this module's location
        dirname = os.path.dirname(__file__)
        relative_config_path = "../config/config.yaml"
        config_path = os.path.join(dirname, relative_config_path)

        # safe_load avoids arbitrary object construction from the YAML file
        with open(config_path, 'r') as stream:
            config = yaml.safe_load(stream)

        relative_data_path = config['evaluation_data']
        self.data_path = os.path.join(dirname, relative_data_path)

        self.visualization_path = os.path.join(dirname,
                                               config["visualization"])

        self.loader = DataLoader(self.data_path, self.dimensions,
                                 self.angle_steps)

        # Binary compass uses exactly two ground truth images
        self.sample_count = 2 if config['compass_type'] == 'binary' else config[
            'compass_multiple_map_image_count']
        self.vc = VisualCompass(config)

    def show_img(self, image):
        """Shows the image in the 'Record' window; waitKey keeps the UI alive."""
        cv2.imshow("Record", image)
        cv2.waitKey(1)

    def evaluate(self):
        """Runs the full evaluation: set ground truth, then process all images."""
        self.set_truth()
        self.evaluate_all_images()

    def set_truth(self):
        """Feeds evenly spaced ground truth images (from position 4,3) to the compass."""
        for i in range(self.sample_count):
            angle = float(i) / self.sample_count * math.radians(360)
            # Shift by 90 degrees and wrap, matching the recording convention
            angle = (angle + math.radians(90)) % math.radians(360)
            image = self.loader.get_image(4, 3, angle)
            self.vc.set_truth(angle, image)
            self.show_img(image)
        cv2.destroyAllWindows()

    def evaluate_all_images(self):
        """Evaluates 16 view directions and saves one figure per direction."""
        for i in range(16):

            plt.figure(i)
            self.evaluate_direction(math.pi / 8 * i, None)
            print(i)
            print("done")
            filename = "" + str(i) + ".png"
            plt.savefig(os.path.join(self.visualization_path, filename))

    def debug_image_callback(self, debug_image):
        """Debug callback; the preview is intentionally disabled for speed."""
        return
        # Unreachable on purpose -- remove the return above to re-enable:
        self.show_img(debug_image)
        time.sleep(0.5)

    def evaluate_direction(self, step, ax):
        """
        Plots a quiver of compass results over the position grid for one
        view direction, next to the reference image.

        param step: view direction in radians
        param ax: unused (kept for interface compatibility); the axis is
                  created internally
        """
        ax = plt.subplot(1, 2, 1)

        # U/V: unit vector of the detected angle scaled by confidence,
        # C: raw confidence used for coloring
        U = np.zeros(self.dimensions)
        V = np.zeros(self.dimensions)
        C = np.zeros(self.dimensions)
        for i in range(self.dimensions[0]):
            for j in range(self.dimensions[1]):
                image = self.loader.get_image(i, j, step)
                angle, confidence = self.vc.process_image(image)
                z = np.exp(1j * angle)
                U[i, j] = np.real(z) * confidence
                V[i, j] = np.imag(z) * confidence
                C[i, j] = confidence

        normalizer = colors.Normalize(0, 1)
        ax.quiver(U,
                  V,
                  C,
                  pivot='mid',
                  scale_units='xy',
                  scale=1,
                  norm=normalizer)
        ax.axis('equal')
        ax.axis('off')

        plt.subplot(1, 2, 2)
        plt.imshow(self.loader.get_image(4, 3, step))
# Example #4
class BinaryEvaluator(object):
    """
    Evaluates the visual compass in a two-sided (goal vs. goal) setting on
    a recorded grid of field positions and plots confidences and false
    positives as heatmaps.
    """
    def __init__(self, dimensions, angle_steps):
        """
        param dimensions: grid dimensions of the recorded positions
        param angle_steps: number of discrete view angles per position
        """
        self.dimensions = dimensions
        self.angle_steps = angle_steps

        # Resolve the config file relative to this module's location
        dirname = os.path.dirname(__file__)
        relative_config_path = "../config/config.yaml"
        config_path = os.path.join(dirname, relative_config_path)

        # safe_load avoids arbitrary object construction from the YAML file
        with open(config_path, 'r') as stream:
            config = yaml.safe_load(stream)

        relative_data_path = config['evaluation_data']
        self.data_path = os.path.join(dirname, relative_data_path)

        self.loader = DataLoader(self.data_path, self.dimensions, self.angle_steps)

        # Forces the multiple-image compass; the 'binary' branch below is
        # therefore never taken with this hard-coded override.
        config['compass_type'] = 'multiple'

        self.sample_count = 2 if config['compass_type'] == 'binary' else config['compass_multiple_ground_truth_images_count']
        self.vc = VisualCompass(config)

    def show_img(self, image):
        """Shows the image in the 'Record' window; waitKey keeps the UI alive."""
        cv2.imshow("Record", image)
        cv2.waitKey(1)

    def evaluate(self):
        """Runs the full evaluation: set ground truth, then process all images."""
        self.setTruth()
        self.evaluateAllImages()

    def setTruth(self):
        """Feeds evenly spaced ground truth images (from position 4,3) to the compass."""
        for i in range(self.sample_count):
            angle = float(i) / self.sample_count * math.radians(360)
            # Image is fetched with a 90 degree shifted, wrapped angle,
            # matching the recording convention
            image = self.loader.get_image(4, 3, self.float_mod(angle + math.radians(90), math.radians(360)))
            self.vc.set_truth(angle, image)
            self.show_img(image)
            time.sleep(0.5)
        cv2.destroyAllWindows()

    def debug_image_callback(self, debug_image):
        """Debug callback; the preview is intentionally disabled for speed."""
        return
        # Unreachable on purpose -- remove the return above to re-enable:
        self.show_img(debug_image)
        time.sleep(0.5)

    def evaluateAllImages(self):
        """
        Processes both goal directions for every grid position, collects
        confidences and false positives, plots them as heatmaps and prints
        failure statistics.
        """
        confidences = list()
        fail = 0.0
        # NOTE(review): initial value 0.5 for this counter looks like a
        # typo for 0.0 (it is incremented by 1 per low-confidence result)
        # -- confirm before changing, as it shifts the printed percentage.
        unsave = 0.5
        confidence_threshold = 0.4
        for row in range(self.dimensions[0]):
            confidences.append(list())
            for checkpoint in range(self.dimensions[1]):
                confidences[row].append(list())
                fails = list()
                # Angles 4 and 12 of 16 correspond to the two goal directions
                for angle in [4,12]: #,12]:# range(self.angle_steps):
                    image = self.loader.get_image(row, checkpoint, float(angle) / 16 * 2 * math.pi)
                    # self.show_img(image)
                    ground_truth = float(angle - 4)/16*2*math.pi
                    compass_result = self.vc.process_image(image, debugCB=self.debug_image_callback)
                    confidence = compass_result[1]
                    fail_val = 0.0
                    # Confident but wrong: count as a bad detection
                    if (abs(ground_truth - compass_result[0]) > 0.0001 and compass_result[1] > confidence_threshold):
                        print("Bad detection", compass_result[0], ground_truth)
                        fail += 1
                        fail_val = 1.0
                    # Below the threshold: treated as "no result"
                    if compass_result[1] <= confidence_threshold:
                        if abs(ground_truth - compass_result[0]) > 0.0001:
                            print("Filtered false positive")
                        # self.show_img(image)
                        unsave += 1
                        print("Confidence too low. Value: {}".format(confidence))
                        confidence = 0.0
                    confidences[row][checkpoint].append(confidence)
                    fails.append(fail_val)
                # Per checkpoint: [conf goal 1, conf goal 2, fail goal 1, fail goal 2]
                for fail_obj in fails:
                    confidences[row][checkpoint].append(fail_obj)
        # Two evaluated directions per grid position
        count = (self.dimensions[0] * self.dimensions[1]) * 2
        plt.title('Binary evaluation')
        plt.subplot(2, 2, 1)
        plt.title('Confidence values torwards goal 1')
        self.plot_confidence(confidences,0)
        plt.subplot(2, 2, 2)
        plt.title('Confidence values torwards goal 2')
        self.plot_confidence(confidences,1)
        plt.subplot(2, 2, 3)
        plt.title('False positives torwards goal 1')
        self.plot_confidence(confidences,2)
        plt.subplot(2, 2, 4)
        plt.title('False positives torwards goal 2')
        self.plot_confidence(confidences,3)
        plt.show()
        print("Fail {}%".format(fail/count*100))
        print("Unsave {}%".format(unsave/count*100))

    def plot_confidence(self, confidences, side):
        """
        Plots one slot of the per-checkpoint value lists as a heatmap.

        param confidences: nested [row][checkpoint][slot] value lists
        param side: slot index to plot (0/1 confidences, 2/3 fail flags)
        """
        # NOTE(review): the 10x7 grid is hard-coded; presumably matches
        # self.dimensions -- confirm against the recorded data set.
        a = np.zeros((10, 7))
        for row_index, row in enumerate(confidences):
            for checkpoint_index, checkpoint in enumerate(row):
                a[row_index, checkpoint_index] = float(checkpoint[side])
        plt.imshow(a, cmap='hot', interpolation='nearest')

    def float_mod(self,  number, modulo):
        """Floating point modulo with the sign of the divisor (like Python's %)."""
        return number - modulo * math.floor(number / modulo)