Example No. 1
    def log_total_metrics(self):
        # Center every cell in a fixed-width column so the matrix lines up.
        def exp_one(arg):
            return str(arg).center(20)

        def exp_arg(args):
            return [exp_one(arg) for arg in args]

        log_line()
        log_head(' Result metrics values for {} IoU threshold '.format(
            self._iou_threshold))
        log_head(' Confusion matrix ')

        # Header row: one column per predicted class, plus a trailing
        # "False Negatives" column for unmatched ground-truth objects.
        sorted_gt_names = sorted(self._class_mapping.keys())
        pred_names = [
            self._class_mapping[gt_name] for gt_name in sorted_gt_names
        ]
        logger.info(''.join(exp_arg([''] + pred_names + ['False Negatives'])))

        # One row per ground-truth class: match counts against every
        # predicted class, then the count of missed (unmatched) objects.
        for gt_name in sorted_gt_names:
            logger.info(''.join([exp_one(gt_name)] + exp_arg([
                self._confusion_matrix[gt_name, pred_name]
                for pred_name in pred_names
            ]) + [exp_one(self._unmatched_gt[gt_name])]))
            log_line()

        # Final row: predictions that matched no ground-truth object.
        # Its false-negative cell is '0' by construction.
        logger.info(''.join([exp_one('False Positives')] + exp_arg(
            [self._unmatched_pred[pred_name]
             for pred_name in pred_names]) + [exp_one('0')]))
        log_line()
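
All three examples lean on a shared `logger` plus `log_line`/`log_head` helpers defined elsewhere in the module. A minimal sketch of what those helpers might look like, assuming the standard `logging` module and an 80-character line width (both assumptions, not the library's actual implementation):

import logging

logging.basicConfig(level=logging.INFO, format='%(message)s')
logger = logging.getLogger(__name__)


def log_line(length=80, fill='-'):
    # Horizontal separator row.
    logger.info(fill * length)


def log_head(message, length=80, fill='*'):
    # Section heading centered inside a separator row.
    logger.info(message.center(length, fill))
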
Example No. 2
    def log_total_metrics(self):
        log_line()
        log_head(' Result metrics values for {} IoU threshold '.format(self._iou_threshold))

        # Per-class block: one AP value for each ground-truth class and the
        # predicted class it is mapped to.
        classes_values = self.get_metrics()
        for cls_gt, pair_values in classes_values.items():
            average_precision = pair_values[AP]
            log_line()
            log_head(' Results for pair of classes <<{} <-> {}>>  '.format(
                cls_gt, self._gt_to_pred_class_mapping[cls_gt]))
            logger.info('Average Precision (AP): {}'.format(average_precision))

        # Aggregate block: mAP averaged over the per-class AP values.
        log_line()
        log_head(' Mean metrics values ')
        logger.info('Mean Average Precision (mAP): {}'.format(
            self.average_per_class_avg_precision(classes_values)))
        log_line()
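
The mAP line above delegates to `average_per_class_avg_precision`, whose body is not shown. Under the usual definition, mAP is the unweighted mean of the per-class AP values; here is a hedged sketch under that assumption (the `AP` key constant and the free-function form are placeholders, not the library's code):

AP = 'average-precision'  # hypothetical key; the real constant is defined elsewhere


def average_per_class_avg_precision(classes_values):
    # mAP: unweighted mean of the per-class AP values.
    ap_values = [pair_values[AP] for pair_values in classes_values.values()]
    return sum(ap_values) / len(ap_values) if ap_values else 0.0
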
Example No. 3
    def log_total_metrics(self):
        log_line()
        log_head(' Result metrics values for {} IoU threshold '.format(self._iou_threshold))

        # Per-class block: precision and recall for each ground-truth class
        # and the predicted class it is mapped to.
        for gt_class, values in self.get_metrics().items():
            log_line()
            log_head(' Results for pair of classes <<{} <-> {}>>  '.format(
                gt_class, self._gt_to_pred_class_mapping[gt_class]))
            logger.info('Precision: {}'.format(values[PRECISION]))
            logger.info('Recall: {}'.format(values[RECALL]))

        # Aggregate block: totals as reported by get_total_metrics().
        log_line()
        log_head(' Total metrics values ')
        total_values = self.get_total_metrics()
        logger.info('Precision: {}'.format(total_values[PRECISION]))
        logger.info('Recall: {}'.format(total_values[RECALL]))
        log_line()
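
Example No. 3 only reads `PRECISION` and `RECALL` values that `get_metrics()` and `get_total_metrics()` have already computed. For reference, the standard definitions behind those numbers, sketched with hypothetical key constants and counts:

PRECISION = 'precision'  # hypothetical key constants; the real ones live elsewhere
RECALL = 'recall'


def precision_recall(true_positives, false_positives, false_negatives):
    # precision = TP / (TP + FP): fraction of predictions that were correct.
    # recall    = TP / (TP + FN): fraction of ground-truth objects found.
    predicted = true_positives + false_positives
    actual = true_positives + false_negatives
    return {
        PRECISION: true_positives / predicted if predicted else 0.0,
        RECALL: true_positives / actual if actual else 0.0,
    }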