Example #1
    def _calculate_average_precision(gt_class, pred_class, pair_counters):
        # No matches at all, or none of them correct: AP degenerates to zero.
        if not any(match.is_correct for match in pair_counters[MATCHES]):
            logger.warning(
                'No matching samples for pair {!r} <-> {!r} have been detected. '
                'MAP value for this pair will be set to 0.'.format(
                    gt_class, pred_class))
            return 0

        # Rank matches by descending confidence, then accumulate correct hits
        # to obtain precision and recall at every possible cut-off.
        sorted_matches = sorted(pair_counters[MATCHES],
                                key=lambda match: match.confidence,
                                reverse=True)
        correct_indicators = [int(match.is_correct) for match in sorted_matches]
        total_correct = np.cumsum(correct_indicators)
        recalls = total_correct / pair_counters[TOTAL_GROUND_TRUTH]
        precisions = total_correct / (np.arange(len(correct_indicators)) + 1)
        # 11-point interpolated AP (PASCAL VOC style): at each anchor recall,
        # take the best precision achieved at that recall level or above.
        anchor_precisions = []
        for anchor_recall in np.linspace(0, 1, 11):
            points_above_recall = (recalls >= anchor_recall)
            anchor_precisions.append(
                np.max(precisions[points_above_recall])
                if np.any(points_above_recall) else 0)
        return np.mean(anchor_precisions)
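
A minimal sketch of how such a pair-counter structure could be fed in, assuming numpy is imported as np, the helper is reachable where it is called, and MATCHES / TOTAL_GROUND_TRUTH are the counter keys; the Match record and key strings below are hypothetical stand-ins, not the library's own definitions:

    from collections import namedtuple

    # Hypothetical match record: one prediction matched against ground truth.
    Match = namedtuple('Match', ['confidence', 'is_correct'])

    MATCHES = 'matches'             # assumed key names
    TOTAL_GROUND_TRUTH = 'total_gt'

    pair_counters = {
        MATCHES: [Match(0.9, True), Match(0.8, False), Match(0.6, True)],
        TOTAL_GROUND_TRUTH: 3,      # three ground-truth objects for this class pair
    }

    # Two of three ground-truth objects are recovered, so AP lands between 0 and 1.
    ap = _calculate_average_precision('car', 'car', pair_counters)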
Example #2
    def upload_paths(self, dataset_id, names, paths, progress_cb=None, metas=None):
        def path_to_bytes_stream(path):
            # Each local file is opened lazily as a binary stream for the bulk uploader.
            return open(path, 'rb')

        video_info_results = []

        hashes = [get_file_hash(x) for x in paths]

        self._upload_data_bulk(path_to_bytes_stream, zip(paths, hashes), progress_cb=progress_cb)
        # Note: the incoming `metas` argument is ignored; stream metadata is
        # re-fetched from the server by file hash instead.
        hash_metas = self._api.import_storage.get_meta_by_hashes(hashes)
        stream_metas = [hash_meta["meta"] for hash_meta in hash_metas]

        for name, file_hash, meta in zip(names, hashes, stream_metas):
            try:
                all_streams = meta["streams"]
                video_streams = get_video_streams(all_streams)
                for stream_info in video_streams:
                    stream_index = stream_info["index"]

                    #TODO: check is community
                    # if instance_type == sly.COMMUNITY:
                    #     if _check_video_requires_processing(file_info, stream_info) is True:
                    #         warn_video_requires_processing(file_name)
                    #         continue

                    item_name = name
                    info = self._api.video.get_info_by_name(dataset_id, item_name)
                    if info is not None:
                        # The plain name is already taken in the dataset:
                        # derive a unique per-stream name instead.
                        item_name = gen_video_stream_name(name, stream_index)
                    res = self.upload_hash(dataset_id, item_name, file_hash, stream_index)
                    video_info_results.append(res)
            except Exception as e:
                logger.warning("File skipped {!r}: error occurred during processing {!r}".format(name, str(e)))

        return video_info_results
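
A usage sketch, assuming a Supervisely-style api.video facade exposing this method; the dataset id, file paths, and progress callback below are hypothetical placeholders:

    # Hypothetical caller: upload two local videos and list the created items.
    def report(count):
        print('items uploaded so far: {}'.format(count))

    uploaded = api.video.upload_paths(
        dataset_id=123,
        names=['clip_a.mp4', 'clip_b.mp4'],
        paths=['/data/clip_a.mp4', '/data/clip_b.mp4'],
        progress_cb=report)
    for info in uploaded:
        print(info.name)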
Example #3
    def is_passes_confidence_threshold(tag):
        if tag.meta.value_type == TagValueType.NONE:
            return True
        elif tag.meta.value_type == TagValueType.ANY_NUMBER:
            return tag.value >= self._confidence_threshold
        elif tag.meta.value_type in (TagValueType.ANY_STRING, TagValueType.ONEOF_STRING):
            # String-valued tags carry no numeric confidence, so the threshold
            # cannot be applied; keep the tag but warn about it.
            logger.warning("Classification tag '{}' has a string value type; "
                           "confidence threshold is not applicable.".format(tag.name))
            return True
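
A sketch of how this predicate might filter prediction tags before further processing; the annotation object and its img_tags accessor follow Supervisely naming but are assumptions here:

    # Hypothetical: keep only the tags that clear the confidence threshold.
    confident_tags = [tag for tag in ann_pred.img_tags
                      if is_passes_confidence_threshold(tag)]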
Example #4
    def run_evaluation(self):
        progress = Progress('metric evaluation', self._project_gt.total_items)
        for ds_name in self._project_gt.datasets.keys():
            # Ground-truth and prediction datasets are paired by name.
            ds_gt = self._project_gt.datasets.get(ds_name)
            ds_pred = self._project_pred.datasets.get(ds_name)

            for sample_name in ds_gt:
                try:
                    ann_gt = Annotation.load_json_file(ds_gt.get_ann_path(sample_name), self._project_gt.meta)
                    ann_pred = Annotation.load_json_file(ds_pred.get_ann_path(sample_name), self._project_pred.meta)
                    self._metric.add_pair(ann_gt, ann_pred)
                except ValueError as e:
                    logger.warning('An error has occurred ({}). Sample "{}" in dataset "{}" will be skipped'
                                   .format(str(e), sample_name, ds_gt.name))
                progress.iter_done_report()
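
A minimal driver sketch for this method; the evaluator class, its constructor arguments, and the result accessor below are hypothetical wiring, not the library's confirmed API:

    # Hypothetical: evaluate a predictions project against its ground truth.
    evaluator = MetricEvaluator(project_gt=project_gt,       # assumed ctor args
                                project_pred=project_pred,
                                metric=map_metric)
    evaluator.run_evaluation()
    results = evaluator._metric.get_metrics()  # assumed result accessor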