Example #1
    # Assumed imports for this excerpt (module names are inferred from the
    # detectron2-style API in use; they are not shown in the original):
    #   import copy, itertools, os, pickle
    #   import torch
    #   from collections import OrderedDict
    #   import detectron2.utils.comm as comm
    #   from iopath.common.file_io import PathManager
    def evaluate(self):
        if self._distributed:
            # Gather per-rank predictions onto the main process.
            comm.synchronize()
            predictions = comm.gather(self._predictions, dst=0)
            predictions = list(itertools.chain(*predictions))

            # Every rank except the main process returns early.
            if not comm.is_main_process():
                return {}
        else:
            predictions = self._predictions
            # Note: gt_corrs is only bound on this non-distributed path.
            gt_corrs = self._gt_corrs

        if len(predictions) == 0:
            self._logger.warning(
                "[COCOEvaluator] Did not receive valid predictions.")
            return {}

        if self._output_dir:
            # Persist the raw predictions so evaluation can be re-run offline.
            pm = PathManager()
            pm.mkdirs(self._output_dir)
            file_path = os.path.join(self._output_dir,
                                     "instances_predictions.pth")
            with pm.open(file_path, "wb") as f:
                torch.save(predictions, f)

        self._results = OrderedDict()
        # Flatten the siamese (paired) predictions into per-image entries,
        # then dispatch to per-task evaluators based on the fields present.
        single_predictions = self._siamese_to_single(predictions)
        if "proposals" in single_predictions[0]:
            self._eval_box_proposals(single_predictions)
        if "instances" in single_predictions[0]:
            self._eval_plane(single_predictions)
        if "depth_l1_dist" in single_predictions[0]:
            self._eval_depth(single_predictions)
        if "embedding" in self._plane_tasks:
            self._eval_affinity(predictions)
        if "camera" in self._plane_tasks:
            summary = self._eval_camera(predictions)
            file_path = os.path.join(self._output_dir, "summary.pkl")
            with open(file_path, "wb") as f:
                pickle.dump(summary, f)
        # Copy so the caller can do whatever they want with the results.
        return copy.deepcopy(self._results)
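
For context, here is a minimal, self-contained sketch of the reset/process/evaluate loop that drives an evaluator like the one above. Everything named here (TinyEvaluator, run_eval, the fake loader and model) is illustrative; only the three-method shape mirrors detectron2's DatasetEvaluator contract.

from collections import OrderedDict

class TinyEvaluator:
    """Toy stand-in that follows the reset/process/evaluate contract."""

    def reset(self):
        self._predictions = []

    def process(self, inputs, outputs):
        # Accumulate per-batch outputs; a real evaluator stores richer records.
        self._predictions.extend(outputs)

    def evaluate(self):
        # Aggregate everything accumulated so far into a results dict.
        return OrderedDict(num_predictions=len(self._predictions))

def run_eval(model_fn, data_loader, evaluator):
    evaluator.reset()
    for inputs in data_loader:
        evaluator.process(inputs, model_fn(inputs))
    return evaluator.evaluate()

fake_loader = [[1, 2], [3]]     # two "batches" of dummy inputs
identity_model = lambda xs: xs  # pretend inference step
print(run_eval(identity_model, fake_loader, TinyEvaluator()))  # num_predictions == 3
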
Example #2
    # Assumed imports for this excerpt (inferred; not shown in the original).
    # convert_to_coco_dict here takes (dataset_dicts, metadata), so it is
    # presumably a project-local variant of detectron2's converter:
    #   import json, logging, os
    #   from iopath.common.file_io import PathManager, file_lock
    #   logger = logging.getLogger(__name__)  # module-level logger used below
    def _siamese_to_coco(self, siamese_json):
        assert self._output_dir
        save_json = os.path.join(self._output_dir, "siamese2coco.json")
        pm = PathManager()
        pm.mkdirs(os.path.dirname(save_json))
        # The file lock keeps concurrent workers from writing the cache twice.
        with file_lock(save_json):
            if pm.exists(save_json):
                logger.warning(
                    f"Using previously cached COCO format annotations at '{save_json}'. "
                    "You need to clear the cache file if your dataset has been modified."
                )
            else:
                logger.info(
                    f"Converting annotations of dataset '{siamese_json}' to COCO format ..."
                )
                with pm.open(siamese_json, "r") as f:
                    siamese_data = json.load(f)
                coco_data = {"data": []}
                exist_imgid = set()
                for key, value in siamese_data.items():
                    # Copy top-level keys such as 'info' and 'categories' as-is.
                    if key != "data":
                        coco_data[key] = value
                    else:
                        # Each record holds an image pair under keys "0" and "1";
                        # keep every unique image_id exactly once.
                        for data in value:
                            for i in range(2):
                                img_data = data[str(i)]
                                if img_data["image_id"] in exist_imgid:
                                    continue
                                exist_imgid.add(img_data["image_id"])
                                coco_data[key].append(img_data)
                self._logger.info(
                    f"Number of unique images: {len(exist_imgid)}.")
                coco_data = convert_to_coco_dict(coco_data["data"],
                                                 self._metadata)
                with pm.open(save_json, "w") as f:
                    json.dump(coco_data, f)
        return save_json
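
The heart of Example #2 is the pair-flattening step: each "siamese" record stores two images under the keys "0" and "1", and every unique image_id must land in the flattened list exactly once. Below is a standalone sketch of just that step; the data and the helper name flatten_pairs are made up.

def flatten_pairs(records):
    seen, images = set(), []
    for rec in records:
        for i in range(2):
            img = rec[str(i)]
            if img["image_id"] not in seen:  # drop duplicates across pairs
                seen.add(img["image_id"])
                images.append(img)
    return images

pairs = [
    {"0": {"image_id": 1}, "1": {"image_id": 2}},
    {"0": {"image_id": 2}, "1": {"image_id": 3}},  # image 2 appears again
]
assert [im["image_id"] for im in flatten_pairs(pairs)] == [1, 2, 3]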