def test_open_new_path_manager(self) -> None:
    """Verify open() fails with no handler registered, then succeeds after
    an HTTPURLHandler is registered, yielding a non-empty local file."""
    with self._patch_download():
        path_manager = PathManager()
        with self.assertRaises(OSError):  # no handler registered yet
            f = path_manager.open(self._remote_uri, "rb")

        path_manager.register_handler(HTTPURLHandler())
        with path_manager.open(self._remote_uri, "rb") as f:
            self.assertTrue(os.path.isfile(f.name))
            # BUG FIX: the file is opened in binary mode, so read() returns
            # bytes; the original `f.read() != ""` compared bytes to str and
            # was therefore always True, even for an empty file.
            self.assertNotEqual(f.read(), b"")
def _read_image(file_name: str, path_manager: PathManager, format=None):
    """
    Read an image from a file using Pillow.

    Args:
        file_name: image file path.
        path_manager: PathManager for interpreting file_name.
        format: one of ["RGB", "BGR"], or None to keep the image's native
            channel layout unconverted.

    Returns:
        image: a float32 array of shape (H, W, C).

    Raises:
        ValueError: if format is not None, "RGB" or "BGR".
    """
    # BUG FIX (two issues): the default value None previously failed this
    # check even though the conversion code below explicitly handles None;
    # and the ValueError used logging-style (msg, arg) that was never
    # interpolated into the message.
    if format is not None and format not in ["RGB", "BGR"]:
        raise ValueError("format can only be one of [RGB, BGR]; got %s" % format)
    with path_manager.open(file_name, "rb") as f:
        # pyre-fixme[6]: Expected `Union[str, typing.BinaryIO]` for 1st param but
        #  got `Union[typing.IO[bytes], typing.IO[str]]`.
        image = Image.open(f)
        if format is not None:
            # PIL only supports RGB. First convert to RGB and flip channels
            # below for BGR.
            image = image.convert("RGB")
        image = np.asarray(image).astype(np.float32)
        if format == "BGR":
            # Flip the channel axis RGB -> BGR.
            image = image[:, :, ::-1]
        return image
def _open_file(f, path_manager: PathManager, mode="r") -> ContextManager[IO]: if isinstance(f, str): f = path_manager.open(f, mode) return contextlib.closing(f) elif isinstance(f, pathlib.Path): f = f.open(mode) return contextlib.closing(f) else: return nullcontext(f)
def _siamese_to_coco(self, siamese_json):
    """Convert a siamese-format annotation json to COCO format, with caching.

    The converted annotations are written to <output_dir>/siamese2coco.json
    and reused on subsequent calls until the cache file is deleted.

    Args:
        siamese_json: path of the siamese-format annotation file.

    Returns:
        Path of the cached (or freshly written) COCO-format json.
    """
    assert self._output_dir
    save_json = os.path.join(self._output_dir, "siamese2coco.json")
    pm = PathManager()
    pm.mkdirs(os.path.dirname(save_json))
    # file_lock keeps concurrent workers from converting/writing at once.
    with file_lock(save_json):
        if pm.exists(save_json):
            logger.warning(
                f"Using previously cached COCO format annotations at '{save_json}'. "
                "You need to clear the cache file if your dataset has been modified."
            )
        else:
            # BUG FIX: removed the stray ')' that was embedded in the log
            # message text ("... COCO format ...)").
            logger.info(
                f"Converting annotations of dataset '{siamese_json}' to COCO format ..."
            )
            with pm.open(siamese_json, "r") as f:
                siamese_data = json.load(f)
            coco_data = {"data": []}
            exist_imgid = set()
            for key, datas in siamese_data.items():
                # copy 'info', 'categories'
                if key != "data":
                    coco_data[key] = datas
                else:
                    # Each siamese entry holds an image pair under keys
                    # "0"/"1"; keep each unique image_id only once.
                    for data in datas:
                        for i in range(2):
                            img_data = data[str(i)]
                            if img_data["image_id"] in exist_imgid:
                                continue
                            exist_imgid.add(img_data["image_id"])
                            coco_data[key].append(img_data)
            # NOTE(review): this method mixes module-level `logger` and
            # `self._logger`; both are kept as-is — consider unifying.
            self._logger.info(f"Number of unique images: {len(exist_imgid)}.")
            coco_data = convert_to_coco_dict(coco_data["data"], self._metadata)
            with pm.open(save_json, "w") as f:
                json.dump(coco_data, f)
    return save_json
def __init__(self, vocab_path: Optional[str] = None, trainable: bool = False, speed: int = 0):
    """Build the tokenizer vocabulary, optionally loading tokens from a file.

    Args:
        vocab_path: optional path to a vocab file; the first whitespace-
            separated token of each non-empty line is added to the vocab.
        trainable: stored flag; not used inside this initializer.
        speed: mock tokenization-speed knob (slows down tokenization).
    """
    super(WhitespaceTokenizer, self).__init__()
    self.trainable = trainable
    self.speed = speed  # mock a real tokenizer: slowing down tokenization speed
    self.unknown = "unknown"
    # Index 0 is reserved for the unknown token.
    self.vocab: Dict[str, int] = {self.unknown: 0}
    # load vocab
    path_manager = PathManager()
    if vocab_path:
        with path_manager.open(vocab_path, "r") as f:
            for line in f:
                parts = line.split()
                # BUG FIX: blank lines used to raise IndexError on
                # `line.split()[0]`; duplicate tokens used to be reassigned
                # `len(self.vocab)`, colliding with the next new token's
                # index. Skip both cases.
                if parts and parts[0] not in self.vocab:
                    self.vocab[parts[0]] = len(self.vocab)
def evaluate(self):
    """Gather per-worker predictions and run all configured evaluations.

    Returns:
        A deep copy of the OrderedDict of results; {} on non-main ranks or
        when no predictions were gathered.
    """
    if self._distributed:
        comm.synchronize()
        predictions = comm.gather(self._predictions, dst=0)
        predictions = list(itertools.chain(*predictions))
        if not comm.is_main_process():
            return {}
    else:
        predictions = self._predictions
        # NOTE(review): unused local kept in case self._gt_corrs is a
        # property with side effects — confirm and remove if plain attr.
        gt_corrs = self._gt_corrs

    if len(predictions) == 0:
        self._logger.warning("[COCOEvaluator] Did not receive valid predictions.")
        return {}

    if self._output_dir:
        pm = PathManager()
        pm.mkdirs(self._output_dir)
        file_path = os.path.join(self._output_dir, "instances_predictions.pth")
        with pm.open(file_path, "wb") as f:
            torch.save(predictions, f)

    self._results = OrderedDict()
    # if not self._visualize:
    single_predictions = self._siamese_to_single(predictions)
    if "proposals" in single_predictions[0]:
        self._eval_box_proposals(single_predictions)
    if "instances" in single_predictions[0]:
        # self._eval_predictions(set(self._tasks), single_predictions)
        self._eval_plane(single_predictions)
    if "depth_l1_dist" in single_predictions[0]:
        self._eval_depth(single_predictions)
    if "embedding" in self._plane_tasks:
        self._eval_affinity(predictions)
    if "camera" in self._plane_tasks:
        summary = self._eval_camera(predictions)
        # CONSISTENCY FIX: write summary.pkl through PathManager like the
        # predictions file above (the builtin open() was used before), and
        # guard on self._output_dir — the original dereferenced it here
        # unconditionally and crashed when it was unset.
        if self._output_dir:
            file_path = os.path.join(self._output_dir, "summary.pkl")
            with PathManager().open(file_path, "wb") as f:
                pickle.dump(summary, f)
    # Copy so the caller can do whatever with results
    return copy.deepcopy(self._results)
def _open(self, path, mode="r", **kwargs):
    """Resolve *path* to its local copy and open it via PathManager."""
    local_path = self._get_local_path(path)
    return PathManager.open(local_path, mode, **kwargs)