Example #1
import logging
from typing import Optional

# Module-level PathManager, initialized lazily on first use (assumed to be
# defined at module scope, since `opena` declares it global).
IOPathPathManager = None


def opena(
    path: str,
    mode: str = "r",
    buffering: int = -1,
    encoding: Optional[str] = None,
    errors: Optional[str] = None,
    newline: Optional[str] = None,
):
    """
    Return a file-like object that supports asynchronous write operations.
    """
    global IOPathPathManager
    if not IOPathPathManager:
        logging.info("ioPath is initializing PathManager.")
        try:
            from iopath.common.file_io import PathManager

            IOPathPathManager = PathManager()
        except Exception:
            logging.exception("Failed to initialize ioPath PathManager object.")
    return IOPathPathManager.opena(
        path=path,
        mode=mode,
        buffering=buffering,
        encoding=encoding,
        errors=errors,
        newline=newline,
    )
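
A minimal usage sketch (not part of the original source; assumes iopath is installed so that `IOPathPathManager` gets initialized, and the path is a placeholder):

# Writes are queued and performed by a background thread.
with opena("/tmp/demo.txt", "w") as f:
    f.write("written asynchronously")
# Block until all queued writes are flushed and the worker threads exit.
IOPathPathManager.async_close()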
Example #2
    def __init__(
        self,
        model: nn.Module,
        save_dir: str = "",
        *,
        save_to_disk: bool = True,
        **checkpointables: object,
    ) -> None:
        """
        Args:
            model (nn.Module): model.
            save_dir (str): a directory to save and find checkpoints.
            save_to_disk (bool): if True, save checkpoint to disk, otherwise
                disable saving for this checkpointer.
            checkpointables (object): any checkpointable objects, i.e., objects
                that have the `state_dict()` and `load_state_dict()` method. For
                example, it can be used like
                `Checkpointer(model, "dir", optimizer=optimizer)`.
        """
        if isinstance(model, (DistributedDataParallel, DataParallel)):
            model = model.module
        self.model = model
        self.checkpointables = copy.copy(checkpointables)  # pyre-ignore
        self.logger = logging.getLogger(__name__)  # pyre-ignore
        self.save_dir = save_dir
        self.save_to_disk = save_to_disk
        # Default PathManager, supports HTTP URLs (for backward compatibility in open source).
        # A user may want to use a different project-specific PathManager.
        self.path_manager: PathManager = PathManager()
        self.path_manager.register_handler(HTTPURLHandler())
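
A hedged sketch of the constructor in use, assuming this is fvcore's `Checkpointer` (the import path and output layout are assumptions):

import os
import torch.nn as nn
from torch import optim
from fvcore.common.checkpoint import Checkpointer

model = nn.Linear(4, 2)
optimizer = optim.SGD(model.parameters(), lr=0.01)
os.makedirs("output", exist_ok=True)
checkpointer = Checkpointer(model, "output", optimizer=optimizer)
checkpointer.save("model_final")  # writes output/model_final.pth through self.path_manager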
Example #3
def save_obj(
    f,
    verts,
    faces,
    decimal_places: Optional[int] = None,
    path_manager: Optional[PathManager] = None,
):
    """
    Save a mesh to an .obj file.

    Args:
        f: File (or path) to which the mesh should be written.
        verts: FloatTensor of shape (V, 3) giving vertex coordinates.
        faces: LongTensor of shape (F, 3) giving faces.
        decimal_places: Number of decimal places for saving.
        path_manager: Optional PathManager for interpreting f if
            it is a str.
    """
    if len(verts) and not (verts.dim() == 2 and verts.size(1) == 3):
        message = "Argument 'verts' should either be empty or of shape (num_verts, 3)."
        raise ValueError(message)

    if len(faces) and not (faces.dim() == 2 and faces.size(1) == 3):
        message = "Argument 'faces' should either be empty or of shape (num_faces, 3)."
        raise ValueError(message)

    if path_manager is None:
        path_manager = PathManager()

    with _open_file(f, path_manager, "w") as f:
        return _save(f, verts, faces, decimal_places)
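
A hedged usage sketch, assuming this is pytorch3d's `save_obj` (the file name is illustrative):

import torch
from pytorch3d.io import save_obj

# Save a single triangle to an .obj file.
verts = torch.tensor([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
faces = torch.tensor([[0, 1, 2]], dtype=torch.int64)
save_obj("triangle.obj", verts, faces, decimal_places=5)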
Example #4
def _load_texture_images(
    material_names: List[str],
    data_dir: str,
    material_properties: MaterialProperties,
    texture_files: TextureFiles,
    path_manager: PathManager,
) -> Tuple[MaterialProperties, TextureImages]:
    final_material_properties = {}
    texture_images = {}

    # Only keep the materials referenced in the obj.
    for material_name in material_names:
        if material_name in texture_files:
            # Load the texture image.
            path = os.path.join(data_dir, texture_files[material_name])
            if path_manager.exists(path):
                image = (_read_image(
                    path, path_manager=path_manager, format="RGB") / 255.0)
                image = torch.from_numpy(image)
                texture_images[material_name] = image
            else:
                msg = f"Texture file does not exist: {path}"
                warnings.warn(msg)

        if material_name in material_properties:
            final_material_properties[material_name] = material_properties[
                material_name]

    return final_material_properties, texture_images
Example #5
def _read_image(file_name: str, path_manager: PathManager, format=None):
    """
    Read an image from a file using Pillow.
    Args:
        file_name: image file path.
        path_manager: PathManager for interpreting file_name.
        format: one of ["RGB", "BGR"]
    Returns:
        image: an image of shape (H, W, C).
    """
    if format not in ["RGB", "BGR"]:
        raise ValueError(f"format can only be one of [RGB, BGR]; got {format}")
    with path_manager.open(file_name, "rb") as f:
        # pyre-fixme[6]: Expected `Union[str, typing.BinaryIO]` for 1st param but
        #  got `Union[typing.IO[bytes], typing.IO[str]]`.
        image = Image.open(f)
        if format is not None:
            # PIL only supports RGB. First convert to RGB and flip channels
            # below for BGR.
            image = image.convert("RGB")
        image = np.asarray(image).astype(np.float32)
        if format == "BGR":
            image = image[:, :, ::-1]
        return image
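
One caveat worth noting: with `format="BGR"` the returned array is a negatively-strided NumPy view, which `torch.from_numpy` rejects; that is presumably why callers that convert to tensors (see Example #4) request "RGB". A hedged sketch (the file name is a placeholder):

from iopath.common.file_io import PathManager

img = _read_image("texture.png", path_manager=PathManager(), format="RGB")
# img is a float32 array of shape (H, W, 3); for "BGR" output, make it
# contiguous first, e.g. torch.from_numpy(np.ascontiguousarray(img)).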
Example #6
def save_ply(
    f,
    verts: torch.Tensor,
    faces: Optional[torch.LongTensor] = None,
    verts_normals: Optional[torch.Tensor] = None,
    ascii: bool = False,
    decimal_places: Optional[int] = None,
    path_manager: Optional[PathManager] = None,
) -> None:
    """
    Save a mesh to a .ply file.

    Args:
        f: File (or path) to which the mesh should be written.
        verts: FloatTensor of shape (V, 3) giving vertex coordinates.
        faces: LongTensor of shape (F, 3) giving faces.
        verts_normals: FloatTensor of shape (V, 3) giving vertex normals.
        ascii: (bool) whether to use the ascii ply format.
        decimal_places: Number of decimal places for saving if ascii=True.
        path_manager: PathManager for interpreting f if it is a str.

    """

    if len(verts) and not (verts.dim() == 2 and verts.size(1) == 3):
        message = "Argument 'verts' should either be empty or of shape (num_verts, 3)."
        raise ValueError(message)

    if (
        faces is not None
        and len(faces)
        and not (faces.dim() == 2 and faces.size(1) == 3)
    ):
        message = "Argument 'faces' should either be empty or of shape (num_faces, 3)."
        raise ValueError(message)

    if (
        verts_normals is not None
        and len(verts_normals)
        and not (
            verts_normals.dim() == 2
            and verts_normals.size(1) == 3
            and verts_normals.size(0) == verts.size(0)
        )
    ):
        message = "Argument 'verts_normals' should either be empty or of shape (num_verts, 3)."
        raise ValueError(message)

    if path_manager is None:
        path_manager = PathManager()
    with _open_file(f, path_manager, "wb") as f:
        _save_ply(
            f,
            verts=verts,
            faces=faces,
            verts_normals=verts_normals,
            verts_colors=None,
            ascii=ascii,
            decimal_places=decimal_places,
        )
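
A hedged usage sketch, assuming this is pytorch3d's `save_ply` (the file name is illustrative):

import torch
from pytorch3d.io import save_ply

# Write a two-triangle mesh in the ascii ply format.
verts = torch.rand(4, 3)
faces = torch.tensor([[0, 1, 2], [0, 2, 3]], dtype=torch.int64)
save_ply("mesh.ply", verts=verts, faces=faces, ascii=True, decimal_places=5)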
Example #7
    def __init__(self,
                 vocab_path: Optional[str] = None,
                 trainable=False,
                 speed: int = 0):
        super(WhitespaceTokenizer, self).__init__()
        self.trainable = trainable
        self.speed = speed  # mock a real tokenizer: slowing down tokenization speed

        self.unknown = "unknown"
        self.vocab: Dict[str, int] = {self.unknown: 0}

        # load vocab
        path_manager = PathManager()
        if vocab_path:
            with path_manager.open(vocab_path, "r") as f:
                for line in f.readlines():
                    token = line.split()[0]
                    self.vocab[token] = len(self.vocab)
Example #8
def load_ply(
    f, *, path_manager: Optional[PathManager] = None
) -> Tuple[torch.Tensor, torch.Tensor]:
    """
    Load the verts and faces from a .ply file.
    Note that the preferred way to load data from such a file
    is to use the IO.load_mesh and IO.load_pointcloud functions,
    which can read more of the data.

    Example .ply file format:

    ply
    format ascii 1.0           { ascii/binary, format version number }
    comment made by Greg Turk  { comments keyword specified, like all lines }
    comment this file is a cube
    element vertex 8           { define "vertex" element, 8 of them in file }
    property float x           { vertex contains float "x" coordinate }
    property float y           { y coordinate is also a vertex property }
    property float z           { z coordinate, too }
    element face 6             { there are 6 "face" elements in the file }
    property list uchar int vertex_index { "vertex_indices" is a list of ints }
    end_header                 { delimits the end of the header }
    0 0 0                      { start of vertex list }
    0 0 1
    0 1 1
    0 1 0
    1 0 0
    1 0 1
    1 1 1
    1 1 0
    4 0 1 2 3                  { start of face list }
    4 7 6 5 4
    4 0 4 5 1
    4 1 5 6 2
    4 2 6 7 3
    4 3 7 4 0

    Args:
        f:  A binary or text file-like object (with methods read, readline,
            tell and seek), a pathlib path or a string containing a file name.
            If the ply file is in the binary ply format rather than the text
            ply format, then a text stream is not supported.
            It is easiest to use a binary stream in all cases.
        path_manager: PathManager for loading if f is a str.

    Returns:
        verts: FloatTensor of shape (V, 3).
        faces: LongTensor of vertex indices, shape (F, 3).
    """

    if path_manager is None:
        path_manager = PathManager()
    verts, faces, _, _ = _load_ply(f, path_manager=path_manager)
    if faces is None:
        faces = torch.zeros(0, 3, dtype=torch.int64)

    return verts, faces
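
A hedged sketch of the corresponding read path (assumes the "mesh.ply" written in the sketch after Example #6):

from pytorch3d.io import load_ply

verts, faces = load_ply("mesh.ply")  # FloatTensor (V, 3), LongTensor (F, 3)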
Example #9
def _open_file(f, path_manager: PathManager, mode="r") -> ContextManager[IO]:
    if isinstance(f, str):
        f = path_manager.open(f, mode)
        return contextlib.closing(f)
    elif isinstance(f, pathlib.Path):
        f = f.open(mode)
        return contextlib.closing(f)
    else:
        return nullcontext(f)
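
This helper normalizes strings, `pathlib.Path` objects, and already-open streams into one context-manager interface, and `nullcontext` ensures a caller-owned stream is not closed on exit. A hedged illustration:

from io import StringIO
from iopath.common.file_io import PathManager

stream = StringIO("already open")
with _open_file(stream, PathManager(), "r") as f:
    print(f.read())
assert not stream.closed  # nullcontext left the caller's stream open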
Example #10
def get_skateboard_data(
    avoid_manifold: bool = False,
    silence_logs: bool = False
) -> Generator[Tuple[str, PathManager], None, None]:
    """
    Context manager for accessing the Co3D dataset in tests, at least for
    the first 5 skateboards. Internally, we want this to exercise the
    normal way to access the data directly from Manifold, but on an RE
    worker this is impossible, so we use a workaround.

    Args:
        avoid_manifold: Use the method used by RE workers even locally.
        silence_logs: Whether to reduce log output from iopath library.

    Yields:
        dataset_root: (str) path to dataset root.
        path_manager: path_manager to access it with.
    """
    path_manager = PathManager()
    if silence_logs:
        logging.getLogger("iopath.fb.manifold").setLevel(logging.CRITICAL)
        logging.getLogger("iopath.common.file_io").setLevel(logging.CRITICAL)

    if not os.environ.get("FB_TEST", False):
        if os.getenv("FAIR_ENV_CLUSTER", "") == "":
            raise unittest.SkipTest("Unknown environment. Data not available.")
        yield "/checkpoint/dnovotny/datasets/co3d/download_aws_22_02_18", path_manager

    elif avoid_manifold or os.environ.get("INSIDE_RE_WORKER", False):
        from libfb.py.parutil import get_file_path

        par_path = "skateboard_first_5"
        source = get_file_path(par_path)
        assert Path(source).is_file()
        with tempfile.TemporaryDirectory() as dest:
            with ZipFile(source) as f:
                f.extractall(dest)
            yield os.path.join(dest, "extracted"), path_manager
    else:
        from iopath.fb.manifold import ManifoldPathHandler

        path_manager.register_handler(ManifoldPathHandler())

        yield "manifold://co3d/tree/extracted", path_manager
Example #11
    def evaluate(self):
        if self._distributed:
            comm.synchronize()
            predictions = comm.gather(self._predictions, dst=0)
            predictions = list(itertools.chain(*predictions))

            if not comm.is_main_process():
                return {}
        else:
            predictions = self._predictions
            gt_corrs = self._gt_corrs

        if len(predictions) == 0:
            self._logger.warning(
                "[COCOEvaluator] Did not receive valid predictions.")
            return {}

        if self._output_dir:
            pm = PathManager()
            pm.mkdirs(self._output_dir)
            file_path = os.path.join(self._output_dir,
                                     "instances_predictions.pth")
            with pm.open(file_path, "wb") as f:
                torch.save(predictions, f)

        self._results = OrderedDict()
        # if not self._visualize:
        single_predictions = self._siamese_to_single(predictions)
        if "proposals" in single_predictions[0]:
            self._eval_box_proposals(single_predictions)
        if "instances" in single_predictions[0]:
            # self._eval_predictions(set(self._tasks), single_predictions)
            self._eval_plane(single_predictions)
        if "depth_l1_dist" in single_predictions[0]:
            self._eval_depth(single_predictions)
        if "embedding" in self._plane_tasks:
            self._eval_affinity(predictions)
        if "camera" in self._plane_tasks:
            summary = self._eval_camera(predictions)
            file_path = os.path.join(self._output_dir, "summary.pkl")
            with open(file_path, "wb") as f:
                pickle.dump(summary, f)
        # Copy so the caller can do whatever with results
        return copy.deepcopy(self._results)
Example #12
class TestDriver:
    _pathmgr = PathManager()

    def test(self) -> None:
        model = Model()
        optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)

        print("Model's state_dict:")
        for param_tensor in model.state_dict():
            print(f"{param_tensor}\t{model.state_dict()[param_tensor].size()}")
        print("Optimizer's state_dict:")
        for var_name in optimizer.state_dict():
            print(f"{var_name}\t{optimizer.state_dict()[var_name]}")

        with tempfile.TemporaryDirectory() as _tmpdir:
            try:
                URI = os.path.join(_tmpdir, "test.ckpt")

                f = self._pathmgr.opena(URI, "wb")
                i = "*"
                large = f"{i}" * 1000000000

                print("Starting `torch.save` call.")
                torch.save(
                    {
                        "model_state_dict": model.state_dict(),
                        "optimizer_state_dict": optimizer.state_dict(),
                        "large": large,
                    },
                    f,
                )
                f.close()
                start_time = time.time()

            finally:
                # We want this `join` call to take time. If it is instantaneous,
                # then our async write calls are not running asynchronously.
                print(
                    "Waiting for `torch.save` call to complete at `async_join()`."
                )
                self._pathmgr.async_join()

            print("Time Python waited for `async_join()` call to finish: "
                  f"{time.time() - start_time}s.")
            assert self._pathmgr.async_close()

            checkpoint = torch.load(URI)
            for key_item_1, key_item_2 in zip(
                    model.state_dict().items(),
                    checkpoint["model_state_dict"].items()):
                assert torch.equal(key_item_1[1], key_item_2[1])
            assert optimizer.state_dict() == checkpoint["optimizer_state_dict"]
            assert large == checkpoint["large"]

            print("Async `torch.save` Test succeeded.")
Example #13
    def _get_local_path(self, path, **kwargs):
        model_name = path[len(self.PREFIX):]
        dataset_name, *model_name, data_type = model_name.split('/')

        if data_type == 'weight':
            model_url = MODEL_CATALOG[dataset_name]['/'.join(model_name)]
        elif data_type == 'config':
            model_url = CONFIG_CATALOG[dataset_name]['/'.join(model_name)]
        else:
            raise ValueError(f"Unknown data_type {data_type}")
        return PathManager.get_local_path(model_url, **kwargs)
Example #14
def create_path_manager() -> PathManager:
    # TODO: move this inline import out after AIRStore OSS is publicly released
    from airstore.client.airstore_tabular import AIRStorePathHandler

    pathmanager = PathManager()
    pathmanager.register_handler(AIRStorePathHandler())
    pathmanager.set_strict_kwargs_checking(False)
    return pathmanager
Example #15
    def __init__(
        self,
        include_default_formats: bool = True,
        path_manager: Optional[PathManager] = None,
    ) -> None:
        if path_manager is None:
            self.path_manager = PathManager()
        else:
            self.path_manager = path_manager

        self.mesh_interpreters: Deque[MeshFormatInterpreter] = deque()
        self.pointcloud_interpreters: Deque[PointcloudFormatInterpreter] = deque()

        if include_default_formats:
            self.register_default_formats()
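
A hedged sketch of the registry in use, assuming this is pytorch3d's `IO` class (the file name is a placeholder):

from pytorch3d.io import IO

io = IO()  # registers the default mesh and pointcloud formats
mesh = io.load_mesh("mesh.ply", device="cpu")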
Example #16
    def test_open_new_path_manager(self) -> None:
        with self._patch_download():
            path_manager = PathManager()
            with self.assertRaises(OSError):  # no handler registered
                f = path_manager.open(self._remote_uri, "rb")

            path_manager.register_handler(HTTPURLHandler())
            with path_manager.open(self._remote_uri, "rb") as f:
                self.assertTrue(os.path.isfile(f.name))
                self.assertTrue(f.read() != "")
Example #17
    def test_load_mtl_with_spaces_in_resource_filename(self):
        """
        Check that the texture image for materials in mtl files
        is loaded correctly even if there is a space in the file name
        e.g. material 1.png
        """
        mtl_file = "\n".join([
            "newmtl material_1",
            "map_Kd material 1.png",
            "Ka 1.000 1.000 1.000",  # white
            "Kd 1.000 1.000 1.000",  # white
            "Ks 0.000 0.000 0.000",  # black
            "Ns 10.0",
        ])
        with NamedTemporaryFile(mode="w", suffix=".mtl") as f:
            f.write(mtl_file)
            f.flush()

            material_properties, texture_files = _parse_mtl(
                Path(f.name), path_manager=PathManager(), device="cpu")

            dtype = torch.float32
            expected_materials = {
                "material_1": {
                    "ambient_color": torch.tensor([1.0, 1.0, 1.0],
                                                  dtype=dtype),
                    "diffuse_color": torch.tensor([1.0, 1.0, 1.0],
                                                  dtype=dtype),
                    "specular_color": torch.tensor([0.0, 0.0, 0.0],
                                                   dtype=dtype),
                    "shininess": torch.tensor([10.0], dtype=dtype),
                }
            }
            # Check that there is a material with name material_1
            self.assertTrue(tuple(texture_files.keys()) == ("material_1", ))
            # Check that there is an image with name material 1.png
            self.assertTrue(texture_files["material_1"] == "material 1.png")

            # Check all keys and values in dictionary are the same.
            for n1, n2 in zip(material_properties.keys(),
                              expected_materials.keys()):
                self.assertTrue(n1 == n2)
                for k1, k2 in zip(material_properties[n1].keys(),
                                  expected_materials[n2].keys()):
                    self.assertTrue(
                        torch.allclose(material_properties[n1][k1],
                                       expected_materials[n2][k2]))
Example #18
def _load_materials(
    material_names: List[str],
    f: Optional[str],
    *,
    data_dir: str,
    load_textures: bool,
    device: Device,
    path_manager: PathManager,
):
    """
    Load materials and optionally textures from the specified path.

    Args:
        material_names: a list of the material names found in the .obj file.
        f: path to the material information.
        data_dir: the directory where the material texture files are located.
        load_textures: whether textures should be loaded.
        device: Device (as str or torch.device) on which to return the new tensors.
        path_manager: PathManager object to interpret paths.

    Returns:
        material_colors: dict of properties for each material.
        texture_images: dict of material names and texture images.
    """
    if not load_textures:
        return None, None

    if not material_names or f is None:
        if material_names:
            warnings.warn("No mtl file provided")
        return None, None

    if not path_manager.exists(f):
        warnings.warn(f"Mtl file does not exist: {f}")
        return None, None

    # Texture mode uv wrap
    return load_mtl(
        f,
        material_names=material_names,
        data_dir=data_dir,
        path_manager=path_manager,
        device=device,
    )
Example #19
class TestLazyPath(unittest.TestCase):
    _pathmgr = PathManager()

    def test_materialize(self) -> None:
        f = MagicMock(return_value="test")
        x = LazyPath(f)
        f.assert_not_called()

        p = os.fspath(x)
        f.assert_called()
        self.assertEqual(p, "test")

        p = os.fspath(x)
        # should only be called once
        f.assert_called_once()
        self.assertEqual(p, "test")

    def test_join(self) -> None:
        f = MagicMock(return_value="test")
        x = LazyPath(f)
        p = os.path.join(x, "a.txt")
        f.assert_called_once()
        self.assertEqual(p, "test/a.txt")

    def test_getattr(self) -> None:
        x = LazyPath(lambda: "abc")
        with self.assertRaises(AttributeError):
            x.startswith("ab")
        _ = os.fspath(x)
        self.assertTrue(x.startswith("ab"))

    def test_PathManager(self) -> None:
        x = LazyPath(lambda: "./")
        output = self._pathmgr.ls(x)  # pyre-ignore
        output_gt = self._pathmgr.ls("./")
        self.assertEqual(sorted(output), sorted(output_gt))

    def test_getitem(self) -> None:
        x = LazyPath(lambda: "abc")
        with self.assertRaises(TypeError):
            x[0]
        _ = os.fspath(x)
        self.assertEqual(x[0], "a")
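
For context, `LazyPath` defers evaluating its callable until the path is first materialized (e.g. via `os.fspath`) and then caches the result, as the tests above verify. A hedged sketch; `expensive_lookup` is a hypothetical stand-in:

import os
from iopath.common.file_io import LazyPath

def expensive_lookup() -> str:  # hypothetical slow path resolution
    return "/tmp/resolved.txt"

path = LazyPath(expensive_lookup)  # nothing is computed yet
print(os.fspath(path))  # first use calls expensive_lookup() exactly once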
Example #20
class TestDriver:
    LEN = 100000000  # This many characters per append job
    NUM_JOBS = 10
    _pathmgr = PathManager()

    def test(self) -> None:
        with tempfile.TemporaryDirectory() as _tmpdir:
            URI = os.path.join(_tmpdir, "test.txt")

            start_time = time.time()
            printx(f"Start dispatching {self.NUM_JOBS} async write jobs "
                   f"each with {self.LEN} characters")

            FINAL_STR = ""
            with self._pathmgr.opena(URI, "a") as f:
                for i in range(self.NUM_JOBS):  # `i` goes from 0 to 9
                    FINAL_STR += f"{i}" * self.LEN
                    f.write(f"{i}" * self.LEN)

            mid_time = time.time()
            printx(
                f"Time taken to dispatch {self.NUM_JOBS} threads: {mid_time - start_time}"
            )
            printx("Calling `async_join()`")
            # We want this `async_join` call to take time. If it is instantaneous, then our
            # async write calls are not running asynchronously.
            assert self._pathmgr.async_join()
            printx(
                f"Time Python waited for `async_join()` call to finish: {time.time() - mid_time}"
            )

            assert self._pathmgr.async_close()

            with self._pathmgr.open(URI, "r") as f:
                assert f.read() == FINAL_STR

            printx("Async Writes Test finished.")
            printx(
                "Passing metric: "
                "If the `async_join()` call took more than a negligible time to complete, "
                "then Python waited for the threads to finish and the Async Writes "
                "Test SUCCEEDS. Otherwise FAILURE.")
Example #21
def _load_texture_images(
    material_names: List[str],
    data_dir: str,
    material_properties: MaterialProperties,
    texture_files: TextureFiles,
    path_manager: PathManager,
) -> Tuple[MaterialProperties, TextureImages]:
    final_material_properties = {}
    texture_images = {}

    used_material_names = list(material_names)
    if not used_material_names and material_properties:
        if len(material_properties) > 1:
            raise ValueError(
                "Multiple materials but no usemtl declarations in the obj file"
            )
        # No materials were specified in obj file and only one is in the
        # specified .mtl file, so we use it.
        used_material_names.append(next(iter(material_properties.keys())))

    # Only keep the materials referenced in the obj.
    for material_name in used_material_names:
        if material_name in texture_files:
            # Load the texture image.
            path = os.path.join(data_dir, texture_files[material_name])
            if path_manager.exists(path):
                image = (_read_image(
                    path, path_manager=path_manager, format="RGB") / 255.0)
                image = torch.from_numpy(image)
                texture_images[material_name] = image
            else:
                msg = f"Texture file does not exist: {path}"
                warnings.warn(msg)

        if material_name in material_properties:
            final_material_properties[material_name] = material_properties[
                material_name]

    return final_material_properties, texture_images
Example #22
    def _siamese_to_coco(self, siamese_json):
        assert self._output_dir
        save_json = os.path.join(self._output_dir, "siamese2coco.json")
        pm = PathManager()
        pm.mkdirs(os.path.dirname(save_json))
        with file_lock(save_json):
            if pm.exists(save_json):
                logger.warning(
                    f"Using previously cached COCO format annotations at '{save_json}'. "
                    "You need to clear the cache file if your dataset has been modified."
                )
            else:
                logger.info(
                    f"Converting annotations of dataset '{siamese_json}' to COCO format ..."
                )
                with pm.open(siamese_json, "r") as f:
                    siamese_data = json.load(f)
                coco_data = {"data": []}
                exist_imgid = set()
                for key, datas in siamese_data.items():
                    # Copy 'info', 'categories', etc.; de-duplicate images under 'data'.
                    if key != "data":
                        coco_data[key] = datas
                    else:
                        for data in datas:
                            for i in range(2):
                                img_data = data[str(i)]
                                if img_data["image_id"] in exist_imgid:
                                    continue
                                exist_imgid.add(img_data["image_id"])
                                coco_data[key].append(img_data)
                self._logger.info(f"Number of unique images: {len(exist_imgid)}.")
                coco_data = convert_to_coco_dict(coco_data["data"], self._metadata)
                with pm.open(save_json, "w") as f:
                    json.dump(coco_data, f)
        return save_json
Example #23
class TestNativeIOAsync(unittest.TestCase):
    """
    This test class is meant to have comprehensive tests for
    `NativePathHandler`. Async functionality tests for other
    `PathHandler`-s should only require a single test since
    all `PathHandler`-s operate in the same way.
    """

    _tmpdir: Optional[str] = None
    _pathmgr = PathManager()

    @classmethod
    def setUpClass(cls) -> None:
        cls._tmpdir = tempfile.mkdtemp()

    @classmethod
    def tearDownClass(cls) -> None:
        # Cleanup temp working dir.
        if cls._tmpdir is not None:
            shutil.rmtree(cls._tmpdir)  # type: ignore

    def setUp(self) -> None:
        # Reset class variables set by methods before each test.
        self._pathmgr.set_cwd(None)
        self._pathmgr._native_path_handler._non_blocking_io_manager = None
        self._pathmgr._native_path_handler._non_blocking_io_executor = None
        self._pathmgr._async_handlers.clear()

    def test_opena(self) -> None:
        _tmpfile = os.path.join(self._tmpdir, "async.txt")
        try:
            # Write the files.
            with self._pathmgr.opena(_tmpfile + "f", "w") as f:
                f.write("f1 ")
                with self._pathmgr.opena(_tmpfile + "g", "w") as g:
                    f.write("f2 ")
                    g.write("g1 ")
                    f.write("f3 ")
                f.write("f4 ")
            with self._pathmgr.opena(_tmpfile + "f", "a") as f:
                f.write("f5 ")
            F_STR = "f1 f2 f3 f4 f5 "
            G_STR = "g1 "

            # Test that `PathManager._async_handlers` keeps track of all
            # `PathHandler`-s where `opena` is used.
            self.assertCountEqual(
                [type(handler) for handler in self._pathmgr._async_handlers],
                [type(self._pathmgr._native_path_handler)],
            )
            # Test that 2 paths were properly logged in `NonBlockingIOManager`.
            manager = self._pathmgr._native_path_handler._non_blocking_io_manager
            self.assertEqual(len(manager._path_to_data), 2)
        finally:
            # Join the threads to wait for files to be written.
            self.assertTrue(self._pathmgr.async_close())

        # Check that both files were asynchronously written and written in order.
        with self._pathmgr.open(_tmpfile + "f", "r") as f:
            self.assertEqual(f.read(), F_STR)
        with self._pathmgr.open(_tmpfile + "g", "r") as g:
            self.assertEqual(g.read(), G_STR)
        # Test that both `NonBlockingIO` objects `f` and `g` are finally closed.
        self.assertEqual(len(manager._path_to_data), 0)

    def test_async_join_behavior(self) -> None:
        _tmpfile = os.path.join(self._tmpdir, "async.txt")
        _tmpfile_contents = "Async Text"
        try:
            for _ in range(1):  # Opens 1 thread
                with self._pathmgr.opena(_tmpfile + "1", "w") as f:
                    f.write(f"{_tmpfile_contents}-1")
            for _ in range(2):  # Opens 2 threads
                with self._pathmgr.opena(_tmpfile + "2", "w") as f:
                    f.write(f"{_tmpfile_contents}-2")
            for _ in range(3):  # Opens 3 threads
                with self._pathmgr.opena(_tmpfile + "3", "w") as f:
                    f.write(f"{_tmpfile_contents}-3")
            _path_to_data = (
                self._pathmgr._native_path_handler._non_blocking_io_manager._path_to_data
            )
            # Join the threads for the 1st and 3rd file and ensure threadpool completed.
            _path_to_data_copy = dict(_path_to_data)
            self.assertTrue(
                self._pathmgr.async_join(
                    _tmpfile + "1", _tmpfile + "3"
                )  # Removes paths from `_path_to_io`.
            )
            self.assertFalse(_path_to_data_copy[_tmpfile + "1"].thread.is_alive())
            self.assertFalse(_path_to_data_copy[_tmpfile + "3"].thread.is_alive())
            self.assertEqual(len(_path_to_data), 1)  # 1 file remaining
        finally:
            # Join all the remaining threads
            _path_to_data_copy = dict(_path_to_data)
            self.assertTrue(self._pathmgr.async_close())

        # Ensure data cleaned up.
        self.assertFalse(_path_to_data_copy[_tmpfile + "2"].thread.is_alive())
        self.assertEqual(len(self._pathmgr._async_handlers), 0)
        self.assertEqual(len(_path_to_data), 0)  # 0 files remaining

    def test_opena_normpath(self) -> None:
        _filename = "async.txt"
        # `_file1` and `_file2` should represent the same path but have different
        # string representations.
        _file1 = os.path.join(self._tmpdir, _filename)
        _file2 = os.path.join(self._tmpdir, ".", _filename)
        self.assertNotEqual(_file1, _file2)
        try:
            _file1_text = "File1 text"
            _file2_text = "File2 text"
            with self._pathmgr.opena(_file1, "w") as f:
                f.write(_file1_text)
            with self._pathmgr.opena(_file2, "a") as f:
                f.write(_file2_text)
            _path_to_data = (
                self._pathmgr._native_path_handler._non_blocking_io_manager._path_to_data
            )
            # Check that `file2` is marked as the same file as `file1`.
            self.assertEqual(len(_path_to_data), 1)
            self.assertTrue(self._pathmgr.async_join())
            # Check that both file paths give the same file contents.
            with self._pathmgr.open(_file1, "r") as f:
                self.assertEqual(f.read(), _file1_text + _file2_text)
            with self._pathmgr.open(_file2, "r") as f:
                self.assertEqual(f.read(), _file1_text + _file2_text)
        finally:
            self.assertTrue(self._pathmgr.async_close())

    def test_async_consecutive_join_calls(self) -> None:
        _file = os.path.join(self._tmpdir, "async.txt")
        try:
            self.assertTrue(self._pathmgr.async_join())
            try:
                with self._pathmgr.opena(_file, "w") as f:
                    f.write("1")
            finally:
                self.assertTrue(self._pathmgr.async_join())
            with self._pathmgr.open(_file, "r") as f:
                self.assertEqual(f.read(), "1")

            try:
                f = self._pathmgr.opena(_file, "a")
                f.write("2")
                f.close()
            finally:
                self.assertTrue(self._pathmgr.async_join())
            with self._pathmgr.open(_file, "r") as f:
                self.assertEqual(f.read(), "12")
        finally:
            self.assertTrue(self._pathmgr.async_close())

    def test_opena_mode_restriction(self) -> None:
        _file = os.path.join(self._tmpdir, "async.txt")
        with self.assertRaises(ValueError):
            self._pathmgr.opena(_file, "r")
        with self.assertRaises(ValueError):
            self._pathmgr.opena(_file, "rb")
        with self.assertRaises(ValueError):
            self._pathmgr.opena(_file, "wrb")

    def test_opena_args_passed_correctly(self) -> None:
        _file = os.path.join(self._tmpdir, "async.txt")
        try:
            # Make sure that `opena` args are used correctly by using
            # different newline args.
            with self._pathmgr.opena(_file, "w", newline="\r\n") as f:
                f.write("1\n")
            with self._pathmgr.opena(_file, "a", newline="\n") as f:
                f.write("2\n3")
        finally:
            self.assertTrue(self._pathmgr.async_close())

        # Read the raw file data without converting newline endings to see
        # if the `opena` args were used correctly.
        with self._pathmgr.open(_file, "r", newline="") as f:
            self.assertEqual(f.read(), "1\r\n2\n3")

    def test_opena_with_callback(self) -> None:
        _file_tmp = os.path.join(self._tmpdir, "async.txt.tmp")
        _file = os.path.join(self._tmpdir, "async.txt")
        _data = "Asynchronously written text"

        def cb():
            # Insert a test to make sure `_file_tmp` was closed before
            # the callback is called.
            with open(_file_tmp, "r") as f:
                self.assertEqual(f.read(), _data)
            self._pathmgr.copy(_file_tmp, _file, overwrite=True)

        mock_cb = Mock(side_effect=cb)

        try:
            with self._pathmgr.opena(
                _file_tmp, "w", callback_after_file_close=mock_cb
            ) as f:
                f.write(_data)
        finally:
            self.assertTrue(self._pathmgr.async_close())
        # Callback should have been called exactly once.
        mock_cb.assert_called_once()

        # Data should have been written to both `_file_tmp` and `_file`.
        with open(_file_tmp, "r") as f:
            self.assertEqual(f.read(), _data)
        with open(_file, "r") as f:
            self.assertEqual(f.read(), _data)

    def test_opena_with_callback_only_called_once(self) -> None:
        _file_tmp = os.path.join(self._tmpdir, "async.txt.tmp")

        mock_cb = Mock()

        # Callback should be called once even if `close` is called
        # multiple times.
        try:
            f = self._pathmgr.opena(_file_tmp, "w", callback_after_file_close=mock_cb)
            f.close()
            f.close()
            f.close()
        finally:
            self.assertTrue(self._pathmgr.async_close())
        # Callback should have been called exactly once.
        mock_cb.assert_called_once()

    def test_async_custom_executor(self) -> None:
        # At first, neither manager nor executor are set.
        self.assertIsNone(self._pathmgr._native_path_handler._non_blocking_io_manager)
        self.assertIsNone(self._pathmgr._native_path_handler._non_blocking_io_executor)
        # Then, override the `NativePathHandler` and set a custom executor.
        executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=128, thread_name_prefix="my prefix"
        )
        ph = NativePathHandler(async_executor=executor)
        self._pathmgr.register_handler(ph, allow_override=True)
        self.assertEqual(ph, self._pathmgr._native_path_handler)

        # Opening a file with `opena` initializes the manager with the
        # executor.
        _file = os.path.join(self._tmpdir, "async.txt")
        try:
            with self._pathmgr.opena(_file, "w") as f:
                f.write("Text")
            # Make sure the manager's executor is the same as the user's.
            self.assertEqual(
                executor,
                self._pathmgr._native_path_handler._non_blocking_io_manager._pool,
            )
        finally:
            self.assertTrue(self._pathmgr.async_close())

    def test_non_blocking_io_seekable(self) -> None:
        _file = os.path.join(self._tmpdir, "async.txt")
        # '^' marks the current position in stream

        # Test seek.
        try:
            with self._pathmgr.opena(_file, "wb") as f:
                f.write(b"012345")  # file = 012345^
                f.seek(1)  # file = 0^12345
                f.write(b"##")  # file = 0##^345
        finally:
            self.assertTrue(self._pathmgr.async_join())
            with self._pathmgr.open(_file, "rb") as f:
                self.assertEqual(f.read(), b"0##345")

        # Test truncate.
        try:
            with self._pathmgr.opena(_file, "wb") as f:
                f.write(b"012345")  # file = 012345^
                f.seek(2)  # file = 01^2345
                f.truncate()  # file = 01^
        finally:
            self.assertTrue(self._pathmgr.async_join())
            with self._pathmgr.open(_file, "rb") as f:
                self.assertEqual(f.read(), b"01")

        # Big test for seek and truncate.
        try:
            with self._pathmgr.opena(_file, "wb") as f:
                f.write(b"0123456789")  # file = 0123456789^
                f.seek(2)  # file = 01^23456789
                f.write(b"##")  # file = 01##^456789
                f.seek(3, io.SEEK_CUR)  # file = 01##456^789
                f.truncate()  # file = 01##456^
                f.write(b"$")  # file = 01##456$^
        finally:
            self.assertTrue(self._pathmgr.async_join())
            with self._pathmgr.open(_file, "rb") as f:
                self.assertEqual(f.read(), b"01##456$")

        # Test NOT tellable.
        try:
            with self._pathmgr.opena(_file, "wb") as f:
                with self.assertRaises(ValueError):
                    f.tell()
        finally:
            self._pathmgr.async_close()
Example #24
def load_ply(f, path_manager: Optional[PathManager] = None):
    """
    Load the data from a .ply file.

    Example .ply file format:

    ply
    format ascii 1.0           { ascii/binary, format version number }
    comment made by Greg Turk  { comments keyword specified, like all lines }
    comment this file is a cube
    element vertex 8           { define "vertex" element, 8 of them in file }
    property float x           { vertex contains float "x" coordinate }
    property float y           { y coordinate is also a vertex property }
    property float z           { z coordinate, too }
    element face 6             { there are 6 "face" elements in the file }
    property list uchar int vertex_index { "vertex_indices" is a list of ints }
    end_header                 { delimits the end of the header }
    0 0 0                      { start of vertex list }
    0 0 1
    0 1 1
    0 1 0
    1 0 0
    1 0 1
    1 1 1
    1 1 0
    4 0 1 2 3                  { start of face list }
    4 7 6 5 4
    4 0 4 5 1
    4 1 5 6 2
    4 2 6 7 3
    4 3 7 4 0

    Args:
        f:  A binary or text file-like object (with methods read, readline,
            tell and seek), a pathlib path or a string containing a file name.
            If the ply file is in the binary ply format rather than the text
            ply format, then a text stream is not supported.
            It is easiest to use a binary stream in all cases.
        path_manager: PathManager for loading if f is a str.


    Returns:
        verts: FloatTensor of shape (V, 3).
        faces: LongTensor of vertex indices, shape (F, 3).
    """
    if path_manager is None:
        path_manager = PathManager()
    header, elements = _load_ply_raw(f, path_manager=path_manager)

    vertex = elements.get("vertex", None)
    if vertex is None:
        raise ValueError("The ply file has no vertex element.")

    face = elements.get("face", None)
    if face is None:
        raise ValueError("The ply file has no face element.")

    if len(vertex) and (not isinstance(vertex, np.ndarray) or vertex.ndim != 2
                        or vertex.shape[1] != 3):
        raise ValueError("Invalid vertices in file.")
    verts = _make_tensor(vertex, cols=3, dtype=torch.float32)

    face_head = next(head for head in header.elements if head.name == "face")
    if (len(face_head.properties) != 1
            or face_head.properties[0].list_size_type is None):
        raise ValueError("Unexpected form of faces data.")
    # face_head.properties[0].name is usually "vertex_index" or "vertex_indices"
    # but we don't need to enforce this.

    if not len(face):
        # pyre-fixme[28]: Unexpected keyword argument `size`.
        faces = torch.zeros(size=(0, 3), dtype=torch.int64)
    elif isinstance(face, np.ndarray) and face.ndim == 2:  # Homogeneous elements
        if face.shape[1] < 3:
            raise ValueError("Faces must have at least 3 vertices.")
        face_arrays = [
            face[:, [0, i + 1, i + 2]] for i in range(face.shape[1] - 2)
        ]
        faces = torch.LongTensor(np.vstack(face_arrays))
    else:
        face_list = []
        for face_item in face:
            if face_item.ndim != 1:
                raise ValueError("Bad face data.")
            if face_item.shape[0] < 3:
                raise ValueError("Faces must have at least 3 vertices.")
            for i in range(face_item.shape[0] - 2):
                face_list.append(
                    [face_item[0], face_item[i + 1], face_item[i + 2]])
        # pyre-fixme[6]: Expected `dtype` for 3rd param but got `Type[torch.int64]`.
        faces = _make_tensor(face_list, cols=3, dtype=torch.int64)

    _check_faces_indices(faces, max_index=verts.shape[0])
    return verts, faces
Example #25
    def _open(self, path, mode="r", **kwargs):
        return PathManager.open(self._get_local_path(path), mode, **kwargs)
Example #26
class LayoutParserHandler(PathHandler):
    """
    Resolve anything that's in the LayoutParser model zoo.
    """

    PREFIX = "lp://"

    def _get_supported_prefixes(self):
        return [self.PREFIX]

    def _get_local_path(self, path, **kwargs):
        model_name = path[len(self.PREFIX):]
        dataset_name, *model_name, data_type = model_name.split('/')

        if data_type == 'weight':
            model_url = MODEL_CATALOG[dataset_name]['/'.join(model_name)]
        elif data_type == 'config':
            model_url = CONFIG_CATALOG[dataset_name]['/'.join(model_name)]
        else:
            raise ValueError(f"Unknown data_type {data_type}")
        return PathManager.get_local_path(model_url, **kwargs)

    def _open(self, path, mode="r", **kwargs):
        return PathManager.open(self._get_local_path(path), mode, **kwargs)


PathManager = PathManagerBase()
PathManager.register_handler(DropboxHandler())
PathManager.register_handler(LayoutParserHandler())
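
A hedged sketch of resolving an `lp://` URI through the module-level PathManager above; the dataset and model names are placeholders, not real catalog keys:

# Downloads (and caches) the weight file behind the catalog URL.
local_weights = PathManager.get_local_path("lp://<dataset>/<model>/weight")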
Example #27
def load_obj(
    f,
    load_textures=True,
    create_texture_atlas: bool = False,
    texture_atlas_size: int = 4,
    texture_wrap: Optional[str] = "repeat",
    device="cpu",
    path_manager: Optional[PathManager] = None,
):
    """
    Load a mesh from a .obj file and optionally textures from a .mtl file.
    Currently this handles verts, faces, vertex texture uv coordinates, normals,
    texture images and material reflectivity values.

    Note .obj files are 1-indexed. The tensors returned from this function
    are 0-indexed. OBJ spec reference: http://www.martinreddy.net/gfx/3d/OBJ.spec

    Example .obj file format:
    ::
        # this is a comment
        v 1.000000 -1.000000 -1.000000
        v 1.000000 -1.000000 1.000000
        v -1.000000 -1.000000 1.000000
        v -1.000000 -1.000000 -1.000000
        v 1.000000 1.000000 -1.000000
        vt 0.748573 0.750412
        vt 0.749279 0.501284
        vt 0.999110 0.501077
        vt 0.999455 0.750380
        vn 0.000000 0.000000 -1.000000
        vn -1.000000 -0.000000 -0.000000
        vn -0.000000 -0.000000 1.000000
        f 5/2/1 1/2/1 4/3/1
        f 5/1/1 4/3/1 2/4/1

    The first character of the line denotes the type of input:
    ::
        - v is a vertex
        - vt is the texture coordinate of one vertex
        - vn is the normal of one vertex
        - f is a face

    Faces are interpreted as follows:
    ::
        5/2/1 describes the first vertex of the first triangle
        - 5: index of vertex [1.000000 1.000000 -1.000000]
        - 2: index of texture coordinate [0.749279 0.501284]
        - 1: index of normal [0.000000 0.000000 -1.000000]

    If there are faces with more than 3 vertices
    they are subdivided into triangles. Polygonal faces are assumed to have
    vertices ordered counter-clockwise so the (right-handed) normal points
    out of the screen e.g. a proper rectangular face would be specified like this:
    ::
        0_________1
        |         |
        |         |
        3 ________2

    The face would be split into two triangles: (0, 2, 1) and (0, 3, 2),
    both of which are also oriented counter-clockwise and have normals
    pointing out of the screen.

    Args:
        f: A file-like object (with methods read, readline, tell, and seek),
           a pathlib path or a string containing a file name.
        load_textures: Boolean indicating whether material files are loaded
        create_texture_atlas: Bool, If True a per face texture map is created and
            a tensor `texture_atlas` is also returned in `aux`.
        texture_atlas_size: Int specifying the resolution of the texture map per face
            when `create_texture_atlas=True`. A (texture_size, texture_size, 3)
            map is created per face.
        texture_wrap: string, one of ["repeat", "clamp"]. This applies when computing
            the texture atlas.
            If `texture_wrap="repeat"`, for uv values outside the range [0, 1] the integer part
            is ignored and a repeating pattern is formed.
            If `texture_wrap="clamp"` the values are clamped to the range [0, 1].
            If None, then there is no transformation of the texture values.
        device: string or torch.device on which to return the new tensors.
        path_manager: optionally a PathManager object to interpret paths.

    Returns:
        3-element tuple containing

        - **verts**: FloatTensor of shape (V, 3).
        - **faces**: NamedTuple with fields:
            - verts_idx: LongTensor of vertex indices, shape (F, 3).
            - normals_idx: (optional) LongTensor of normal indices, shape (F, 3).
            - textures_idx: (optional) LongTensor of texture indices, shape (F, 3).
              This can be used to index into verts_uvs.
            - materials_idx: (optional) List of indices indicating which
              material the texture is derived from for each face.
              If there is no material for a face, the index is -1.
              This can be used to retrieve the corresponding values
              in material_colors/texture_images after they have been
              converted to tensors or Materials/Textures data
              structures - see textures.py and materials.py for
              more info.
        - **aux**: NamedTuple with fields:
            - normals: FloatTensor of shape (N, 3)
            - verts_uvs: FloatTensor of shape (T, 2), giving the uv coordinate per
              vertex. If a vertex is shared between two faces, it can have
              a different uv value for each instance. Therefore it is
              possible that the number of verts_uvs is greater than
              num verts i.e. T > V.
            - material_colors: if `load_textures=True` and the material has associated
              properties this will be a dict of material names and properties of the form:

              .. code-block:: python

                  {
                      material_name_1:  {
                          "ambient_color": tensor of shape (1, 3),
                          "diffuse_color": tensor of shape (1, 3),
                          "specular_color": tensor of shape (1, 3),
                          "shininess": tensor of shape (1)
                      },
                      material_name_2: {},
                      ...
                  }

              If a material does not have any properties it will have an
              empty dict. If `load_textures=False`, `material_colors` will be None.

            - texture_images: if `load_textures=True` and the material has a texture map,
              this will be a dict of the form:

              .. code-block:: python

                  {
                      material_name_1: (H, W, 3) image,
                      ...
                  }
              If `load_textures=False`, `texture_images` will be None.
            - texture_atlas: if `load_textures=True` and `create_texture_atlas=True`,
              this will be a FloatTensor of the form (F, texture_size, texture_size, 3).
              If the material does not have a texture map, then all faces
              will have a uniform white texture.  Otherwise `texture_atlas` will be
              None.
    """
    data_dir = "./"
    # pyre-fixme[6]: Expected `Union[typing.Type[typing.Any],
    #  typing.Tuple[typing.Type[typing.Any], ...]]` for 2nd param but got `Any`.
    if isinstance(f, (str, bytes, os.PathLike)):
        # pyre-fixme[6]: Expected `_PathLike[Variable[typing.AnyStr <: [str,
        #  bytes]]]` for 1st param but got `Union[_PathLike[typing.Any], bytes, str]`.
        data_dir = os.path.dirname(f)
    if path_manager is None:
        path_manager = PathManager()
    with _open_file(f, path_manager, "r") as f:
        return _load_obj(
            f,
            data_dir=data_dir,
            load_textures=load_textures,
            create_texture_atlas=create_texture_atlas,
            texture_atlas_size=texture_atlas_size,
            texture_wrap=texture_wrap,
            path_manager=path_manager,
            device=device,
        )
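
A hedged usage sketch, assuming this is pytorch3d's `load_obj` (the file name is a placeholder):

from pytorch3d.io import load_obj

verts, faces, aux = load_obj("model.obj", load_textures=True)
tris = faces.verts_idx          # LongTensor of shape (F, 3)
textures = aux.texture_images   # dict of material name -> (H, W, 3) image, or None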
Example #28
import struct
import unittest
from io import BytesIO, StringIO
from tempfile import NamedTemporaryFile, TemporaryFile

import numpy as np
import pytorch3d.io.ply_io
import torch
from common_testing import TestCaseMixin
from iopath.common.file_io import PathManager
from pytorch3d.io import IO
from pytorch3d.io.ply_io import load_ply, save_ply
from pytorch3d.structures import Pointclouds
from pytorch3d.utils import torus

global_path_manager = PathManager()


def _load_ply_raw(stream):
    return pytorch3d.io.ply_io._load_ply_raw(stream, global_path_manager)


CUBE_PLY_LINES = [
    "ply",
    "format ascii 1.0",
    "comment made by Greg Turk",
    "comment this file is a cube",
    "element vertex 8",
    "property float x",
    "property float y",
    "property float z",
Example #29
# Copyright (c) Facebook, Inc. and its affiliates.
from iopath.common.file_io import HTTPURLHandler, OneDrivePathHandler, PathHandler
from iopath.common.file_io import PathManager as PathManagerBase

__all__ = ["PathManager", "PathHandler"]

PathManager = PathManagerBase()
"""
This is a detectron2 project-specific PathManager.
We try to stay away from global PathManager in fvcore as it
introduces potential conflicts among other libraries.
"""


class Detectron2Handler(PathHandler):
    """
    Resolve anything that's hosted under detectron2's namespace.
    """

    PREFIX = "detectron2://"
    S3_DETECTRON2_PREFIX = "https://dl.fbaipublicfiles.com/detectron2/"

    def _get_supported_prefixes(self):
        return [self.PREFIX]

    def _get_local_path(self, path):
        name = path[len(self.PREFIX):]
        return PathManager.get_local_path(self.S3_DETECTRON2_PREFIX + name)

    def _open(self, path, mode="r", **kwargs):
        return PathManager.open(self._get_local_path(path), mode, **kwargs)
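
A hedged sketch of the handler in use (the model-zoo path is a placeholder):

PathManager.register_handler(Detectron2Handler())
# A detectron2:// URI now resolves to a cached local copy of the S3 file.
local = PathManager.get_local_path("detectron2://<path-to-model-zoo-file>")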
Example #30
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from iopath.common.file_io import HTTPURLHandler
from iopath.common.file_io import PathManager as PathManagerBase

# A trick learned from https://github.com/facebookresearch/detectron2/blob/65faeb4779e4c142484deeece18dc958c5c9ad18/detectron2/utils/file_io.py#L3


class DropboxHandler(HTTPURLHandler):
    """
    Supports download and file checks for Dropbox links.
    """
    def _get_supported_prefixes(self):
        return ["https://www.dropbox.com"]

    def _isfile(self, path):
        return path in self.cache_map


PathManager = PathManagerBase()
PathManager.register_handler(DropboxHandler())