def test_png_ext(self, disabled_backend):
    """Loading a PNG must yield the same array whether or not the extension is given."""
    with disable_import(*disabled_backend):
        from plums.dataflow.io.tile._backend import _load_png
        here = Path(__file__)[:-1]
        with_ext = _load_png(here / '_data/test_png.png')
        without_ext = _load_png(here / '_data/test_png')
        assert np.array_equal(with_ext, without_ext)
def test_non_commutative_addition_file(self, make_path):
    """Left-joining a directory path onto a file path must raise a ValueError."""
    directory = Path(make_path('dummy/path/left'))
    file_path = Path(make_path('dummy/path/right.ext'))
    with pytest.raises(ValueError,
                       match='It is impossible to left join a file-path: '):
        file_path + directory
def _deserialize(self, data): """Update internals from a JSON serialized data dictionary.""" # Initialize structures self._tiles_index = {} self._tiles_database = defaultdict(tuple) self._annotations_index = {} self._annotations_database = defaultdict(tuple) # Deserialize tiles database and index for path_dict in data['tile'].values(): group = tuple(path_dict['group']) for path in path_dict['paths']: tile_path = Path(path['path']) tile_path.match = path['match'] self._tiles_index[tile_path] = group self._tiles_database[group] += (tile_path, ) # Deserialize annotation database and index for path_dict in data['annotation'].values(): group = tuple(path_dict['group']) for path in path_dict['paths']: annotation_path = Path(path['path']) annotation_path.match = path['match'] self._annotations_index[annotation_path] = group self._annotations_database[group] += (annotation_path, ) # Deserialize group index self._group_index = [tuple(group) for group in data['group_index']]
def test_checkpoint(self, tmp_path):
    """Exercise Checkpoint construction, validation, default hash and equality."""
    # An empty path is not an existing file.
    with pytest.raises(OSError):
        c = Checkpoint('', '')

    with open(str(Path(tmp_path) / '45.weight'), 'w') as f:
        f.write('')

    # An empty string is not a valid MD5 hash.
    with pytest.raises(PlumsValidationError):
        c = Checkpoint('some_name', Path(tmp_path) / '45.weight', epoch=45, hash='')

    c = Checkpoint('some_name', Path(tmp_path) / '45.weight', epoch=45,
                   hash='00236a2ae558018ed13b5222ef1bd977')
    assert c.name == 'some_name'
    assert c.path == Path(tmp_path) / '45.weight'
    assert c.epoch == 45
    assert c.hash == '00236a2ae558018ed13b5222ef1bd977'

    # When no hash is given it is computed from the (here empty) file content.
    c = Checkpoint('some_name', Path(tmp_path) / '45.weight', epoch=45)
    assert c.name == 'some_name'
    assert c.path == Path(tmp_path) / '45.weight'
    assert c.epoch == 45
    assert c.hash == 'd41d8cd98f00b204e9800998ecf8427e'

    with open(str(Path(tmp_path) / '46.weight'), 'w') as f:
        f.write('0')

    # Equality holds across different names/paths as long as epoch and hash agree,
    # and breaks when either the epoch or the (computed) hash differs.
    assert c == c
    assert c == Checkpoint('some_other_name', Path(tmp_path) / '46.weight', epoch=45,
                           hash='d41d8cd98f00b204e9800998ecf8427e')
    assert c != Checkpoint('some_name', Path(tmp_path) / '46.weight', epoch=46,
                           hash='d41d8cd98f00b204e9800998ecf8427e')
    assert c != Checkpoint('some_name', Path(tmp_path) / '46.weight', epoch=45)
def test_make_part_dir_absolute(self, make_path):
    """Check parts decomposition and file mutators on absolute directory paths."""
    path = make_path('/dummy/absolute/dir/path')
    path_instance = Path(path)
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'dir', 'path')
    # A directory path exposes neither a filename nor an extension.
    assert path_instance.filename == ''
    assert path_instance.ext == ''

    # Duplicated separators collapse and do not change the parts.
    path = make_path('/dummy/absolute//dir/path')
    path_instance = Path(path)
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'dir', 'path')
    assert path_instance.filename == ''
    assert path_instance.ext == ''

    path = make_path('/dummy///absolute/dir/path')
    path_instance = Path(path)
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'dir', 'path')
    assert path_instance.filename == ''
    assert path_instance.ext == ''

    # Filename and extension mutators are rejected on directory paths.
    with pytest.raises(
            TypeError,
            match=
            'Can not change filename as self is represents a directory'):
        _ = path_instance.with_filename('filename')
    with pytest.raises(
            TypeError,
            match=
            'Can not change extension as self is represents a directory'):
        _ = path_instance.with_ext('json')

    # with_file only accepts a single '<filename>.<ext>' component.
    with pytest.raises(
            ValueError,
            match='Invalid file provided: Expected a single file, got'):
        _ = path_instance.with_file('some/invalid/file.json')
    with pytest.raises(
            ValueError,
            match='Invalid file provided: Expected a single file, got'):
        _ = path_instance.with_file('file')
    with pytest.raises(
            ValueError,
            match='Invalid file provided: Expected a single file, got'):
        _ = path_instance.with_file('file.')
    with pytest.raises(
            ValueError,
            match='Invalid file provided: Expected a single file, got'):
        _ = path_instance.with_file('/file.json')

    # A valid file is appended on a new instance; the original path is untouched.
    new_path = path_instance.with_file('filename.json')
    assert new_path.parts == ('/', 'dummy', 'absolute', 'dir', 'path', 'filename.json')
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'dir', 'path')
    assert new_path.filename == 'filename'
    assert path_instance.filename == ''
    assert new_path.ext == 'json'
    assert path_instance.ext == ''
def test_png_no_backend(self):
    """With every PNG-capable backend disabled, loading must raise a RuntimeError."""
    with disable_import('lycon', 'cv2', 'PIL'):
        import plums.dataflow.io.tile._backend
        here = Path(__file__)[:-1]
        # Both the extension-full and extension-less files must fail the same way.
        for candidate in ('_data/test_png.png', '_data/test_png'):
            with pytest.raises(RuntimeError, match='No backend available to open PNG image.'):
                plums.dataflow.io.tile._backend._load_png(here / candidate)
def test_resolver_init():
    """A pattern compiles to the expected capture-group regex and static prefix."""
    capture_tail = r'(?P<dataset>[^/]+)/(?P<aoi>[^/]+)/(?P<source>[^/]+)/(?P<tile>[^/]+)\.jpg'

    resolver = PathResolver('data/images/{dataset}/{aoi}/{source}/{tile}.jpg')
    assert resolver._regex.pattern == r'data/images/' + capture_tail
    assert resolver._prefix == Path('data/images/')

    resolver = PathResolver('/home/user/{dataset}/{aoi}/{source}/{tile}.jpg')
    assert resolver._regex.pattern == r'/home/user/' + capture_tail
    assert resolver._prefix == Path('/home/user')
def test_contain_absolute(self, make_path):
    """Containment checks on an absolute path and its part sub-sequences."""
    raw = make_path('/a/long/dummy/absolute/dir/path')
    instance = Path(raw)

    # A path contains (and is contained in) itself.
    assert raw in instance
    assert instance in Path(raw)

    # A consecutive run of parts is contained, but not the other way around.
    assert 'dummy/absolute' in instance
    assert instance not in Path('dummy/absolute')

    # Non-consecutive or reordered parts are not contained.
    assert 'dummy/absolute/path' not in instance
    assert instance not in Path('dummy/absolute/path')
    assert 'absolute/dummy' not in instance
    assert instance not in Path('absolute/dummy')
def test_contain(self, make_path):
    """Containment checks on a relative path and its part sub-sequences."""
    raw = make_path('a/long/dummy/relative/dir/path')
    instance = Path(raw)

    # A path contains (and is contained in) itself.
    assert raw in instance
    assert instance in Path(raw)

    # A consecutive run of parts is contained, but not the other way around.
    assert 'dummy/relative' in instance
    assert instance not in Path('dummy/relative')

    # Non-consecutive or reordered parts are not contained.
    assert 'dummy/relative/path' not in instance
    assert instance not in Path('dummy/relative/path')
    assert 'relative/dummy' not in instance
    assert instance not in Path('relative/dummy')
def test_jpeg_no_backend(self):
    """With every JPG-capable backend disabled, loading must raise a RuntimeError."""
    with disable_import('plums.dataflow.io.tile._vendor.turbojpeg', 'lycon', 'cv2'):
        import plums.dataflow.io.tile._backend
        # Disable Pillow after import to avoid early raise
        plums.dataflow.io.tile._backend._HAS_PILLOW = False
        here = Path(__file__)[:-1]
        # Both the extension-full and extension-less files must fail the same way.
        for candidate in ('_data/test_jpg.jpg', '_data/test_jpg'):
            with pytest.raises(RuntimeError, match='No backend available to open JPG image.'):
                plums.dataflow.io.tile._backend._load_jpg(here / candidate)
def test_png_jpg(self, disabled_backend):
    """PNG and JPG decodings of the same picture stay within compression noise (PSNR > 35)."""
    with disable_import(*disabled_backend):
        from plums.dataflow.io.tile._backend import _load_png, _load_jpg
        here = Path(__file__)[:-1]
        png_arrays = (
            _load_png(here / '_data/test_png.png').astype(np.float64),
            _load_png(here / '_data/test_png').astype(np.float64),
        )
        jpg_arrays = (
            _load_jpg(here / '_data/test_jpg.jpg').astype(np.float64),
            _load_jpg(here / '_data/test_jpg').astype(np.float64),
        )
        # Every JPG decoding must be close to every PNG decoding.
        for jpg_array in jpg_arrays:
            for png_array in png_arrays:
                assert psnr(jpg_array, png_array) > 35
def test_producer(self, tmp_path):
    """Exercise Producer validation, ordering comparisons and strict equality."""
    # Configuration files: '0.conf' and '0.1.conf' share content, '1.conf' differs.
    with open(str(Path(tmp_path) / '0.conf'), 'w') as f:
        f.write('0')
    with open(str(Path(tmp_path) / '1.conf'), 'w') as f:
        f.write('')
    with open(str(Path(tmp_path) / '0.1.conf'), 'w') as f:
        f.write('0')

    # A missing configuration file is rejected.
    with pytest.raises(OSError):
        p = Producer('', 'py_pa', '1.0.0', Path(tmp_path) / '2.conf')
    # An unknown version format is rejected.
    with pytest.raises(ValueError):
        p = Producer('', 'version_format', '1.0.0', Path(tmp_path) / '0.conf')

    p = Producer('', 'py_pa', '1.0.0', Path(tmp_path) / '0.conf')
    p_eq = Producer('', 'py_pa', '1.0.0', Path(tmp_path) / '1.conf')
    p_seq = Producer('', 'py_pa', '1.0.0', Path(tmp_path) / '0.conf')
    p_dup = Producer('', 'py_pa', '1.0.0', Path(tmp_path) / '0.1.conf')
    p_gt = Producer('', 'py_pa', '2.0.0', Path(tmp_path) / '0.conf')

    # Same version, different configuration content: equal but not strictly equal.
    assert p == p_eq
    assert p <= p_eq
    assert p >= p_eq
    assert not p < p_eq
    assert not p > p_eq
    assert not p.strict_equals(p_eq)
    assert not p_eq.strict_equals(p)

    # Same version, same configuration file: equal and strictly equal.
    assert p == p_seq
    assert p <= p_seq
    assert p >= p_seq
    assert not p < p_seq
    assert not p > p_seq
    assert p.strict_equals(p_seq)
    assert p_seq.strict_equals(p)

    # Same version, different file with identical content: still strictly equal.
    assert p == p_dup
    assert p <= p_dup
    assert p >= p_dup
    assert not p < p_dup
    assert not p > p_dup
    assert p.strict_equals(p_dup)
    assert p_dup.strict_equals(p)

    # Greater version: strictly ordered, never equal.
    assert p != p_gt
    assert p <= p_gt
    assert not p >= p_gt
    assert p < p_gt
    assert not p > p_gt
    assert not p.strict_equals(p_gt)
    assert not p_gt.strict_equals(p)
def test_make_part_file_absolute(self, make_path):  # noqa: R701
    """Check parts decomposition and mutators on absolute file paths."""
    path = make_path('/dummy/absolute/file/path/file.ext')
    path_instance = Path(path)
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'file.ext')
    assert path_instance.filename == 'file'
    assert path_instance.ext == 'ext'

    # Duplicated separators collapse and do not change the parts.
    path = make_path('/dummy/absolute/file//path/file.ext')
    path_instance = Path(path)
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'file.ext')
    assert path_instance.filename == 'file'
    assert path_instance.ext == 'ext'

    path = make_path('/dummy///absolute/file/path/file.ext')
    path_instance = Path(path)
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'file.ext')
    assert path_instance.filename == 'file'
    assert path_instance.ext == 'ext'

    # with_filename swaps the name, keeps the extension, and leaves the original untouched.
    new_path = path_instance.with_filename('filename')
    assert new_path.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'filename.ext')
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'file.ext')
    assert new_path.filename == 'filename'
    assert path_instance.filename == 'file'
    assert new_path.ext == 'ext'
    assert path_instance.ext == 'ext'

    # with_ext swaps the extension, keeps the name, and leaves the original untouched.
    new_path = path_instance.with_ext('json')
    assert new_path.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'file.json')
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'file.ext')
    assert new_path.filename == 'file'
    assert path_instance.filename == 'file'
    assert new_path.ext == 'json'
    assert path_instance.ext == 'ext'

    # with_file swaps both name and extension, and leaves the original untouched.
    new_path = path_instance.with_file('filename.json')
    assert new_path.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'filename.json')
    assert path_instance.parts == ('/', 'dummy', 'absolute', 'file', 'path', 'file.ext')
    assert new_path.filename == 'filename'
    assert path_instance.filename == 'file'
    assert new_path.ext == 'json'
    assert path_instance.ext == 'ext'
def test_root_to_anchor(self, make_path):
    """root_to_anchor recovers the expected prefix for every anchor of a path."""
    path = Path(make_path('/some/absolute/path/to/somewhere.ext'))
    for index, part in enumerate(path[1:]):
        assert path.root_to_anchor(part) == path[:index + 1]

    # An anchor absent from the path is rejected.
    with pytest.raises(ValueError):
        path.root_to_anchor('non_existing_anchor')

    # With repeated parts, the anchor may itself be a multi-part sub-path.
    path = Path(make_path('/some/absolute/path/to/path/to/somewhere.ext'))
    for index, part in enumerate(path[1:]):
        assert path.root_to_anchor(path[index + 1:]) == path[:index + 1]
    assert path.root_to_anchor('path/to') == path[:5]
def copy(source, destination, lazy=True, src_hash=None, dst_hash=None, checksum=md5_checksum):
    """Copy file from *source* to *destination*.

    If ``lazy`` is set to ``True`` and *destination* exists, the file **will be** copied
    **if and only if** *destination* is a different file (content-wise) than *source*.

    Args:
        source (PathLike): The file to copy from.
        destination (PathLike): The file to copy to.
        lazy (bool): Optional. Default to ``True``. If set to ``True``, the actual copy is only
            performed if *destination* does not exist or has a different content than *source*.
        src_hash (str): Optional. Default to ``None``. If provided, it is used in place of the
            *source* checksum to compare content.
        dst_hash (str): Optional. Default to ``None``. If provided, it is used in place of the
            *destination* checksum to compare content.
        checksum (Callable): A function which computes a checksum from a |Path|.

    Raises:
        OSError: If *source* is not a file, if *destination* exists but is not a file, or if
            something went wrong during copy.

    """
    source = Path(source)
    destination = Path(destination)

    # Copying a file onto itself is a no-op.
    if source == destination:
        return

    if not source.is_file():
        raise OSError('Invalid source: {} is not a file.'.format(source))

    if destination.exists() and not destination.is_file():
        raise OSError(
            'Invalid destination: {} exists but is not a file.'.format(
                destination))

    # A missing destination always needs an actual copy.
    if not destination.exists():
        lazy = False

    # Skip the copy when the destination already holds the same content.
    if (destination.is_file() and lazy) and is_duplicate(
            source, destination, src_hash, dst_hash, checksum=checksum):
        return

    shutil.copy(str(source), str(destination))
def make_dict_structure_from_tree(path):
    """Construct a nested dictionary structure from a filesystem tree.

    Args:
        path (PathLike): The filesystem tree root.

    Returns:
        dict: A nested dict structure where each element is a key to its |Path| for files and to
        another dict for directories.

    Raises:
        ValueError: If an element of the tree is neither a file/symlink nor a directory.

    """
    path = Path(path)
    tree = path.rglob('*')

    dict_tree = {}

    for element in tree:
        # Directories map to a nested dict; files and symlinks map to their own Path.
        # Symlinks are checked first so a link to a directory is kept as a Path value.
        if not element.is_symlink() and element.is_dir():
            value = {}
        elif element.is_symlink() or element.is_file():
            value = element
        else:
            raise ValueError('Invalid element in filesystem tree: '
                             '{} is neither a file/symlink nor a directory.'.format(element))

        # Walk down the already-created parent entries to find the insertion point.
        # rglob yields parents before children, so the parent dicts exist already.
        dictionary = dict_tree
        for part in element.anchor_to_path(path)[:-1]:
            if part == '.':
                continue
            dictionary = dictionary[part]

        dictionary[str(element[-1])] = value

    return dict_tree
def __init__(self, prefix):
    """Initialise the cache: resolve its on-disk location and make sure it exists."""
    self._resolver = PathResolver('{key}.json')
    # Cache entries live under the per-user application cache directory, namespaced by prefix.
    self._path = Path(user_cache_dir(appname='plums')) / prefix
    self._path.mkdir(parents=True, exist_ok=True)
def _make_path(path):
    # Convert *path* to the flavour selected by the enclosing fixture parameter.
    if request.param == 'pathlib':
        return pathlib.Path(str(path))
    if request.param == 'Path':
        return Path(str(path))
    return str(path)
def __init__(self):
    """Build the pattern parser from the grammar file shipped next to this module."""
    self._transformer = PatternTransformer()
    grammar_path = Path(__file__)[:-1] / 'grammar.lark'
    with open(str(grammar_path), 'r') as grammar_file:
        self._parser = Lark(grammar_file,
                            start='pattern',
                            parser='lalr',
                            maybe_placeholders=True,
                            transformer=self._transformer)
def test_common_prefix(self, make_path):
    """common_prefix finds the shared leading path and rejects relative arguments."""
    path = Path(make_path('/some/absolute/path/to/somewhere.ext'))
    assert path.common_prefix('/some/absolute/path/to/elsewhere.ext'
                              ) == Path('/some/absolute/path/to')
    # Relative arguments never share a prefix with an absolute path.
    with pytest.raises(ValueError, match='No common prefix found between'):
        path.common_prefix('path/to')
    with pytest.raises(ValueError, match='No common prefix found between'):
        path.common_prefix('some/relative/path/to/elsewhere.ext')

    # Same checks with the argument converted through the make_path fixture flavour.
    path = Path(str('/some/absolute/path/to/somewhere.ext'))
    assert path.common_prefix(
        make_path('/some/absolute/path/to/elsewhere.ext')) == Path(
            '/some/absolute/path/to')
    with pytest.raises(ValueError, match='No common prefix found between'):
        path.common_prefix(make_path('path/to'))
    with pytest.raises(ValueError, match='No common prefix found between'):
        path.common_prefix(
            make_path('some/relative/path/to/elsewhere.ext'))
def _make_tree_level_from_dict(root, tree):
    # Recursively materialise *tree* on disk: dict values become directories,
    # any other value is written out as a file's content.
    level = Path(root)
    for name, value in tree.items():
        entry = level / name
        if isinstance(value, dict):
            entry.mkdir(exist_ok=True)
            _make_tree_level_from_dict(entry, value)
        else:
            with open(str(entry), 'w') as f:
                f.write(value)
def __init__(self, name, path=None, epoch=None, hash=None):
    """Store checkpoint metadata and validate its backing file.

    Args:
        name (str): The checkpoint name.
        path (PathLike): Optional. Default to ``None``. The checkpoint file on disk.
        epoch (int): Optional. Default to ``None``. The epoch the checkpoint corresponds to.
        hash (str): Optional. Default to ``None``. An MD5 checksum of the checkpoint file; when
            omitted it is computed from *path*.

    Raises:
        OSError: If *path* is provided but is not a file.

    """
    self.path = Path(path) if path is not None else path
    if self.path is not None and not self.path.is_file():
        raise OSError('Invalid checkpoint: {} is not a file.'.format(path))
    self.name = name
    self.epoch = epoch
    # NOTE(review): if both path and hash are None, md5_checksum(None) is reached —
    # presumably callers always provide at least one of them; confirm upstream.
    self.hash = MD5Checksum().validate(
        hash) if hash is not None else md5_checksum(self.path)
def tmp_files(tmp_path):
    """Create text/binary files of controlled sizes, yield the directory, then clean up."""
    tmp_path = Path(tmp_path)

    # Indices 0 and 1 share sizes (potential content duplicates); index 2 is one byte larger.
    sizes = ((1024, 4194304), (1024, 4194304), (1025, 4194305))

    created = []
    for index, (small_size, large_size) in enumerate(sizes):
        for prefix, size in (('small', small_size), ('large', large_size)):
            text_name = '{}_{}.text'.format(prefix, index)
            with open(str(tmp_path / text_name), 'w') as f:
                f.write(_char_generator(size=size))
            created.append(text_name)

            binary_name = '{}_{}.binary'.format(prefix, index)
            with open(str(tmp_path / binary_name), 'wb') as f:
                f.write(_char_generator(size=size, to_bytes=True))
            created.append(binary_name)

    yield tmp_path

    # Tear down in reverse creation order.
    for name in reversed(created):
        os.remove(str(tmp_path / name))
def test_invalid_types(self):
    """Non path-like constructor arguments must be rejected with a TypeError."""
    for invalid in (1, (1, ), [1], 0, []):
        with pytest.raises(
                TypeError,
                match='expected str, bytes or os.PathLike object, not'):
            Path(invalid)
def tmp_dir_structure(self, tmp_path, request):
    """Build a directory tree with files and symlinks, yield its root, then tear it down.

    The root is yielded as a pathlib.Path, a plums Path or a plain string depending
    on the fixture parameter.
    """
    base = str(tmp_path)
    # Directories, including a dotted one with a nested child.
    os.mkdir(os.path.join(base, 'dirA'))
    os.mkdir(os.path.join(base, 'dirB'))
    os.mkdir(os.path.join(base, 'dir.C'))
    os.mkdir(os.path.join(base, 'dir.C', 'dirD'))
    # Regular files at various depths.
    with open(os.path.join(base, 'fileA'), 'wb') as f:
        f.write(b"this is file A\n")
    with open(os.path.join(base, 'dirB', 'fileB'), 'wb') as f:
        f.write(b"this is file B\n")
    with open(os.path.join(base, 'dir.C', 'fileC'), 'wb') as f:
        f.write(b"this is file C\n")
    with open(os.path.join(base, 'dir.C', 'dirD', 'fileD'), 'wb') as f:
        f.write(b"this is file D\n")

    def dirlink(src, dest):
        # Thin wrapper to create directory symlinks.
        os.symlink(src, dest)

    # Relative symlinks
    os.symlink('fileA', os.path.join(base, 'linkA'))
    os.symlink('non-existing', os.path.join(base, 'brokenLink'))
    dirlink('dirB', os.path.join(base, 'linkB'))
    dirlink(os.path.join('..', 'dirB'), os.path.join(base, 'dirA', 'linkC'))
    # This one goes upwards but doesn't create a loop
    dirlink(os.path.join('..', 'dirB'), os.path.join(base, 'dirB', 'linkD'))

    # Yield the root in the flavour requested by the fixture parameter.
    if request.param == 'pathlib':
        yield pathlib.Path(str(base))
    elif request.param == 'Path':
        yield Path(str(base))
    else:
        yield str(base)

    # Tear down: links and files first, then directories, deepest first.
    os.remove(os.path.join(base, 'dirB', 'linkD'))
    os.remove(os.path.join(base, 'dirA', 'linkC'))
    os.remove(os.path.join(base, 'linkB'))
    os.remove(os.path.join(base, 'brokenLink'))
    os.remove(os.path.join(base, 'linkA'))
    os.remove(os.path.join(base, 'dir.C', 'dirD', 'fileD'))
    os.remove(os.path.join(base, 'dir.C', 'fileC'))
    os.remove(os.path.join(base, 'dirB', 'fileB'))
    os.remove(os.path.join(base, 'fileA'))
    os.rmdir(os.path.join(base, 'dir.C', 'dirD'))
    os.rmdir(os.path.join(base, 'dir.C'))
    os.rmdir(os.path.join(base, 'dirB'))
    os.rmdir(os.path.join(base, 'dirA'))
def is_duplicate(file_1, file_2, hash_1=None, hash_2=None, checksum=md5_checksum):
    """Compare two files and return whether they are duplicates.

    For efficiency, a first decision is taken based on the two files size. If it is not enough,
    their content checksum are used.

    Args:
        file_1 (PathLike): The first file to compare.
        file_2 (PathLike): The second file to compare.
        hash_1 (str): Optional. Default to ``None``. If provided, it is used in place of the first
            file checksum to compare content.
        hash_2 (str): Optional. Default to ``None``. If provided, it is used in place of the second
            file checksum to compare content.
        checksum (Callable): A function which computes a checksum from a |Path|.

    Returns:
        bool: ``True`` if both files have the same content.

    """
    file_1 = Path(file_1)
    file_2 = Path(file_2)

    # Cheap size comparison first: files of different sizes can never be duplicates.
    if os.path.getsize(str(file_1)) != os.path.getsize(str(file_2)):
        return False

    # Fall back to (possibly pre-computed) content checksums.
    hash_1 = hash_1 if hash_1 is not None else checksum(file_1)
    hash_2 = hash_2 if hash_2 is not None else checksum(file_2)

    if hash_1 != hash_2:
        return False

    return True
def save(self, filepath):
    """Save an |Image| instance wrapping an HWC :class:`~numpy.ndarray`.

    Args:
        filepath (PathLike): The path to the image file on disk.

    Raises:
        ValueError: If *filepath* has neither a JPEG nor a PNG extension.

    """
    filepath = Path(filepath)
    # Dispatch on the (case-insensitive) file extension.
    extension = filepath.ext.lower()
    if extension in ('jpg', 'jpeg'):
        _dump_jpg(filepath, self._array_data)
    elif extension == 'png':
        _dump_png(filepath, self._array_data)
    else:
        raise ValueError('Unsupported image type: {}.'.format(
            filepath.ext))
def __call__(self, path):
    """Construct a |Taxonomy| from the exported dataset `taxonomy.json` file.

    Args:
        path (PathLike): A path to a single Playground dataset.

    Returns:
        Taxonomy: The dataset taxonomy.

    """
    descriptor = load(Path(path) / 'taxonomy.json')
    taxonomy = Taxonomy()
    # Populate the taxonomy tree from the serialized descriptor.
    self._make_tree(taxonomy.root, descriptor)
    return taxonomy
def test_glob_rglob(self, tmp_dir_structure):
    """Path glob/rglob agree with each other and with pathlib's behaviour."""
    def plums_glob(pattern):
        return {str(p) for p in Path(tmp_dir_structure).glob(pattern)}

    def pathlib_glob(pattern):
        return {str(p) for p in pathlib.Path(str(tmp_dir_structure)).glob(pattern)}

    plums_rglob = {str(p) for p in Path(tmp_dir_structure).rglob('*')}
    pathlib_rglob = {str(p) for p in pathlib.Path(str(tmp_dir_structure)).rglob('*')}

    # rglob('*') is equivalent to glob('**/*') and to pathlib's rglob('*').
    assert plums_rglob == plums_glob('**/*')
    assert plums_rglob == pathlib_rglob

    # Plain, recursive and mixed glob patterns all match pathlib's results.
    for pattern in ('dirB/*', '**', '**/**', '**/**/*', 'dirB/**/*', '**/dirD/*'):
        assert plums_glob(pattern) == pathlib_glob(pattern)
def load(cls, filepath):
    """Load an image as an RGB :class:`~numpy.ndarray`.

    Args:
        filepath (PathLike): The path to the image file on disk.

    Returns:
        (|Image|): An |Image| instance wrapping an HWC :class:`~numpy.ndarray`.

    Raises:
        TypeError: If the file content is neither JPEG nor PNG.

    """
    filepath = Path(filepath)
    # Sniff the actual image format from the file content, not its extension.
    image_type = imghdr.what(str(filepath))
    if image_type == 'jpeg':
        return cls(_load_jpg(filepath))
    if image_type == 'png':
        return cls(_load_png(filepath))
    raise TypeError('Unsupported image type: {}.'.format(image_type))