Code example #1
File: options.py  Project: snowman2/rasterio
def file_in_handler(ctx, param, value):
    """Normalize ordinary filesystem and VFS paths"""
    try:
        path = _parse_path(value)

        if isinstance(path, _UnparsedPath):

            if os.path.exists(path.path) and rasterio.shutil.exists(value):
                return abspath_forward_slashes(path.path)
            else:
                return path.name

        elif path.scheme and path.is_remote:
            return path.name

        elif path.archive:
            if os.path.exists(path.archive) and rasterio.shutil.exists(value):
                archive = abspath_forward_slashes(path.archive)
                return "{}://{}!{}".format(path.scheme, archive, path.path)
            else:
                raise OSError("Input archive {} does not exist".format(
                    path.archive))

        else:
            if os.path.exists(path.path) and rasterio.shutil.exists(value):
                return abspath_forward_slashes(path.path)
            else:
                raise OSError("Input file {} does not exist".format(path.path))

    except Exception:
        raise click.BadParameter("{} is not a valid input file".format(value))
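For context, a callback like this is normally attached to a Click argument or option, and the command function receives the normalized path the callback returns. Below is a minimal sketch (not part of the rasterio source; the `info` command and argument name are hypothetical):

import click

# Hypothetical command wiring the callback above to a positional argument.
@click.command()
@click.argument("input", callback=file_in_handler)
def info(input):
    """Echo the normalized input path."""
    click.echo(input)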
Code example #2
File: test_path.py  Project: snowman2/rasterio
def test_parse_path_accept_get_params():
    # See https://github.com/rasterio/rasterio/issues/1121
    parsed = _parse_path('http://example.com/index?a=1')
    assert isinstance(parsed, _ParsedPath)
    assert parsed.path == 'example.com/index?a=1'
    assert parsed.archive is None
    assert parsed.scheme == 'http'
Code example #3
    def cls_from_path(path):
        """Find the session class suited to the data at `path`.

        Parameters
        ----------
        path : str
            A dataset path or identifier.

        Returns
        -------
        class

        """
        if not path:
            return DummySession

        path = _parse_path(path)

        if isinstance(path, _UnparsedPath) or path.is_local:
            return DummySession

        elif (
            path.scheme == "s3" or "amazonaws.com" in path.path
        ) and "X-Amz-Signature" not in path.path:
            if boto3 is not None:
                return AWSSession
            else:
                log.info("boto3 not available, falling back to a DummySession.")
                return DummySession

        elif path.scheme == "oss" or "aliyuncs.com" in path.path:
            return OSSSession

        elif path.path.startswith("/vsiswift/"):
            return SwiftSession

        elif path.scheme == "az":
            return AzureSession

        # This factory can be extended to other cloud providers here.
        # elif path.scheme == "cumulonimbus":  # for example.
        #     return CumulonimbusSession(*args, **kwargs)

        else:
            return DummySession
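A rough illustration of the dispatch above, assuming this factory is exposed as `Session.cls_from_path` in rasterio.session; the results for cloud schemes depend on which optional dependencies (such as boto3) are installed.

from rasterio.session import Session

Session.cls_from_path("example.tif")              # local path -> DummySession
Session.cls_from_path("s3://bucket/key.tif")      # -> AWSSession if boto3 is available
Session.cls_from_path("oss://bucket/key.tif")     # -> OSSSession
Session.cls_from_path("az://container/blob.tif")  # -> AzureSession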
Code example #4
File: test_path.py  Project: snowman2/rasterio
def test_parse_gdal():
    """GDAL dataset identifiers fall through properly"""
    assert _parse_path('GDAL:filepath:varname').path == 'GDAL:filepath:varname'
Code example #5
File: test_path.py  Project: snowman2/rasterio
def test_parse_gdal_vsi():
    """GDAL dataset identifiers fall through properly"""
    assert _parse_path('/vsifoo/bar').path == '/vsifoo/bar'
Code example #6
File: test_path.py  Project: snowman2/rasterio
def test_parse_path_file():
    """Correctly parse an ordinary filesystem path"""
    parsed = _parse_path('/foo.tif')
    assert parsed.path == '/foo.tif'
Code example #7
File: test_path.py  Project: snowman2/rasterio
def test_parse_path_file_scheme():
    """Correctly parse file:// URL"""
    parsed = _parse_path('file://foo.tif')
    assert parsed.path == 'foo.tif'
    assert parsed.archive is None
    assert parsed.scheme == 'file'
Code example #8
File: test_path.py  Project: snowman2/rasterio
def test_parse_path_zip_and_file():
    """Correctly parse zip+file scheme URL"""
    parsed = _parse_path('zip+file://tests/data/files.zip!foo.tif')
    assert parsed.path == 'foo.tif'
    assert parsed.archive == 'tests/data/files.zip'
    assert parsed.scheme == 'zip+file'
Code example #9
File: test_path.py  Project: snowman2/rasterio
def test_parse_path_win_no_pathlib(monkeypatch):
    monkeypatch.setattr(rasterio._path.sys, "platform", "win32")
    monkeypatch.setattr(rasterio._path, "pathlib", None)
    assert isinstance(_parse_path(r"C:\foo\bar.tif"), _UnparsedPath)
Code example #10
File: test_path.py  Project: snowman2/rasterio
def test_parse_path():
    pathlib = pytest.importorskip("pathlib")
    assert isinstance(_parse_path(pathlib.Path("/foo/bar.tif")), _ParsedPath)
Code example #11
File: test_path.py  Project: snowman2/rasterio
def test_path_error(path):
    with pytest.raises(PathError):
        _parse_path(path)
Code example #12
File: test_path.py  Project: snowman2/rasterio
def test_driver_prefixed_path(path):
    parsed = _parse_path(path)
    assert isinstance(parsed, _UnparsedPath)
Code example #13
File: test_path.py  Project: snowman2/rasterio
def test_vsi_path_zip_plus_https():
    """A zip+https:// URLs vsi path is correct (see #1151)"""
    url = 'zip+https://example.com/foo.zip!bar.tif'
    assert _vsi_path(_parse_path(url)) == '/vsizip/vsicurl/https://example.com/foo.zip/bar.tif'
Code example #14
File: test_path.py  Project: snowman2/rasterio
def test_vsi_path_zip():
    """A zip:// URLs vsi path is correct (see #1377)"""
    url = 'zip:///path/to/zip/some.zip!path/to/file.txt'
    assert _vsi_path(_parse_path(url)) == '/vsizip//path/to/zip/some.zip/path/to/file.txt'
Code example #15
File: test_path.py  Project: snowman2/rasterio
def test_parse_windows_path(monkeypatch):
    """Return Windows paths unparsed"""
    monkeypatch.setattr(sys, 'platform', 'win32')
    assert _parse_path(r'C:\\foo.tif').path == r'C:\\foo.tif'
Code example #16
def open(fp, mode='r', driver=None, width=None, height=None, count=None,
         crs=None, transform=None, dtype=None, nodata=None, sharing=False,
         **kwargs):
    """Open a dataset for reading or writing.

    The dataset may be located in a local file, in a resource located by
    a URL, or contained within a stream of bytes.

    In read ('r') or read/write ('r+') mode, no keyword arguments are
    required: these attributes are supplied by the opened dataset.

    In write ('w' or 'w+') mode, the driver, width, height, count, and dtype
    keywords are strictly required.

    Parameters
    ----------
    fp : str, file object, PathLike object, FilePath, or MemoryFile
        A filename or URL, a file object opened in binary ('rb') mode, a
        Path object, or one of the rasterio classes that provides the
        dataset-opening interface (has an open method that returns a
        dataset).
    mode : str, optional
        'r' (read, the default), 'r+' (read/write), 'w' (write), or
        'w+' (write/read).
    driver : str, optional
        A short format driver name (e.g. "GTiff" or "JPEG") or a list of
        such names (see GDAL docs at
        https://gdal.org/drivers/raster/index.html). In 'w' or 'w+' modes
        a single name is required. In 'r' or 'r+' modes the driver can
        usually be omitted. Registered drivers will be tried
        sequentially until a match is found. When multiple drivers are
        available for a format such as JPEG2000, one of them can be
        selected by using this keyword argument.
    width : int, optional
        The number of columns of the raster dataset. Required in 'w' or
        'w+' modes, it is ignored in 'r' or 'r+' modes.
    height : int, optional
        The number of rows of the raster dataset. Required in 'w' or
        'w+' modes, it is ignored in 'r' or 'r+' modes.
    count : int, optional
        The count of dataset bands. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    crs : str, dict, or CRS; optional
        The coordinate reference system. Required in 'w' or 'w+' modes,
        it is ignored in 'r' or 'r+' modes.
    transform : Affine instance, optional
        Affine transformation mapping the pixel space to geographic
        space. Required in 'w' or 'w+' modes, it is ignored in 'r' or
        'r+' modes.
    dtype : str or numpy dtype
        The data type for bands. For example: 'uint8' or
        ``rasterio.uint16``. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    nodata : int, float, or nan; optional
        Defines the pixel value to be interpreted as not valid data.
        Required in 'w' or 'w+' modes, it is ignored in 'r' or 'r+'
        modes.
    sharing : bool; optional
        To reduce overhead and prevent programs from running out of file
        descriptors, rasterio maintains a pool of shared low level
        dataset handles. When `True` this function will use a shared
        handle if one is available. Multithreaded programs must avoid
        sharing and should set *sharing* to `False`.
    kwargs : optional
        These are passed to format drivers as directives for creating or
        interpreting datasets. For example: in 'w' or 'w+' modes
        a `tiled=True` keyword argument will direct the GeoTIFF format
        driver to create a tiled, rather than striped, TIFF.

    Returns
    -------
    A ``DatasetReader`` or ``DatasetWriter`` object.

    Examples
    --------

    To open a GeoTIFF for reading using standard driver discovery and
    no directives:

    >>> import rasterio
    >>> with rasterio.open('example.tif') as dataset:
    ...     print(dataset.profile)

    To open a JPEG2000 using only the JP2OpenJPEG driver:

    >>> with rasterio.open(
    ...         'example.jp2', driver='JP2OpenJPEG') as dataset:
    ...     print(dataset.profile)

    To create a new 8-band, 16-bit unsigned, tiled, and LZW-compressed
    GeoTIFF with a global extent and 0.5 degree resolution:

    >>> from rasterio.transform import from_origin
    >>> with rasterio.open(
    ...         'example.tif', 'w', driver='GTiff', dtype='uint16',
    ...         width=720, height=360, count=8, crs='EPSG:4326',
    ...         transform=from_origin(-180.0, 90.0, 0.5, 0.5),
    ...         nodata=0, tiled=True, compress='lzw') as dataset:
    ...     dataset.write(...)
    """

    if not isinstance(fp, str):
        if not (
            hasattr(fp, "read")
            or hasattr(fp, "write")
            or isinstance(fp, (os.PathLike, MemoryFile, FilePath))
        ):
            raise TypeError("invalid path or file: {0!r}".format(fp))
    if mode and not isinstance(mode, str):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, str):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if nodata is not None:
        nodata = float(nodata)
    if transform:
        transform = guard_transform(transform)

    # Check driver/mode blacklist.
    if driver and is_blacklisted(driver, mode):
        raise RasterioIOError(
            "Blacklisted: file cannot be opened by "
            "driver '{0}' in '{1}' mode".format(driver, mode))

    # If the fp argument is a file-like object and can be adapted by
    # rasterio's FilePath we do so. Otherwise, we use a MemoryFile to
    # hold fp's contents and store that in an ExitStack attached to the
    # dataset object that we will return. When a dataset's close method
    # is called, this ExitStack will be unwound and the MemoryFile's
    # storage will be cleaned up.
    if mode == 'r' and hasattr(fp, 'read'):
        if have_vsi_plugin:
            return FilePath(fp).open(driver=driver, sharing=sharing, **kwargs)
        else:
            memfile = MemoryFile(fp.read())
            dataset = memfile.open(driver=driver, sharing=sharing, **kwargs)
            dataset._env.enter_context(memfile)
            return dataset

    elif mode in ('w', 'w+') and hasattr(fp, 'write'):
        memfile = MemoryFile()
        dataset = memfile.open(
            driver=driver,
            width=width,
            height=height,
            count=count,
            crs=crs,
            transform=transform,
            dtype=dtype,
            nodata=nodata,
            sharing=sharing,
            **kwargs
        )
        dataset._env.enter_context(memfile)

        # For the writing case we push an extra callback onto the
        # ExitStack. It ensures that the MemoryFile's contents are
        # copied to the open file object.
        def func(*args, **kwds):
            memfile.seek(0)
            fp.write(memfile.read())

        dataset._env.callback(func)
        return dataset

    # TODO: test for a shared base class or abstract type.
    elif isinstance(fp, (FilePath, MemoryFile)):
        if mode.startswith("r"):
            dataset = fp.open(driver=driver, sharing=sharing, **kwargs)

        # Note: FilePath does not support writing and an exception will
        # result from this.
        elif mode.startswith("w"):
            dataset = fp.open(
                driver=driver,
                width=width,
                height=height,
                count=count,
                crs=crs,
                transform=transform,
                dtype=dtype,
                nodata=nodata,
                sharing=sharing,
                **kwargs
            )
        return dataset

    # At this point, the fp argument is a string or path-like object
    # which can be converted to a string.
    else:
        raw_dataset_path = os.fspath(fp)
        path = _parse_path(raw_dataset_path)

        if mode == "r":
            dataset = DatasetReader(path, driver=driver, sharing=sharing, **kwargs)
        elif mode == "r+":
            dataset = get_writer_for_path(path, driver=driver)(
                path, mode, driver=driver, sharing=sharing, **kwargs
            )
        elif mode.startswith("w"):
            if not driver:
                driver = driver_from_extension(path)
            writer = get_writer_for_driver(driver)
            if writer is not None:
                dataset = writer(
                    path,
                    mode,
                    driver=driver,
                    width=width,
                    height=height,
                    count=count,
                    crs=crs,
                    transform=transform,
                    dtype=dtype,
                    nodata=nodata,
                    sharing=sharing,
                    **kwargs
                )
            else:
                raise DriverCapabilityError(
                    "Writer does not exist for driver: %s" % str(driver)
                )
        else:
            raise DriverCapabilityError(
                "mode must be one of 'r', 'r+', or 'w', not %s" % mode)

        return dataset
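A small usage sketch of the file-object branch described above (assuming a local 'example.tif' exists): in 'r' mode a readable object is either adapted by FilePath or copied into a MemoryFile that is cleaned up when the dataset is closed.

import rasterio

# Pass an already-open binary file object instead of a path.
with open("example.tif", "rb") as f, rasterio.open(f) as dataset:
    print(dataset.profile)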
Code example #17
File: test_path.py  Project: snowman2/rasterio
def test_parse_path_win():
    pathlib = pytest.importorskip("pathlib")
    assert isinstance(_parse_path(pathlib.PureWindowsPath(r"C:\foo\bar.tif")), _ParsedPath)
Code example #18
File: vrt.py  Project: snowman2/rasterio
def _boundless_vrt_doc(src_dataset,
                       nodata=None,
                       background=None,
                       hidenodata=False,
                       width=None,
                       height=None,
                       transform=None,
                       masked=False):
    """Make a VRT XML document.

    Parameters
    ----------
    src_dataset : Dataset
        The dataset to wrap.
    nodata : int or float, optional
        The nodata value for the VRT bands. Defaults to the source
        dataset's nodata value.
    background : int or float, optional
        The background fill value for the boundless VRT.
    hidenodata : bool, optional
        If True, a HideNoDataValue element is added to each band.
    width, height : int, optional
        The dimensions of the VRT in pixels. Default to the source
        dataset's dimensions.
    transform : Affine, optional
        The affine transform of the VRT. Defaults to the source
        dataset's transform.
    masked : bool
        If True, the src_dataset is replaced by its valid data mask.

    Returns
    -------
    str
        An XML text string.
    """

    nodata = nodata or src_dataset.nodata
    width = width or src_dataset.width
    height = height or src_dataset.height
    transform = transform or src_dataset.transform

    vrtdataset = ET.Element('VRTDataset')
    vrtdataset.attrib['rasterYSize'] = str(height)
    vrtdataset.attrib['rasterXSize'] = str(width)
    srs = ET.SubElement(vrtdataset, 'SRS')
    srs.text = src_dataset.crs.wkt if src_dataset.crs else ""
    geotransform = ET.SubElement(vrtdataset, 'GeoTransform')
    geotransform.text = ','.join([str(v) for v in transform.to_gdal()])

    for bidx, ci, block_shape, dtype in zip(src_dataset.indexes,
                                            src_dataset.colorinterp,
                                            src_dataset.block_shapes,
                                            src_dataset.dtypes):
        vrtrasterband = ET.SubElement(vrtdataset, 'VRTRasterBand')
        vrtrasterband.attrib['dataType'] = _gdal_typename(dtype)
        vrtrasterband.attrib['band'] = str(bidx)

        if background is not None or nodata is not None:
            nodatavalue = ET.SubElement(vrtrasterband, 'NoDataValue')
            nodatavalue.text = str(background or nodata)

            if hidenodata:
                hidenodatavalue = ET.SubElement(vrtrasterband,
                                                'HideNoDataValue')
                hidenodatavalue.text = "1"

        colorinterp = ET.SubElement(vrtrasterband, 'ColorInterp')
        colorinterp.text = ci.name.capitalize()

        complexsource = ET.SubElement(vrtrasterband, 'ComplexSource')
        sourcefilename = ET.SubElement(complexsource, 'SourceFilename')
        sourcefilename.attrib['relativeToVRT'] = "0"
        sourcefilename.attrib["shared"] = "0"
        sourcefilename.text = _parse_path(src_dataset.name).as_vsi()
        sourceband = ET.SubElement(complexsource, 'SourceBand')
        sourceband.text = str(bidx)
        sourceproperties = ET.SubElement(complexsource, 'SourceProperties')
        sourceproperties.attrib['RasterXSize'] = str(width)
        sourceproperties.attrib['RasterYSize'] = str(height)
        sourceproperties.attrib['dataType'] = _gdal_typename(dtype)
        sourceproperties.attrib['BlockYSize'] = str(block_shape[0])
        sourceproperties.attrib['BlockXSize'] = str(block_shape[1])
        srcrect = ET.SubElement(complexsource, 'SrcRect')
        srcrect.attrib['xOff'] = '0'
        srcrect.attrib['yOff'] = '0'
        srcrect.attrib['xSize'] = str(src_dataset.width)
        srcrect.attrib['ySize'] = str(src_dataset.height)
        dstrect = ET.SubElement(complexsource, 'DstRect')
        dstrect.attrib['xOff'] = str(
            (src_dataset.transform.xoff - transform.xoff) / transform.a)
        dstrect.attrib['yOff'] = str(
            (src_dataset.transform.yoff - transform.yoff) / transform.e)
        dstrect.attrib['xSize'] = str(src_dataset.width *
                                      src_dataset.transform.a / transform.a)
        dstrect.attrib['ySize'] = str(src_dataset.height *
                                      src_dataset.transform.e / transform.e)

        if src_dataset.nodata is not None:
            nodata_elem = ET.SubElement(complexsource, 'NODATA')
            nodata_elem.text = str(src_dataset.nodata)

        if src_dataset.options is not None:
            openoptions = ET.SubElement(complexsource, 'OpenOptions')
            for ookey, oovalue in src_dataset.options.items():
                ooi = ET.SubElement(openoptions, 'OOI')
                ooi.attrib['key'] = str(ookey)
                ooi.text = str(oovalue)

        # Effectively replaces all values of the source dataset with
        # 255.  Due to GDAL optimizations, the source dataset will not
        # be read, so we get a performance improvement.
        if masked:
            scaleratio = ET.SubElement(complexsource, 'ScaleRatio')
            scaleratio.text = '0'
            scaleoffset = ET.SubElement(complexsource, 'ScaleOffset')
            scaleoffset.text = '255'

    if all(MaskFlags.per_dataset in flags
           for flags in src_dataset.mask_flag_enums):
        maskband = ET.SubElement(vrtdataset, 'MaskBand')
        vrtrasterband = ET.SubElement(maskband, 'VRTRasterBand')
        vrtrasterband.attrib['dataType'] = 'Byte'

        simplesource = ET.SubElement(vrtrasterband, 'SimpleSource')
        sourcefilename = ET.SubElement(simplesource, 'SourceFilename')
        sourcefilename.attrib['relativeToVRT'] = "0"
        sourcefilename.attrib["shared"] = "0"
        sourcefilename.text = _parse_path(src_dataset.name).as_vsi()

        sourceband = ET.SubElement(simplesource, 'SourceBand')
        sourceband.text = 'mask,1'
        sourceproperties = ET.SubElement(simplesource, 'SourceProperties')
        sourceproperties.attrib['RasterXSize'] = str(width)
        sourceproperties.attrib['RasterYSize'] = str(height)
        sourceproperties.attrib['dataType'] = 'Byte'
        sourceproperties.attrib['BlockYSize'] = str(block_shape[0])
        sourceproperties.attrib['BlockXSize'] = str(block_shape[1])
        srcrect = ET.SubElement(simplesource, 'SrcRect')
        srcrect.attrib['xOff'] = '0'
        srcrect.attrib['yOff'] = '0'
        srcrect.attrib['xSize'] = str(src_dataset.width)
        srcrect.attrib['ySize'] = str(src_dataset.height)
        dstrect = ET.SubElement(simplesource, 'DstRect')
        dstrect.attrib['xOff'] = str(
            (src_dataset.transform.xoff - transform.xoff) / transform.a)
        dstrect.attrib['yOff'] = str(
            (src_dataset.transform.yoff - transform.yoff) / transform.e)
        dstrect.attrib['xSize'] = str(src_dataset.width)
        dstrect.attrib['ySize'] = str(src_dataset.height)

    return ET.tostring(vrtdataset).decode('ascii')
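A rough usage sketch (not from the rasterio source): `_boundless_vrt_doc` is a private helper, so its import location may change; here it is assumed to live in rasterio.vrt, and 'example.tif' is assumed to exist. The generated XML is written to a .vrt file and reopened.

from affine import Affine
import rasterio
from rasterio.vrt import _boundless_vrt_doc

with rasterio.open("example.tif") as src:
    # Pad the source by 100 pixels on the left and top edges.
    transform = src.transform * Affine.translation(-100, -100)
    xml = _boundless_vrt_doc(
        src, width=src.width + 100, height=src.height + 100, transform=transform
    )

with open("boundless.vrt", "w") as f:
    f.write(xml)

with rasterio.open("boundless.vrt") as vrt:
    print(vrt.shape)  # 100 rows/columns larger than the source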