Example #1
    def add_band(self,
                 dtype,
                 band_idx,
                 color_interp,
                 nodata=None,
                 hidenodata=False):
        vrtrasterband = ET.SubElement(self.vrtdataset, 'VRTRasterBand')
        dtype = dtype if isinstance(dtype, str) else dtype.name
        vrtrasterband.attrib['dataType'] = _gdal_typename(
            dtype) if check_dtype(dtype) else dtype
        vrtrasterband.attrib['band'] = str(band_idx)

        if nodata is not None:
            nodatavalue = ET.SubElement(vrtrasterband, 'NoDataValue')
            nodatavalue.text = str(nodata)

            if hidenodata:
                hidenodatavalue = ET.SubElement(vrtrasterband,
                                                'HideNoDataValue')
                hidenodatavalue.text = "1"

        colorinterp = ET.SubElement(vrtrasterband, 'ColorInterp')
        colorinterp.text = color_interp.capitalize()

        return vrtrasterband
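For context, a minimal standalone sketch of the same pattern, assuming rasterio's `check_dtype` and its private `_gdal_typename` helper from `rasterio.dtypes`; the band number, dtype, and element values are placeholders. It builds and prints the kind of VRTRasterBand element the method above appends to `self.vrtdataset`:

import xml.etree.ElementTree as ET

from rasterio.dtypes import check_dtype, _gdal_typename

# Stand-in for self.vrtdataset in the method above.
vrtdataset = ET.Element('VRTDataset')

band = ET.SubElement(vrtdataset, 'VRTRasterBand')
dtype = 'uint16'
# Map a numpy-style dtype name to GDAL's name (e.g. 'uint16' -> 'UInt16'),
# passing through strings that check_dtype does not recognize.
band.attrib['dataType'] = _gdal_typename(dtype) if check_dtype(dtype) else dtype
band.attrib['band'] = '1'

nodatavalue = ET.SubElement(band, 'NoDataValue')
nodatavalue.text = '0'

colorinterp = ET.SubElement(band, 'ColorInterp')
colorinterp.text = 'Gray'

print(ET.tostring(vrtdataset).decode())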
Example #2
    def _setup_band_simplesource(self, simplesource, band_idx, dtype, relative_to_vrt, file_name,
                                 rasterxsize, rasterysize, blockxsize, blockysize, nodata):
        sourcefilename = ET.SubElement(simplesource, 'SourceFilename')
        sourcefilename.attrib['relativeToVRT'] = "1" if relative_to_vrt else "0"
        sourcefilename.text = vsi_path(parse_path(file_name))
        sourceband = ET.SubElement(simplesource, 'SourceBand')
        sourceband.text = str(band_idx)
        sourceproperties = ET.SubElement(simplesource, 'SourceProperties')
        sourceproperties.attrib['RasterXSize'] = str(rasterxsize)
        sourceproperties.attrib['RasterYSize'] = str(rasterysize)
        if blockxsize is not None and blockysize is not None:
            sourceproperties.attrib['BlockXSize'] = str(blockxsize)
            sourceproperties.attrib['BlockYSize'] = str(blockysize)
        dtype = dtype if isinstance(dtype, str) else dtype.name
        sourceproperties.attrib['DataType'] = _gdal_typename(dtype) if check_dtype(dtype) else dtype
Example #3
def open(fp, mode='r', driver=None, width=None, height=None, count=None,
         crs=None, transform=None, dtype=None, nodata=None, **kwargs):
    """Open a dataset for reading or writing.

    The dataset may be located in a local file, in a resource located by
    a URL, or contained within a stream of bytes.

    In read ('r') or read/write ('r+') mode, no keyword arguments are
    required: these attributes are supplied by the opened dataset.

    In write ('w') mode, the driver, width, height, count, and dtype
    keywords are strictly required.

    Parameters
    ----------
    fp : str or file object
        A filename or URL, or file object opened in binary ('rb') mode
    mode : str, optional
        'r' (read, the default), 'r+' (read/write), or 'w' (write)
    driver : str, optional
        A short format driver name (e.g. "GTiff" or "JPEG") or a list of
        such names (see GDAL docs at
        http://www.gdal.org/formats_list.html). In 'w' mode a single
        name is required. In 'r' or 'r+' mode the driver can usually be
        omitted. Registered drivers will be tried sequentially until a
        match is found. When multiple drivers are available for a format
        such as JPEG2000, one of them can be selected by using this
        keyword argument.
    width, height : int, optional
        The numbers of columns and rows of the raster dataset. Required
        in 'w' mode, they are ignored in 'r' or 'r+' mode.
    count : int, optional
        The count of dataset bands. Required in 'w' mode, it is ignored
        in 'r' or 'r+' mode.
    dtype : str or numpy dtype
        The data type for bands. For example: 'uint8' or
        ``rasterio.uint16``. Required in 'w' mode, it is ignored in
        'r' or 'r+' mode.
    crs : str, dict, or CRS; optional
        The coordinate reference system. Required in 'w' mode, it is
        ignored in 'r' or 'r+' mode.
    transform : Affine instance, optional
        Affine transformation mapping the pixel space to geographic
        space. Required in 'w' mode, it is ignored in 'r' or 'r+' mode.
    nodata : int, float, or nan; optional
        Defines the pixel value to be interpreted as not valid data.
        Required in 'w' mode, it is ignored in 'r' or 'r+' mode.
    kwargs : optional
        These are passed to format drivers as directives for creating
        or interpreting datasets. For example: in 'w' a `tiled=True`
        keyword argument will direct the GeoTIFF format driver to
        create a tiled, rather than striped, TIFF.

    Returns
    -------
    A ``DatasetReader`` or ``DatasetUpdater`` object.

    Examples
    --------

    To open a GeoTIFF for reading using standard driver discovery and
    no directives:

    >>> import rasterio
    >>> with rasterio.open('example.tif') as dataset:
    ...     print(dataset.profile)

    To open a JPEG2000 using only the JP2OpenJPEG driver:

    >>> with rasterio.open(
    ...         'example.jp2', driver='JP2OpenJPEG') as dataset:
    ...     print(dataset.profile)

    To create a new 8-band, 16-bit unsigned, tiled, and LZW-compressed
    GeoTIFF with a global extent and 0.5 degree resolution:

    >>> from rasterio.transform import from_origin
    >>> with rasterio.open(
    ...         'example.tif', 'w', driver='GTiff', dtype='uint16',
    ...         width=720, height=360, count=8, crs='EPSG:4326',
    ...         transform=from_origin(-180.0, 90.0, 0.5, 0.5),
    ...         nodata=0, tiled=True, compress='lzw') as dataset:
    ...     dataset.write(...)
    """

    if not isinstance(fp, string_types):
        if not (hasattr(fp, 'read') or hasattr(fp, 'write')):
            raise TypeError("invalid path or file: {0!r}".format(fp))
    if mode and not isinstance(mode, string_types):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, string_types):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if nodata is not None:
        nodata = float(nodata)
    if 'affine' in kwargs:
        # DeprecationWarning's are ignored by default
        with warnings.catch_warnings():
            warnings.warn(
                "The 'affine' kwarg in rasterio.open() is deprecated at 1.0 "
                "and only remains to ease the transition.  Please switch to "
                "the 'transform' kwarg.  See "
                "https://github.com/mapbox/rasterio/issues/86 for details.",
                DeprecationWarning,
                stacklevel=2)

            if transform:
                warnings.warn(
                    "Found both 'affine' and 'transform' in rasterio.open() - "
                    "choosing 'transform'")
                transform = transform
            else:
                transform = kwargs.pop('affine')

    if transform:
        transform = guard_transform(transform)

    # Check driver/mode blacklist.
    if driver and is_blacklisted(driver, mode):
        raise RasterioIOError(
            "Blacklisted: file cannot be opened by "
            "driver '{0}' in '{1}' mode".format(driver, mode))

    # Special case for file object argument.
    if mode == 'r' and hasattr(fp, 'read'):

        @contextmanager
        def fp_reader(fp):
            memfile = MemoryFile(fp.read())
            dataset = memfile.open()
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.close()

        return fp_reader(fp)

    elif mode == 'w' and hasattr(fp, 'write'):

        @contextmanager
        def fp_writer(fp):
            memfile = MemoryFile()
            dataset = memfile.open(driver=driver, width=width, height=height,
                                   count=count, crs=crs, transform=transform,
                                   dtype=dtype, nodata=nodata, **kwargs)
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.seek(0)
                fp.write(memfile.read())
                memfile.close()

        return fp_writer(fp)

    else:
        # The 'normal' filename or URL path.
        _, _, scheme = parse_path(fp)

        with Env() as env:
            if scheme == 's3':
                env.credentialize()

            # Create dataset instances and pass the given env, which will
            # be taken over by the dataset's context manager if it is not
            # None.
            if mode == 'r':
                s = DatasetReader(fp, driver=driver, **kwargs)
            elif mode == 'r-':
                warnings.warn("'r-' mode is deprecated, use 'r'",
                              DeprecationWarning)
                s = DatasetReader(fp)
            elif mode == 'r+':
                s = get_writer_for_path(fp)(fp, mode, driver=driver, **kwargs)
            elif mode == 'w':
                s = get_writer_for_driver(driver)(fp, mode, driver=driver,
                                                  width=width, height=height,
                                                  count=count, crs=crs,
                                                  transform=transform,
                                                  dtype=dtype, nodata=nodata,
                                                  **kwargs)
            else:
                raise ValueError(
                    "mode must be one of 'r', 'r+', or 'w', not %s" % mode)
            return s
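The docstring's examples all pass a filename; the file-object branch above (served through an in-memory file) can be exercised like this. A sketch: 'example.tif' is a placeholder path.

import rasterio

# Opening from a binary file object: open() reads the stream and returns a
# dataset backed by an in-memory copy of it.
with open('example.tif', 'rb') as f, rasterio.open(f) as dataset:
    print(dataset.profile)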
Example #4
def open(fp, mode='r', driver=None, width=None, height=None, count=None,
         crs=None, transform=None, dtype=None, nodata=None, sharing=True,
         **kwargs):
    """Open a dataset for reading or writing.

    The dataset may be located in a local file, in a resource located by
    a URL, or contained within a stream of bytes.

    In read ('r') or read/write ('r+') mode, no keyword arguments are
    required: these attributes are supplied by the opened dataset.

    In write ('w' or 'w+') mode, the driver, width, height, count, and dtype
    keywords are strictly required.

    Parameters
    ----------
    fp : str, file object or pathlib.Path object
        A filename or URL, a file object opened in binary ('rb') mode,
        or a Path object.
    mode : str, optional
        'r' (read, the default), 'r+' (read/write), 'w' (write), or
        'w+' (write/read).
    driver : str, optional
        A short format driver name (e.g. "GTiff" or "JPEG") or a list of
        such names (see GDAL docs at
        http://www.gdal.org/formats_list.html). In 'w' or 'w+' modes
        a single name is required. In 'r' or 'r+' modes the driver can
        usually be omitted. Registered drivers will be tried
        sequentially until a match is found. When multiple drivers are
        available for a format such as JPEG2000, one of them can be
        selected by using this keyword argument.
    width, height : int, optional
        The numbers of columns and rows of the raster dataset. Required
        in 'w' or 'w+' modes, they are ignored in 'r' or 'r+' modes.
    count : int, optional
        The count of dataset bands. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    dtype : str or numpy dtype
        The data type for bands. For example: 'uint8' or
        ``rasterio.uint16``. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    crs : str, dict, or CRS; optional
        The coordinate reference system. Required in 'w' or 'w+' modes,
        it is ignored in 'r' or 'r+' modes.
    transform : Affine instance, optional
        Affine transformation mapping the pixel space to geographic
        space. Required in 'w' or 'w+' modes, it is ignored in 'r' or
        'r+' modes.
    nodata : int, float, or nan; optional
        Defines the pixel value to be interpreted as not valid data.
        Required in 'w' or 'w+' modes, it is ignored in 'r' or 'r+'
        modes.
    sharing : bool
        A flag that allows sharing of dataset handles. Default is
        `True`. Should be set to `False` in a multithreaded program.
    kwargs : optional
        These are passed to format drivers as directives for creating or
        interpreting datasets. For example: in 'w' or 'w+' modes
        a `tiled=True` keyword argument will direct the GeoTIFF format
        driver to create a tiled, rather than striped, TIFF.

    Returns
    -------
    A ``DatasetReader`` or ``DatasetUpdater`` object.

    Examples
    --------

    To open a GeoTIFF for reading using standard driver discovery and
    no directives:

    >>> import rasterio
    >>> with rasterio.open('example.tif') as dataset:
    ...     print(dataset.profile)

    To open a JPEG2000 using only the JP2OpenJPEG driver:

    >>> with rasterio.open(
    ...         'example.jp2', driver='JP2OpenJPEG') as dataset:
    ...     print(dataset.profile)

    To create a new 8-band, 16-bit unsigned, tiled, and LZW-compressed
    GeoTIFF with a global extent and 0.5 degree resolution:

    >>> from rasterio.transform import from_origin
    >>> with rasterio.open(
    ...         'example.tif', 'w', driver='GTiff', dtype='uint16',
    ...         width=720, height=360, count=8, crs='EPSG:4326',
    ...         transform=from_origin(-180.0, 90.0, 0.5, 0.5),
    ...         nodata=0, tiled=True, compress='lzw') as dataset:
    ...     dataset.write(...)
    """

    if not isinstance(fp, string_types):
        if not (hasattr(fp, 'read') or hasattr(fp, 'write') or isinstance(fp, Path)):
            raise TypeError("invalid path or file: {0!r}".format(fp))
    if mode and not isinstance(mode, string_types):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, string_types):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if nodata is not None:
        nodata = float(nodata)
    if transform:
        transform = guard_transform(transform)

    # Check driver/mode blacklist.
    if driver and is_blacklisted(driver, mode):
        raise RasterioIOError(
            "Blacklisted: file cannot be opened by "
            "driver '{0}' in '{1}' mode".format(driver, mode))

    # Special case for file object argument.
    if mode == 'r' and hasattr(fp, 'read'):

        @contextmanager
        def fp_reader(fp):
            memfile = MemoryFile(fp.read())
            dataset = memfile.open()
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.close()

        return fp_reader(fp)

    elif mode in ('w', 'w+') and hasattr(fp, 'write'):

        @contextmanager
        def fp_writer(fp):
            memfile = MemoryFile()
            dataset = memfile.open(driver=driver, width=width, height=height,
                                   count=count, crs=crs, transform=transform,
                                   dtype=dtype, nodata=nodata, **kwargs)
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.seek(0)
                fp.write(memfile.read())
                memfile.close()

        return fp_writer(fp)

    else:
        # If a pathlib.Path instance is given, convert it to a string path.
        if isinstance(fp, Path):
            fp = str(fp)

        # The 'normal' filename or URL path.
        path = parse_path(fp)

        # Create dataset instances and pass the given env, which will
        # be taken over by the dataset's context manager if it is not
        # None.
        if mode == 'r':
            s = DatasetReader(path, driver=driver, **kwargs)
        elif mode == 'r+':
            s = get_writer_for_path(path)(path, mode, driver=driver, **kwargs)
        elif mode.startswith("w"):
            s = get_writer_for_driver(driver)(path, mode, driver=driver,
                                              width=width, height=height,
                                              count=count, crs=crs,
                                              transform=transform,
                                              dtype=dtype, nodata=nodata,
                                              **kwargs)
        else:
            raise ValueError(
                "mode must be one of 'r', 'r+', or 'w', not %s" % mode)
        return s
Example #5
def test_check_dtype_invalid():
    assert not check_dtype('foo')
Example #6
def test_dt_ubyte():
    assert check_dtype(ubyte)
Example #7
def test_np_dt_uint8():
    assert check_dtype(np.uint8)
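Taken together, and with the imports their test modules assume, the three snippets above run as a self-contained check of check_dtype. A sketch, assuming rasterio and numpy are installed:

import numpy as np

from rasterio import ubyte
from rasterio.dtypes import check_dtype


def test_check_dtype_invalid():
    # An arbitrary string is not a recognized dtype.
    assert not check_dtype('foo')


def test_dt_ubyte():
    # rasterio's own dtype alias ('uint8') is recognized.
    assert check_dtype(ubyte)


def test_np_dt_uint8():
    # A numpy scalar type is recognized as well.
    assert check_dtype(np.uint8)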
Example #8
def open(path, mode='r', driver=None, width=None, height=None,
         count=None, crs=None, transform=None, dtype=None, nodata=None,
         **kwargs):
    """Open file at ``path`` in ``mode`` 'r' (read), 'r+' (read and
    write), or 'w' (write) and return a dataset Reader or Updater
    object.

    In write mode, a driver name such as "GTiff" or "JPEG" (see GDAL
    docs or ``gdal_translate --help`` on the command line),
    ``width`` (number of pixels per line) and ``height`` (number of
    lines), the ``count`` number of bands in the new file must be
    specified.  Additionally, the data type for bands such as
    ``rasterio.ubyte`` for 8-bit bands or ``rasterio.uint16`` for
    16-bit bands must be specified using the ``dtype`` argument.

    Parameters
    ----------
    mode: string
        "r" (read), "r+" (read/write), or "w" (write)
    driver: string
        driver code specifying the format name (e.g. "GTiff" or
        "JPEG"). See GDAL docs at
        http://www.gdal.org/formats_list.html (optional, required
        for writing).
    width: int
        number of pixels per line
        (optional, required for write)
    height: int
        number of lines
        (optional, required for write)
    count: int > 0
        number of bands
        (optional, required for write)
    dtype: rasterio.dtype
        the data type for bands such as ``rasterio.ubyte`` for
        8-bit bands or ``rasterio.uint16`` for 16-bit bands
        (optional, required for write)
    crs: dict or string
        Coordinate reference system
        (optional, recommended for write)
    transform: Affine instance
        Affine transformation mapping the pixel space to geographic
        space (optional, recommended for writing).
    nodata: number
        Defines pixel value to be interpreted as null/nodata
        (optional, recommended for write)

    Returns
    -------
    A ``DatasetReader`` or ``DatasetUpdater`` object.

    Notes
    -----
    In write mode, you must specify at least ``width``, ``height``,
    ``count`` and ``dtype``.

    A coordinate reference system for raster datasets in write mode
    can be defined by the ``crs`` argument. It takes Proj4 style
    mappings like

    .. code::

      {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
       'no_defs': True}

    An affine transformation that maps ``col,row`` pixel coordinates
    to ``x,y`` coordinates in the coordinate reference system can be
    specified using the ``transform`` argument. The value should be
    an instance of ``affine.Affine``

    .. code:: python

        >>> from affine import Affine
        >>> Affine(0.5, 0.0, -180.0, 0.0, -0.5, 90.0)

    These coefficients are shown in the figure below.

    .. code::

      | x |   | a  b  c | | c |
      | y | = | d  e  f | | r |
      | 1 |   | 0  0  1 | | 1 |

      a: rate of change of X with respect to increasing column,
         i.e. pixel width
      b: rotation, 0 if the raster is oriented "north up"
      c: X coordinate of the top left corner of the top left pixel
      d: rotation, 0 if the raster is oriented "north up"
      e: rate of change of Y with respect to increasing row,
         usually a negative number (i.e. -1 * pixel height) if
         north-up.
      f: Y coordinate of the top left corner of the top left pixel

    A 6-element sequence of the affine transformation matrix
    coefficients in ``c, a, b, f, d, e`` order, (i.e. GDAL
    geotransform order) will be accepted until 1.0 (deprecated).

    A virtual filesystem can be specified. The ``vfs`` parameter may
    be an Apache Commons VFS style string beginning with "zip://" or
    "tar://"". In this case, the ``path`` must be an absolute path
    within that container.

    """
    if not isinstance(path, string_types):
        raise TypeError("invalid path: {0!r}".format(path))
    if mode and not isinstance(mode, string_types):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, string_types):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if transform:
        transform = guard_transform(transform)
    elif 'affine' in kwargs:
        affine = kwargs.pop('affine')
        transform = guard_transform(affine)

    # If there is no currently active GDAL/AWS environment, create one.
    defenv()

    # Get AWS credentials if we're attempting to access a raster
    # on S3.
    pth, archive, scheme = parse_path(path)
    if scheme == 's3':
        Env().get_aws_credentials()
        log.debug("AWS credentials have been obtained")

    # Create dataset instances and pass the given env, which will
    # be taken over by the dataset's context manager if it is not
    # None.
    if mode == 'r':
        from rasterio._io import RasterReader
        s = RasterReader(path)
    elif mode == 'r+':
        from rasterio._io import writer
        s = writer(path, mode)
    elif mode == 'r-':
        from rasterio._base import DatasetReader
        s = DatasetReader(path)
    elif mode == 'w':
        from rasterio._io import writer
        s = writer(path, mode, driver=driver,
                   width=width, height=height, count=count,
                   crs=crs, transform=transform, dtype=dtype,
                   nodata=nodata, **kwargs)
    else:
        raise ValueError(
            "mode string must be one of 'r', 'r+', or 'w', not %s" % mode)
    s.start()
    return s
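A short worked example of the affine mapping described in the docstring above: with GDAL-style coefficients for a global, north-up, 0.5-degree grid, (col, row) pixel coordinates map to (x, y) coordinates as follows.

from affine import Affine

# a=0.5 (pixel width), e=-0.5 (negative pixel height), c=-180.0, f=90.0
transform = Affine(0.5, 0.0, -180.0,
                   0.0, -0.5, 90.0)

print(transform * (0, 0))    # (-180.0, 90.0): corner of the top left pixel
print(transform * (10, 20))  # (-175.0, 80.0): corner of the pixel at col 10, row 20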
Example #9
def test_dt_ubyte():
    assert check_dtype(ubyte)
Example #10
def test_np_dt_uint8():
    assert check_dtype(np.uint8)
Example #11
def open(fp, mode='r', driver=None, width=None, height=None, count=None,
         crs=None, transform=None, dtype=None, nodata=None, sharing=False,
         **kwargs):
    """Open a dataset for reading or writing.

    The dataset may be located in a local file, in a resource located by
    a URL, or contained within a stream of bytes.

    In read ('r') or read/write ('r+') mode, no keyword arguments are
    required: these attributes are supplied by the opened dataset.

    In write ('w' or 'w+') mode, the driver, width, height, count, and dtype
    keywords are strictly required.

    Parameters
    ----------
    fp : str, file object, PathLike object, FilePath, or MemoryFile
        A filename or URL, a file object opened in binary ('rb') mode, a
        Path object, or one of the rasterio classes that provides the
        dataset-opening interface (has an open method that returns a
        dataset).
    mode : str, optional
        'r' (read, the default), 'r+' (read/write), 'w' (write), or
        'w+' (write/read).
    driver : str, optional
        A short format driver name (e.g. "GTiff" or "JPEG") or a list of
        such names (see GDAL docs at
        https://gdal.org/drivers/raster/index.html). In 'w' or 'w+' modes
        a single name is required. In 'r' or 'r+' modes the driver can
        usually be omitted. Registered drivers will be tried
        sequentially until a match is found. When multiple drivers are
        available for a format such as JPEG2000, one of them can be
        selected by using this keyword argument.
    width : int, optional
        The number of columns of the raster dataset. Required in 'w' or
        'w+' modes, it is ignored in 'r' or 'r+' modes.
    height : int, optional
        The number of rows of the raster dataset. Required in 'w' or
        'w+' modes, it is ignored in 'r' or 'r+' modes.
    count : int, optional
        The count of dataset bands. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    crs : str, dict, or CRS; optional
        The coordinate reference system. Required in 'w' or 'w+' modes,
        it is ignored in 'r' or 'r+' modes.
    transform : Affine instance, optional
        Affine transformation mapping the pixel space to geographic
        space. Required in 'w' or 'w+' modes, it is ignored in 'r' or
        'r+' modes.
    dtype : str or numpy dtype
        The data type for bands. For example: 'uint8' or
        ``rasterio.uint16``. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    nodata : int, float, or nan; optional
        Defines the pixel value to be interpreted as not valid data.
        Required in 'w' or 'w+' modes, it is ignored in 'r' or 'r+'
        modes.
    sharing : bool; optional
        To reduce overhead and prevent programs from running out of file
        descriptors, rasterio maintains a pool of shared low level
        dataset handles. When `True` this function will use a shared
        handle if one is available. Multithreaded programs must avoid
        sharing and should set *sharing* to `False`.
    kwargs : optional
        These are passed to format drivers as directives for creating or
        interpreting datasets. For example: in 'w' or 'w+' modes
        a `tiled=True` keyword argument will direct the GeoTIFF format
        driver to create a tiled, rather than striped, TIFF.

    Returns
    -------
    A ``DatasetReader`` or ``DatasetWriter`` object.

    Examples
    --------

    To open a GeoTIFF for reading using standard driver discovery and
    no directives:

    >>> import rasterio
    >>> with rasterio.open('example.tif') as dataset:
    ...     print(dataset.profile)

    To open a JPEG2000 using only the JP2OpenJPEG driver:

    >>> with rasterio.open(
    ...         'example.jp2', driver='JP2OpenJPEG') as dataset:
    ...     print(dataset.profile)

    To create a new 8-band, 16-bit unsigned, tiled, and LZW-compressed
    GeoTIFF with a global extent and 0.5 degree resolution:

    >>> from rasterio.transform import from_origin
    >>> with rasterio.open(
    ...         'example.tif', 'w', driver='GTiff', dtype='uint16',
    ...         width=720, height=360, count=8, crs='EPSG:4326',
    ...         transform=from_origin(-180.0, 90.0, 0.5, 0.5),
    ...         nodata=0, tiled=True, compress='lzw') as dataset:
    ...     dataset.write(...)
    """

    if not isinstance(fp, str):
        if not (
            hasattr(fp, "read")
            or hasattr(fp, "write")
            or isinstance(fp, (os.PathLike, MemoryFile, FilePath))
        ):
            raise TypeError("invalid path or file: {0!r}".format(fp))
    if mode and not isinstance(mode, str):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, str):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if nodata is not None:
        nodata = float(nodata)
    if transform:
        transform = guard_transform(transform)

    # Check driver/mode blacklist.
    if driver and is_blacklisted(driver, mode):
        raise RasterioIOError(
            "Blacklisted: file cannot be opened by "
            "driver '{0}' in '{1}' mode".format(driver, mode))

    # If the fp argument is a file-like object and can be adapted by
    # rasterio's FilePath we do so. Otherwise, we use a MemoryFile to
    # hold fp's contents and store that in an ExitStack attached to the
    # dataset object that we will return. When a dataset's close method
    # is called, this ExitStack will be unwound and the MemoryFile's
    # storage will be cleaned up.
    if mode == 'r' and hasattr(fp, 'read'):
        if have_vsi_plugin:
            return FilePath(fp).open(driver=driver, sharing=sharing, **kwargs)
        else:
            memfile = MemoryFile(fp.read())
            dataset = memfile.open(driver=driver, sharing=sharing, **kwargs)
            dataset._env.enter_context(memfile)
            return dataset

    elif mode in ('w', 'w+') and hasattr(fp, 'write'):
        memfile = MemoryFile()
        dataset = memfile.open(
            driver=driver,
            width=width,
            height=height,
            count=count,
            crs=crs,
            transform=transform,
            dtype=dtype,
            nodata=nodata,
            sharing=sharing,
            **kwargs
        )
        dataset._env.enter_context(memfile)

        # For the writing case we push an extra callback onto the
        # ExitStack. It ensures that the MemoryFile's contents are
        # copied to the open file object.
        def func(*args, **kwds):
            memfile.seek(0)
            fp.write(memfile.read())

        dataset._env.callback(func)
        return dataset

    # TODO: test for a shared base class or abstract type.
    elif isinstance(fp, (FilePath, MemoryFile)):
        if mode.startswith("r"):
            dataset = fp.open(driver=driver, sharing=sharing, **kwargs)

        # Note: FilePath does not support writing and an exception will
        # result from this.
        elif mode.startswith("w"):
            dataset = fp.open(
                driver=driver,
                width=width,
                height=height,
                count=count,
                crs=crs,
                transform=transform,
                dtype=dtype,
                nodata=nodata,
                sharing=sharing,
                **kwargs
            )
        return dataset

    # At this point, the fp argument is a string or path-like object
    # which can be converted to a string.
    else:
        raw_dataset_path = os.fspath(fp)
        path = _parse_path(raw_dataset_path)

        if mode == "r":
            dataset = DatasetReader(path, driver=driver, sharing=sharing, **kwargs)
        elif mode == "r+":
            dataset = get_writer_for_path(path, driver=driver)(
                path, mode, driver=driver, sharing=sharing, **kwargs
            )
        elif mode.startswith("w"):
            if not driver:
                driver = driver_from_extension(path)
            writer = get_writer_for_driver(driver)
            if writer is not None:
                dataset = writer(
                    path,
                    mode,
                    driver=driver,
                    width=width,
                    height=height,
                    count=count,
                    crs=crs,
                    transform=transform,
                    dtype=dtype,
                    nodata=nodata,
                    sharing=sharing,
                    **kwargs
                )
            else:
                raise DriverCapabilityError(
                    "Writer does not exist for driver: %s" % str(driver)
                )
        else:
            raise DriverCapabilityError(
                "mode must be one of 'r', 'r+', or 'w', not %s" % mode)

        return dataset
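Since this version of open() also accepts a MemoryFile as fp, a dataset held entirely in memory can be opened like this. A sketch: 'example.tif' is a placeholder source of GeoTIFF bytes.

import rasterio
from rasterio.io import MemoryFile

with open('example.tif', 'rb') as f:
    data = f.read()

# The MemoryFile branch above opens the in-memory dataset directly.
with MemoryFile(data) as memfile:
    with rasterio.open(memfile) as dataset:
        print(dataset.profile)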
Example #12
    def add_mask_band(self, dtype):
        maskband = ET.SubElement(self.vrtdataset, 'MaskBand')
        vrtrasterband = ET.SubElement(maskband, 'VRTRasterBand')
        vrtrasterband.attrib['dataType'] = _gdal_typename(dtype) if check_dtype(dtype) else dtype
        return vrtrasterband
Example #13
def test_check_dtype_invalid():
    assert not check_dtype('foo')
Example #14
def open(path,
         mode='r',
         driver=None,
         width=None,
         height=None,
         count=None,
         crs=None,
         transform=None,
         dtype=None,
         nodata=None,
         **kwargs):
    """Open file at ``path`` in ``mode`` 'r' (read), 'r+' (read and
    write), or 'w' (write) and return a dataset Reader or Updater
    object.

    In write mode, a driver name such as "GTiff" or "JPEG" (see GDAL
    docs or ``gdal_translate --help`` on the command line),
    ``width`` (number of pixels per line) and ``height`` (number of
    lines), the ``count`` number of bands in the new file must be
    specified.  Additionally, the data type for bands such as
    ``rasterio.ubyte`` for 8-bit bands or ``rasterio.uint16`` for
    16-bit bands must be specified using the ``dtype`` argument.

    Parameters
    ----------
    mode: string
        "r" (read), "r+" (read/write), or "w" (write)
    driver: string
        driver code specifying the format name (e.g. "GTiff" or
        "JPEG"). See GDAL docs at
        http://www.gdal.org/formats_list.html (optional, required
        for writing).
    width: int
        number of pixels per line
        (optional, required for write)
    height: int
        number of lines
        (optional, required for write)
    count: int > 0
        number of bands
        (optional, required for write)
    dtype: rasterio.dtype
        the data type for bands such as ``rasterio.ubyte`` for
        8-bit bands or ``rasterio.uint16`` for 16-bit bands
        (optional, required for write)
    crs: dict or string
        Coordinate reference system
        (optional, recommended for write)
    transform: Affine instance
        Affine transformation mapping the pixel space to geographic
        space (optional, recommended for writing).
    nodata: number
        Defines pixel value to be interpreted as null/nodata
        (optional, recommended for write)

    Returns
    -------
    A ``DatasetReader`` or ``DatasetUpdater`` object.

    Notes
    -----
    In write mode, you must specify at least ``width``, ``height``,
    ``count`` and ``dtype``.

    A coordinate reference system for raster datasets in write mode
    can be defined by the ``crs`` argument. It takes Proj4 style
    mappings like

    .. code::

      {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
       'no_defs': True}

    An affine transformation that maps ``col,row`` pixel coordinates
    to ``x,y`` coordinates in the coordinate reference system can be
    specified using the ``transform`` argument. The value should be
    an instance of ``affine.Affine``

    .. code:: python

        >>> from affine import Affine
        >>> transform = Affine(0.5, 0.0, -180.0, 0.0, -0.5, 90.0)

    These coefficients are shown in the figure below.

    .. code::

      | x |   | a  b  c | | c |
      | y | = | d  e  f | | r |
      | 1 |   | 0  0  1 | | 1 |

      a: rate of change of X with respect to increasing column,
         i.e. pixel width
      b: rotation, 0 if the raster is oriented "north up"
      c: X coordinate of the top left corner of the top left pixel
      d: rotation, 0 if the raster is oriented "north up"
      e: rate of change of Y with respect to increasing row,
         usually a negative number (i.e. -1 * pixel height) if
         north-up.
      f: Y coordinate of the top left corner of the top left pixel

    A 6-element sequence of the affine transformation matrix
    coefficients in ``c, a, b, f, d, e`` order, (i.e. GDAL
    geotransform order) will be accepted until 1.0 (deprecated).

    A virtual filesystem can be specified. The ``vfs`` parameter may
    be an Apache Commons VFS style string beginning with "zip://" or
    "tar://"". In this case, the ``path`` must be an absolute path
    within that container.

    """
    if not isinstance(path, string_types):
        raise TypeError("invalid path: {0!r}".format(path))
    if mode and not isinstance(mode, string_types):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, string_types):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if transform:
        transform = guard_transform(transform)
    elif 'affine' in kwargs:
        affine = kwargs.pop('affine')
        transform = guard_transform(affine)

    # Get AWS credentials if we're attempting to access a raster
    # on S3.
    pth, archive, scheme = parse_path(path)
    if scheme == 's3':
        Env().get_aws_credentials()
        log.debug("AWS credentials have been obtained")

    # Create dataset instances and pass the given env, which will
    # be taken over by the dataset's context manager if it is not
    # None.
    if mode == 'r':
        from rasterio._io import RasterReader
        s = RasterReader(path)
    elif mode == 'r+':
        from rasterio._io import writer
        s = writer(path, mode)
    elif mode == 'r-':
        from rasterio._base import DatasetReader
        s = DatasetReader(path)
    elif mode == 'w':
        from rasterio._io import writer
        s = writer(path,
                   mode,
                   driver=driver,
                   width=width,
                   height=height,
                   count=count,
                   crs=crs,
                   transform=transform,
                   dtype=dtype,
                   nodata=nodata,
                   **kwargs)
    else:
        raise ValueError(
            "mode string must be one of 'r', 'r+', or 'w', not %s" % mode)
    s.start()
    return s
Example #15
def open(fp,
         mode='r',
         driver=None,
         width=None,
         height=None,
         count=None,
         crs=None,
         transform=None,
         dtype=None,
         nodata=None,
         **kwargs):
    """Open a dataset for reading or writing.

    The dataset may be located in a local file, in a resource located
    by a URL, or contained within a stream of bytes.

    To access a dataset within a zip file without unzipping the archive
    use an Apache VFS style zip:// URL like

      zip://path/to/archive.zip!path/to/example.tif

    In read ('r') or read/write ('r+') mode, no other keyword arguments
    are required: the attributes are supplied by the opened dataset.

    In write mode, a driver name such as "GTiff" or "JPEG" (see GDAL
    docs or ``gdal_translate --help`` on the command line), ``width``
    (number of pixels per line) and ``height`` (number of lines), the
    ``count`` number of bands in the new file must be specified.
    Additionally, the data type for bands such as ``rasterio.ubyte`` for
    8-bit bands or ``rasterio.uint16`` for 16-bit bands must be
    specified using the ``dtype`` argument.

    Parameters
    ----------
    fp: string or file
        A filename or URL, or file object opened in binary mode.
    mode: string
        "r" (read), "r+" (read/write), or "w" (write)
    driver: string
        Driver code specifying the format name (e.g. "GTiff" or
        "JPEG"). See GDAL docs at
        http://www.gdal.org/formats_list.html (optional, required
        for writing).
    width: int
        Number of pixels per line (optional, required for write).
    height: int
        Number of lines (optional, required for write).
    count: int > 0
        Count of bands (optional, required for write).
    dtype: rasterio.dtype
        the data type for bands such as ``rasterio.ubyte`` for
        8-bit bands or ``rasterio.uint16`` for 16-bit bands
        (optional, required for write)
    crs: dict or string
        Coordinate reference system (optional, recommended for write).
    transform: Affine instance
        Affine transformation mapping the pixel space to geographic
        space (optional, recommended for writing).
    nodata: number
        Defines pixel value to be interpreted as null/nodata
        (optional, recommended for write, will be broadcast to all
        bands).

    Returns
    -------
    A ``DatasetReader`` or ``DatasetUpdater`` object.

    Notes
    -----
    In write mode, you must specify at least ``width``, ``height``,
    ``count`` and ``dtype``.

    A coordinate reference system for raster datasets in write mode
    can be defined by the ``crs`` argument. It takes Proj4 style
    mappings like

    .. code::

      {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
       'no_defs': True}

    An affine transformation that maps ``col,row`` pixel coordinates
    to ``x,y`` coordinates in the coordinate reference system can be
    specified using the ``transform`` argument. The value should be
    an instance of ``affine.Affine``

    .. code:: python

        >>> from affine import Affine
        >>> transform = Affine(0.5, 0.0, -180.0, 0.0, -0.5, 90.0)

    These coefficients are shown in the figure below.

    .. code::

      | x |   | a  b  c | | c |
      | y | = | d  e  f | | r |
      | 1 |   | 0  0  1 | | 1 |

      a: rate of change of X with respect to increasing column,
         i.e. pixel width
      b: rotation, 0 if the raster is oriented "north up"
      c: X coordinate of the top left corner of the top left pixel
      d: rotation, 0 if the raster is oriented "north up"
      e: rate of change of Y with respect to increasing row,
         usually a negative number (i.e. -1 * pixel height) if
         north-up.
      f: Y coordinate of the top left corner of the top left pixel

    A 6-element sequence of the affine transformation matrix
    coefficients in ``c, a, b, f, d, e`` order, (i.e. GDAL
    geotransform order) will be accepted until 1.0 (deprecated).

    A virtual filesystem can be specified. The ``vfs`` parameter may
    be an Apache Commons VFS style string beginning with "zip://" or
    "tar://"". In this case, the ``path`` must be an absolute path
    within that container.
    """
    if not isinstance(fp, string_types):
        if not (hasattr(fp, 'read') or hasattr(fp, 'write')):
            raise TypeError("invalid path or file: {0!r}".format(fp))
    if mode and not isinstance(mode, string_types):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, string_types):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if nodata is not None:
        nodata = float(nodata)
    if 'affine' in kwargs:
        # DeprecationWarning's are ignored by default
        with warnings.catch_warnings():
            warnings.warn(
                "The 'affine' kwarg in rasterio.open() is deprecated at 1.0 "
                "and only remains to ease the transition.  Please switch to "
                "the 'transform' kwarg.  See "
                "https://github.com/mapbox/rasterio/issues/86 for details.",
                DeprecationWarning,
                stacklevel=2)

            if transform:
                warnings.warn(
                    "Found both 'affine' and 'transform' in rasterio.open() - "
                    "choosing 'transform'")
                transform = transform
            else:
                transform = kwargs.pop('affine')

    if transform:
        transform = guard_transform(transform)

    # Check driver/mode blacklist.
    if driver and is_blacklisted(driver, mode):
        raise RasterioIOError("Blacklisted: file cannot be opened by "
                              "driver '{0}' in '{1}' mode".format(
                                  driver, mode))

    # Special case for file object argument.
    if mode == 'r' and hasattr(fp, 'read'):

        @contextmanager
        def fp_reader(fp):
            memfile = MemoryFile(fp.read())
            dataset = memfile.open()
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.close()

        return fp_reader(fp)

    elif mode == 'w' and hasattr(fp, 'write'):

        @contextmanager
        def fp_writer(fp):
            memfile = MemoryFile()
            dataset = memfile.open(driver=driver,
                                   width=width,
                                   height=height,
                                   count=count,
                                   crs=crs,
                                   transform=transform,
                                   dtype=dtype,
                                   nodata=nodata,
                                   **kwargs)
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.seek(0)
                fp.write(memfile.read())
                memfile.close()

        return fp_writer(fp)

    else:
        # The 'normal' filename or URL path.
        _, _, scheme = parse_path(fp)

        with Env() as env:
            # Get AWS credentials only if we're attempting to access a
            # raster using the S3 scheme.
            if scheme == 's3':
                env.get_aws_credentials()
                log.debug("AWS credentials have been obtained")

            # Create dataset instances and pass the given env, which will
            # be taken over by the dataset's context manager if it is not
            # None.
            if mode == 'r':
                s = DatasetReader(fp)
            elif mode == 'r-':
                warnings.warn("'r-' mode is deprecated, use 'r'",
                              DeprecationWarning)
                s = DatasetReader(fp)
            elif mode == 'r+':
                s = get_writer_for_path(fp)(fp, mode)
            elif mode == 'w':
                s = get_writer_for_driver(driver)(fp,
                                                  mode,
                                                  driver=driver,
                                                  width=width,
                                                  height=height,
                                                  count=count,
                                                  crs=crs,
                                                  transform=transform,
                                                  dtype=dtype,
                                                  nodata=nodata,
                                                  **kwargs)
            else:
                raise ValueError(
                    "mode must be one of 'r', 'r+', or 'w', not %s" % mode)
            s.start()
            return s
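The zip:// form mentioned at the top of the docstring would be used like this. A sketch: the archive and inner file path are placeholders taken from the docstring's own example URL.

import rasterio

# Read a raster from inside a zip archive without unpacking it, using the
# Apache VFS style URL described above.
with rasterio.open('zip://path/to/archive.zip!path/to/example.tif') as dataset:
    print(dataset.profile)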
Example #16
def test_check_dtype_invalid():
    assert not check_dtype('foo')
Example #17
def open(fp, mode='r', driver=None, width=None, height=None, count=None,
         crs=None, transform=None, dtype=None, nodata=None, sharing=True,
         **kwargs):
    """Open a dataset for reading or writing.

    The dataset may be located in a local file, in a resource located by
    a URL, or contained within a stream of bytes.

    In read ('r') or read/write ('r+') mode, no keyword arguments are
    required: these attributes are supplied by the opened dataset.

    In write ('w' or 'w+') mode, the driver, width, height, count, and dtype
    keywords are strictly required.

    Parameters
    ----------
    fp : str, file object or pathlib.Path object
        A filename or URL, a file object opened in binary ('rb') mode,
        or a Path object.
    mode : str, optional
        'r' (read, the default), 'r+' (read/write), 'w' (write), or
        'w+' (write/read).
    driver : str, optional
        A short format driver name (e.g. "GTiff" or "JPEG") or a list of
        such names (see GDAL docs at
        http://www.gdal.org/formats_list.html). In 'w' or 'w+' modes
        a single name is required. In 'r' or 'r+' modes the driver can
        usually be omitted. Registered drivers will be tried
        sequentially until a match is found. When multiple drivers are
        available for a format such as JPEG2000, one of them can be
        selected by using this keyword argument.
    width, height : int, optional
        The numbers of columns and rows of the raster dataset. Required
        in 'w' or 'w+' modes, they are ignored in 'r' or 'r+' modes.
    count : int, optional
        The count of dataset bands. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    dtype : str or numpy dtype
        The data type for bands. For example: 'uint8' or
        ``rasterio.uint16``. Required in 'w' or 'w+' modes, it is
        ignored in 'r' or 'r+' modes.
    crs : str, dict, or CRS; optional
        The coordinate reference system. Required in 'w' or 'w+' modes,
        it is ignored in 'r' or 'r+' modes.
    transform : Affine instance, optional
        Affine transformation mapping the pixel space to geographic
        space. Required in 'w' or 'w+' modes, it is ignored in 'r' or
        'r+' modes.
    nodata : int, float, or nan; optional
        Defines the pixel value to be interpreted as not valid data.
        Required in 'w' or 'w+' modes, it is ignored in 'r' or 'r+'
        modes.
    sharing : bool
        A flag that allows sharing of dataset handles. Default is
        `True`. Should be set to `False` in a multithreaded program.
    kwargs : optional
        These are passed to format drivers as directives for creating or
        interpreting datasets. For example: in 'w' or 'w+' modes
        a `tiled=True` keyword argument will direct the GeoTIFF format
        driver to create a tiled, rather than striped, TIFF.

    Returns
    -------
    A ``DatasetReader`` or ``DatasetWriter`` object.

    Examples
    --------

    To open a GeoTIFF for reading using standard driver discovery and
    no directives:

    >>> import rasterio
    >>> with rasterio.open('example.tif') as dataset:
    ...     print(dataset.profile)

    To open a JPEG2000 using only the JP2OpenJPEG driver:

    >>> with rasterio.open(
    ...         'example.jp2', driver='JP2OpenJPEG') as dataset:
    ...     print(dataset.profile)

    To create a new 8-band, 16-bit unsigned, tiled, and LZW-compressed
    GeoTIFF with a global extent and 0.5 degree resolution:

    >>> from rasterio.transform import from_origin
    >>> with rasterio.open(
    ...         'example.tif', 'w', driver='GTiff', dtype='uint16',
    ...         width=720, height=360, count=8, crs='EPSG:4326',
    ...         transform=from_origin(-180.0, 90.0, 0.5, 0.5),
    ...         nodata=0, tiled=True, compress='lzw') as dataset:
    ...     dataset.write(...)
    """

    if not isinstance(fp, string_types):
        if not (hasattr(fp, 'read') or hasattr(fp, 'write') or isinstance(fp, Path)):
            raise TypeError("invalid path or file: {0!r}".format(fp))
    if mode and not isinstance(mode, string_types):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, string_types):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if nodata is not None:
        nodata = float(nodata)
    if transform:
        transform = guard_transform(transform)

    # Check driver/mode blacklist.
    if driver and is_blacklisted(driver, mode):
        raise RasterioIOError(
            "Blacklisted: file cannot be opened by "
            "driver '{0}' in '{1}' mode".format(driver, mode))

    # Special case for file object argument.
    if mode == 'r' and hasattr(fp, 'read'):

        @contextmanager
        def fp_reader(fp):
            memfile = MemoryFile(fp.read())
            dataset = memfile.open(driver=driver, sharing=sharing)
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.close()

        return fp_reader(fp)

    elif mode in ('w', 'w+') and hasattr(fp, 'write'):

        @contextmanager
        def fp_writer(fp):
            memfile = MemoryFile()
            dataset = memfile.open(driver=driver, width=width, height=height,
                                   count=count, crs=crs, transform=transform,
                                   dtype=dtype, nodata=nodata, sharing=sharing, **kwargs)
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.seek(0)
                fp.write(memfile.read())
                memfile.close()

        return fp_writer(fp)

    else:
        # If a pathlib.Path instance is given, convert it to a string path.
        if isinstance(fp, Path):
            fp = str(fp)

        # The 'normal' filename or URL path.
        path = parse_path(fp)

        # Create dataset instances and pass the given env, which will
        # be taken over by the dataset's context manager if it is not
        # None.
        if mode == 'r':
            s = DatasetReader(path, driver=driver, sharing=sharing, **kwargs)
        elif mode == 'r+':
            s = get_writer_for_path(path)(path, mode, driver=driver, sharing=sharing, **kwargs)
        elif mode.startswith("w"):
            s = get_writer_for_driver(driver)(path, mode, driver=driver,
                                              width=width, height=height,
                                              count=count, crs=crs,
                                              transform=transform,
                                              dtype=dtype, nodata=nodata,
                                              sharing=sharing,
                                              **kwargs)
        else:
            raise ValueError(
                "mode must be one of 'r', 'r+', or 'w', not %s" % mode)
        return s
Example #18
def write_geotiff(dataset,
                  filename,
                  time_index=None,
                  profile_override=None,
                  overwrite=False):
    """
    Write an xarray dataset to a geotiff
        Modified from datacube.helpers.write_geotiff to support:
            - dask lazy arrays,
            - arrays with no time dimension
            - Nodata values
            - Small rasters (row or cols < blocksize)
            - Nodata values
            - dtype checks and upcasting
            - existing output checks
        https://github.com/opendatacube/datacube-core/blob/develop/datacube/helpers.py
        Original code licensed under the Apache License, Version 2.0 (the "License");

    :param Union(str, Path) filename: Output filename
    :param xarray.Dataset dataset: xarray dataset containing multiple bands to write to file
    :param int time_index: time index to write to file
    :param dict profile_override: option dict, overrides rasterio file creation options.
    :param bool overwrite: Allow overwriting existing files.

    """

    filepath = Path(filename)
    if filepath.exists() and not overwrite:
        raise RuntimeError('Output file exists "{}"'.format(filename))

    profile_override = profile_override or {}
    profile_override = lcase_dict(
        profile_override)  # Sanitise user modifiable values

    dtype = get_dtype(dataset)

    if not check_dtype(dtype):  # Check for invalid dtypes
        dataset, dtype = upcast(dataset, dtype)

    if time_index is not None:
        dataset = dataset.isel(time=time_index)

    profile = lcase_dict(
        GTIFF_DEFAULTS.copy())  # Sanitise user modifiable values

    geobox = getattr(dataset, 'geobox', None)
    if geobox is None:
        raise ValueError(
            'Can only write datasets with specified `crs` attribute')

    height, width = geobox.shape

    nodatavals = get_nodatavals(dataset)
    try:
        nodata = nodatavals[0]
    except TypeError:
        nodata = None

    profile.update({
        'width': width,
        'height': height,
        'transform': geobox.affine,
        'crs': geobox.crs.crs_str,
        'count': len(dataset.data_vars),
        'nodata': nodata,
        'dtype': str(dtype)
    })
    profile.update(profile_override)

    # Block size must be smaller than the image size, and for geotiffs must be divisible by 16
    if profile['blockxsize'] > profile['width']:
        if profile['width'] < 16:
            profile['tiled'] = False
        else:
            profile['blockxsize'] = profile['width'] // 16 * 16

    if profile['blockysize'] > profile['height']:
        if profile['height'] < 16:
            profile['tiled'] = False
        else:
            profile['blockysize'] = profile['height'] // 16 * 16

    if not profile.get('tiled', False):
        profile.pop('blockxsize', None)
        profile.pop('blockysize', None)

    with rio.Env():
        with rio.open(str(filename), 'w', sharing=False, **profile) as dest:
            if hasattr(dataset, 'data_vars'):
                for bandnum, data in enumerate(dataset.data_vars.values(),
                                               start=1):
                    if hasattr(data, 'compute'):
                        data = data.compute()
                    dest.write(data.values, bandnum)
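A minimal usage sketch for the write_geotiff helper above, assuming a dataset loaded through datacube so it carries the `geobox` attribute the function requires. The app name, product, query extents, and output path are placeholders introduced for illustration only, not part of the original example.

# Hedged usage sketch: 'ls8_nbar_albers', the query extents, and the output
# path are placeholders; any datacube-loaded xarray.Dataset with a `geobox`
# attribute should behave the same way.
import datacube

dc = datacube.Datacube(app='write-geotiff-example')
ds = dc.load(product='ls8_nbar_albers',
             x=(149.00, 149.10), y=(-35.30, -35.20),
             time=('2018-01-01', '2018-01-31'),
             dask_chunks={'x': 512, 'y': 512})   # lazy dask arrays are fine

# Write the first time slice; block sizes are clamped automatically for small
# rasters and dask arrays are computed band by band inside write_geotiff.
write_geotiff(ds, 'example_output.tif', time_index=0,
              profile_override={'compress': 'deflate'},
              overwrite=True)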
Example #19
def open(fp, mode='r', driver=None, width=None, height=None, count=None,
         crs=None, transform=None, dtype=None, nodata=None, **kwargs):
    """Open a dataset for reading or writing.

    The dataset may be located in a local file, in a resource located
    by a URL, or contained within a stream of bytes.

    To access a dataset within a zip file without unzipping the archive
    use an Apache VFS style zip:// URL like

      zip://path/to/archive.zip!path/to/example.tif

    In read ('r') or read/write ('r+') mode, no other keyword arguments
    are required: the attributes are supplied by the opened dataset.

    In write mode, a driver name such as "GTiff" or "JPEG" (see GDAL
    docs or ``gdal_translate --help`` on the command line), ``width``
    (number of pixels per line), ``height`` (number of lines), and the
    ``count`` of bands in the new file must be specified. Additionally,
    the data type of the bands, such as ``rasterio.ubyte`` for 8-bit
    bands or ``rasterio.uint16`` for 16-bit bands, must be specified
    using the ``dtype`` argument.

    Parameters
    ----------
    fp: string or file
        A filename or URL, or file object opened in binary mode.
    mode: string
        "r" (read), "r+" (read/write), or "w" (write)
    driver: string
        Driver code specifying the format name (e.g. "GTiff" or
        "JPEG"). See GDAL docs at
        http://www.gdal.org/formats_list.html (optional, required
        for writing).
    width: int
        Number of pixels per line (optional, required for write).
    height: int
        Number of lines (optional, required for write).
    count: int > 0
        Count of bands (optional, required for write).
    dtype: rasterio.dtype
        the data type for bands such as ``rasterio.ubyte`` for
        8-bit bands or ``rasterio.uint16`` for 16-bit bands
        (optional, required for write)
    crs: dict or string
        Coordinate reference system (optional, recommended for write).
    transform: Affine instance
        Affine transformation mapping the pixel space to geographic
        space (optional, recommended for writing).
    nodata: number
        Defines pixel value to be interpreted as null/nodata
        (optional, recommended for write, will be broadcast to all
        bands).

    Returns
    -------
    A ``DatasetReader`` or ``DatasetUpdater`` object.

    Notes
    -----
    In write mode, you must specify at least ``width``, ``height``,
    ``count`` and ``dtype``.

    A coordinate reference system for raster datasets in write mode
    can be defined by the ``crs`` argument. It takes Proj4 style
    mappings like

    .. code::

      {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84',
       'no_defs': True}

    An affine transformation that maps ``col,row`` pixel coordinates
    to ``x,y`` coordinates in the coordinate reference system can be
    specified using the ``transform`` argument. The value should be
    an instance of ``affine.Affine``:

    .. code:: python

        >>> from affine import Affine
        >>> transform = Affine(0.5, 0.0, -180.0, 0.0, -0.5, 90.0)

    These coefficients are shown in the figure below.

    .. code::

      | x |   | a  b  c | | c |
      | y | = | d  e  f | | r |
      | 1 |   | 0  0  1 | | 1 |

      a: rate of change of X with respect to increasing column,
         i.e. pixel width
      b: rotation, 0 if the raster is oriented "north up"
      c: X coordinate of the top left corner of the top left pixel
      d: rotation, 0 if the raster is oriented "north up"
      e: rate of change of Y with respect to increasing row,
         usually a negative number (i.e. -1 * pixel height) if
         north-up.
      f: Y coordinate of the top left corner of the top left pixel

    A 6-element sequence of the affine transformation matrix
    coefficients in ``c, a, b, f, d, e`` order, (i.e. GDAL
    geotransform order) will be accepted until 1.0 (deprecated).

    A virtual filesystem can be specified. The ``vfs`` parameter may
    be an Apache Commons VFS style string beginning with "zip://" or
    "tar://"". In this case, the ``path`` must be an absolute path
    within that container.
    """
    if not isinstance(fp, string_types):
        if not (hasattr(fp, 'read') or hasattr(fp, 'write')):
            raise TypeError("invalid path or file: {0!r}".format(fp))
    if mode and not isinstance(mode, string_types):
        raise TypeError("invalid mode: {0!r}".format(mode))
    if driver and not isinstance(driver, string_types):
        raise TypeError("invalid driver: {0!r}".format(driver))
    if dtype and not check_dtype(dtype):
        raise TypeError("invalid dtype: {0!r}".format(dtype))
    if nodata is not None:
        nodata = float(nodata)
    if 'affine' in kwargs:
        # DeprecationWarning's are ignored by default
        with warnings.catch_warnings():
            warnings.warn(
                "The 'affine' kwarg in rasterio.open() is deprecated at 1.0 "
                "and only remains to ease the transition.  Please switch to "
                "the 'transform' kwarg.  See "
                "https://github.com/mapbox/rasterio/issues/86 for details.",
                DeprecationWarning,
                stacklevel=2)

            if transform:
                warnings.warn(
                    "Found both 'affine' and 'transform' in rasterio.open() - "
                    "choosing 'transform'")
                transform = transform
            else:
                transform = kwargs.pop('affine')

    if transform:
        transform = guard_transform(transform)

    # Check driver/mode blacklist.
    if driver and is_blacklisted(driver, mode):
        raise RasterioIOError(
            "Blacklisted: file cannot be opened by "
            "driver '{0}' in '{1}' mode".format(driver, mode))

    # Special case for file object argument.
    if mode == 'r' and hasattr(fp, 'read'):

        @contextmanager
        def fp_reader(fp):
            memfile = MemoryFile(fp.read())
            dataset = memfile.open()
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.close()

        return fp_reader(fp)

    elif mode == 'w' and hasattr(fp, 'write'):

        @contextmanager
        def fp_writer(fp):
            memfile = MemoryFile()
            dataset = memfile.open(driver=driver, width=width, height=height,
                                   count=count, crs=crs, transform=transform,
                                   dtype=dtype, nodata=nodata, **kwargs)
            try:
                yield dataset
            finally:
                dataset.close()
                memfile.seek(0)
                fp.write(memfile.read())
                memfile.close()

        return fp_writer(fp)

    else:
        # The 'normal' filename or URL path.
        _, _, scheme = parse_path(fp)

        with Env() as env:
            # Get AWS credentials only if we're attempting to access a
            # raster using the S3 scheme.
            if scheme == 's3':
                env.get_aws_credentials()
                log.debug("AWS credentials have been obtained")

            # Create dataset instances and pass the given env, which will
            # be taken over by the dataset's context manager if it is not
            # None.
            if mode == 'r':
                s = DatasetReader(fp)
            elif mode == 'r-':
                warnings.warn("'r-' mode is deprecated, use 'r'",
                              DeprecationWarning)
                s = DatasetReader(fp)
            elif mode == 'r+':
                s = get_writer_for_path(fp)(fp, mode)
            elif mode == 'w':
                s = get_writer_for_driver(driver)(fp, mode, driver=driver,
                                                  width=width, height=height,
                                                  count=count, crs=crs,
                                                  transform=transform,
                                                  dtype=dtype, nodata=nodata,
                                                  **kwargs)
            else:
                raise ValueError(
                    "mode must be one of 'r', 'r+', or 'w', not %s" % mode)
            return s
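A self-contained sketch of the write path described in the docstring above: it builds the Affine transform from the coefficients a, b, c, d, e, f shown in the figure, passes a Proj4-style ``crs`` mapping, and writes a single uint8 band. The file name and array contents are arbitrary and introduced only for this sketch.

import numpy as np
import rasterio
from affine import Affine

# 0.5-degree pixels, north-up: a = 0.5 (pixel width), e = -0.5 (negative pixel
# height), c/f = x/y of the top-left corner of the top-left pixel.
transform = Affine(0.5, 0.0, -180.0, 0.0, -0.5, 90.0)
assert transform * (0, 0) == (-180.0, 90.0)   # col,row -> x,y of that corner

data = np.zeros((360, 720), dtype=np.uint8)   # 360 rows (height) x 720 cols

# 'example.tif' is an arbitrary output path for this sketch.
with rasterio.open('example.tif', 'w', driver='GTiff',
                   width=720, height=360, count=1,
                   dtype=rasterio.uint8, nodata=0,
                   crs={'proj': 'longlat', 'ellps': 'WGS84',
                        'datum': 'WGS84', 'no_defs': True},
                   transform=transform) as dst:
    dst.write(data, 1)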
Example #20
def write_geotiff(dataset,
                  filename,
                  time_index=None,
                  profile_override=None,
                  overwrite=False):
    """
    Write an xarray dataset to a geotiff
        Modified from datacube.helpers.write_geotiff to support:
            - dask lazy arrays,
            - arrays with no time dimension
            - Nodata values
            - Small rasters (row or cols < blocksize)
            - Nodata values
            - dtype checks and upcasting
            - existing output checks
        https://github.com/opendatacube/datacube-core/blob/develop/datacube/helpers.py
        Original code licensed under the Apache License, Version 2.0 (the "License");

    :param Union(str, Path) filename: Output filename
    :param xarray.Dataset dataset: xarray dataset containing multiple bands to write to file
    :param int time_index: time index to write to file
    :param dict profile_override: option dict, overrides rasterio file creation options.
    :param bool overwrite: Allow overwriting existing files.

    """

    filepath = Path(filename)
    if filepath.exists() and not overwrite:
        raise RuntimeError('Output file exists "{}"'.format(filename))

    profile_override = profile_override or {}

    try:
        dtypes = {val.dtype for val in dataset.data_vars.values()}
        assert len(dtypes) == 1  # Check for multiple dtypes
    except AttributeError:
        dtypes = [dataset.dtype]
    dtype = dtypes.pop()

    if not check_dtype(dtype):  # Check for invalid dtypes
        dataset, dtype = upcast(dataset, dtype)

    dimx = dataset.dims[dataset.crs.dimensions[1]]
    dimy = dataset.dims[dataset.crs.dimensions[0]]

    profile = GTIFF_DEFAULTS.copy()
    profile.update({
        'width': dimx,
        'height': dimy,
        'transform': dataset.affine,
        'crs': dataset.crs.crs_str,
        'count': len(dataset.data_vars),
        'dtype': str(dtype)
    })
    profile.update(profile_override)
    profile = lcase_dict(profile)

    blockx = profile.get('blockxsize')
    blocky = profile.get('blockysize')
    if (blockx and blockx > dimx) or (blocky and blocky > dimy):
        del profile['blockxsize']
        del profile['blockysize']
        profile['tiled'] = False

    with rio.open(str(filename), 'w', **profile) as dest:
        for bandnum, data in enumerate(dataset.data_vars.values(), start=1):
            # Apply the band's nodata value to the open dataset; rasterio
            # broadcasts a dataset-level nodata to all bands.
            nodata = getattr(data, 'nodata', None)
            if nodata is not None:
                dest.nodata = nodata

            if time_index is None:
                data = data.data
            else:
                data = data.isel(time=time_index).data

            if isinstance(data, dask.array.Array):
                data = data.compute()

            dest.write(data, bandnum)
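Both write_geotiff examples lean on helpers (GTIFF_DEFAULTS, lcase_dict, get_dtype, get_nodatavals, upcast) that are never defined in the snippets. The sketches below are plausible stand-ins under that assumption, not the original implementations.

# Plausible stand-ins for the undefined helpers used above; the real project
# code may differ in detail.
import numpy as np
from rasterio.dtypes import check_dtype

GTIFF_DEFAULTS = {          # assumed GeoTIFF creation defaults
    'driver': 'GTiff',
    'interleave': 'band',
    'tiled': True,
    'blockxsize': 256,
    'blockysize': 256,
    'compress': 'lzw',
}


def lcase_dict(d):
    """Lower-case the keys so user overrides line up with the defaults."""
    return {key.lower(): value for key, value in d.items()}


def get_dtype(dataset):
    """Return the single dtype shared by every data variable."""
    dtypes = {var.dtype for var in dataset.data_vars.values()}
    if len(dtypes) != 1:
        raise ValueError('All bands must share one dtype, got: {}'.format(dtypes))
    return dtypes.pop()


def get_nodatavals(dataset):
    """Return per-band nodata values, or None if the bands define none."""
    try:
        return [var.nodata for var in dataset.data_vars.values()]
    except AttributeError:
        return None


def upcast(dataset, dtype):
    """Promote a dtype rasterio cannot write (e.g. int8, bool) to one it can."""
    new_dtype = np.promote_types(dtype, 'uint8')   # simple promotion rule
    if not check_dtype(new_dtype):
        new_dtype = np.dtype('float64')            # last-resort fallback
    return dataset.astype(new_dtype), new_dtype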