Example #1
def pix_to_coord(
    transform_array: Union[List, np.ndarray],
    row: Union[int, np.ndarray],
    col: Union[List, np.ndarray],
) -> Tuple[np.ndarray, np.ndarray]:
    """
    Transform pixels to coordinates

    :param transform_array: Transform
    :type transform_array: List or np.ndarray
    :param row: row
    :type row: int or np.ndarray
    :param col: column
    :type col: List or np.ndarray
    :return: x,y
    :rtype: np.ndarray, np.ndarray
    """
    transform = Affine.from_gdal(
        transform_array[0],
        transform_array[1],
        transform_array[2],
        transform_array[3],
        transform_array[4],
        transform_array[5],
    )
    # Set the offset to ul (upper left)
    x, y = rasterio.transform.xy(transform, row, col, offset="ul")

    if not isinstance(x, int):
        x = np.array(x)
        y = np.array(y)

    return x, y
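
A minimal usage sketch of the function above, with made-up geotransform values and the imports the snippet relies on:

from typing import List, Tuple, Union

import numpy as np
import rasterio.transform
from affine import Affine

# hypothetical GDAL-style geotransform: origin (100000, 500000), 10 m pixels
gt = [100000.0, 10.0, 0.0, 500000.0, 0.0, -10.0]
x, y = pix_to_coord(gt, np.array([0, 1]), np.array([0, 1]))
# upper-left corners: x == [100000., 100010.], y == [500000., 499990.]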
Example #2
import idfpy
import rasterio
from rasterio import Affine
from rasterio.crs import CRS


def main():
    samplefile = r'bxk1-d-ck.idf'
    tiffile = samplefile.replace('.idf', '.geotiff')
    dtype = rasterio.float64
    driver = 'AAIGrid'
    crs = CRS.from_epsg(28992)

    # read data from idf file
    idffile = idfpy.IdfFile(filepath=samplefile, mode='rb')
    geotransform = idffile.geotransform
    height = idffile.header['nrow']
    width = idffile.header['ncol']
    nodata = idffile.header['nodata']
    transform = Affine.from_gdal(*geotransform)

    # write data from idf file to geotiff with rasterio
    profile = {
        'width': width,
        'height': height,
        'count': 1,
        'dtype': dtype,
        'driver': driver,
        'crs': crs,
        'transform': transform,
        'nodata': nodata,
    }

    # the default profile would be sufficient for this example; the explicit dict
    # shows how to build an export profile
    idffile.to_raster(tiffile, **profile)
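
The core of the example is the GDAL-to-Affine conversion. As a reference, a small sketch (with made-up values) of how Affine.from_gdal reorders the six GDAL geotransform coefficients:

from affine import Affine

# GDAL order: (x origin, pixel width, row rotation, y origin, column rotation, pixel height)
gt = (0.0, 25.0, 0.0, 625000.0, 0.0, -25.0)  # hypothetical geotransform
assert Affine.from_gdal(*gt) == Affine(25.0, 0.0, 0.0, 0.0, -25.0, 625000.0)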
Example #3
def example_reproject():
    import idfpy

    from matplotlib import pyplot as plt
    from rasterio import Affine
    from rasterio.crs import CRS
    from rasterio.warp import reproject, Resampling
    import numpy as np

    with idfpy.open('bxk1-d-ck.idf') as src:
        a = src.read(masked=True)
        nr, nc = src.header['nrow'], src.header['ncol']
        dx, dy = src.header['dx'], src.header['dy']
        src_transform = Affine.from_gdal(*src.geotransform)

    # define new grid transform (same extent, 10 times resolution)
    dst_transform = Affine.translation(src_transform.c, src_transform.f)
    dst_transform *= Affine.scale(dx / 10., -dy / 10.)

    # define coordinate system (here RD New)
    src_crs = CRS.from_epsg(28992)

    # initialize new data array
    b = np.empty((10 * nr, 10 * nc))

    # reproject using Rasterio
    reproject(
        source=a,
        destination=b,
        src_transform=src_transform,
        dst_transform=dst_transform,
        src_crs=src_crs,
        dst_crs=src_crs,
        resampling=Resampling.bilinear,
    )

    # result as masked array
    b = np.ma.masked_equal(b, a.fill_value)

    # plot images
    fig, axes = plt.subplots(nrows=2, ncols=1)
    axes[0].imshow(a.filled(np.nan))
    axes[0].set_title('bxk1 original')
    axes[1].imshow(b.filled(np.nan))
    axes[1].set_title('bxk1 resampled')
    plt.show()
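
If the source transform has no rotation terms (a plain north-up grid, as assumed here), the same destination grid can be built by scaling the source transform directly:

# equivalent to the translation/scale construction above for a north-up transform
dst_transform = src_transform * Affine.scale(1. / 10.)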
Example #5
    def to_raster(self, fp=None, epsg=28992, driver='AAIGrid'):
        """export Idf to a geotiff"""
        self.check_read()

        if fp is None:
            fp = self.filepath.replace('.idf', '.geotiff')
            logging.warning('no filepath was given, exporting to {fp}'.format(fp=fp))

        # set profile
        profile = {
            'width': self.header['ncol'],
            'height': self.header['nrow'],
            'transform': Affine.from_gdal(*self.geotransform),
            'nodata': self.header['nodata'],
            'count': 1,
            'dtype': rasterio.float64,
            'driver': driver,
            'crs': CRS.from_epsg(epsg),
        }

        logging.info('writing to {f:}'.format(f=fp))
        with rasterio.open(fp, 'w', **profile) as dst:
            dst.write(self.masked_data.astype(profile['dtype']), 1)
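
A hypothetical call of the method above, assuming idfpy.open yields objects of this class:

with idfpy.open('bxk1-d-ck.idf') as src:
    src.to_raster('bxk1-d-ck.tif', epsg=28992, driver='GTiff')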
Example #6
    # Consider a 512 x 512 raster centered on 0 degrees E and 0 degrees N
    # with each pixel covering 15".
    rows, cols = src_shape = (512, 512)
    dpp = 1.0 / 240  # decimal degrees per pixel
    # The following is equivalent to
    # A(dpp, 0, -cols*dpp/2, 0, -dpp, rows*dpp/2).
    src_transform = A.translation(-cols * dpp / 2, rows * dpp / 2) * A.scale(
        dpp, -dpp)
    src_crs = {'init': 'EPSG:4326'}
    source = numpy.ones(src_shape, numpy.uint8) * 255

    # Prepare to reproject this raster to a 1024 x 1024 dataset in
    # Web Mercator (EPSG:3857) with origin at -237481.5, 237536.4.
    dst_shape = (1024, 1024)
    dst_transform = A.from_gdal(-237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0)
    dst_crs = {'init': 'EPSG:3857'}
    destination = numpy.zeros(dst_shape, numpy.uint8)

    reproject(source,
              destination,
              src_transform=src_transform,
              src_crs=src_crs,
              dst_transform=dst_transform,
              dst_crs=dst_crs,
              resampling=RESAMPLING.nearest)

    # Assert that the destination is only partly filled.
    assert destination.any()
    assert not destination.all()
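
The equivalence stated in the comment above can be checked directly; a short sketch, assuming affine.Affine is imported as A:

from affine import Affine as A

dpp, rows, cols = 1.0 / 240, 512, 512
lhs = A.translation(-cols * dpp / 2, rows * dpp / 2) * A.scale(dpp, -dpp)
rhs = A(dpp, 0, -cols * dpp / 2, 0, -dpp, rows * dpp / 2)
assert lhs == rhs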
Example #7
with rasterio.drivers():

    # Consider a 512 x 512 raster centered on 0 degrees E and 0 degrees N
    # with each pixel covering 15".
    rows, cols = src_shape = (512, 512)
    dpp = 1.0/240 # decimal degrees per pixel
    # The following is equivalent to 
    # A(dpp, 0, -cols*dpp/2, 0, -dpp, rows*dpp/2).
    src_transform = A.translation(-cols*dpp/2, rows*dpp/2) * A.scale(dpp, -dpp)
    src_crs = {'init': 'EPSG:4326'}
    source = numpy.ones(src_shape, numpy.uint8)*255

    # Prepare to reproject this raster to a 1024 x 1024 dataset in
    # Web Mercator (EPSG:3857) with origin at -237481.5, 237536.4.
    dst_shape = (1024, 1024)
    dst_transform = A.from_gdal(-237481.5, 425.0, 0.0, 237536.4, 0.0, -425.0)
    dst_crs = {'init': 'EPSG:3857'}
    destination = numpy.zeros(dst_shape, numpy.uint8)

    reproject(
        source, 
        destination, 
        src_transform=src_transform,
        src_crs=src_crs,
        dst_transform=dst_transform,
        dst_crs=dst_crs,
        resampling=RESAMPLING.nearest)

    # Assert that the destination is only partly filled.
    assert destination.any()
Example #8
def reproject_dataset(dataset: xr.Dataset,
                      from_dataset: xr.Dataset,
                      interp: str = "bilinear") -> xr.Dataset:
    """
    Reproject a dataset and return the corresponding xarray.Dataset

    :param dataset: Dataset to reproject
    :type dataset: xr.Dataset
    :param from_dataset: Dataset to get projection from
    :type from_dataset: xr.Dataset
    :param interp: interpolation method
    :type interp: str
    :return: reprojected dataset
    :rtype: xr.Dataset
    """

    # Copy dataset
    reprojected_dataset = copy.copy(from_dataset)

    interpolation_method = Resampling.bilinear
    if interp == "bilinear":
        interpolation_method = Resampling.bilinear
    elif interp == "nearest":
        interpolation_method = Resampling.nearest
    else:
        logging.warning(
            "Interpolation method not available, using default 'bilinear'")

    src_transform = Affine.from_gdal(
        dataset["trans"].data[0],
        dataset["trans"].data[1],
        dataset["trans"].data[2],
        dataset["trans"].data[3],
        dataset["trans"].data[4],
        dataset["trans"].data[5],
    )
    dst_transform = Affine.from_gdal(
        from_dataset["trans"].data[0],
        from_dataset["trans"].data[1],
        from_dataset["trans"].data[2],
        from_dataset["trans"].data[3],
        from_dataset["trans"].data[4],
        from_dataset["trans"].data[5],
    )

    source_array = dataset["im"].data
    dest_array = np.zeros_like(from_dataset["im"].data)
    dest_array[:, :] = -9999

    src_crs = rasterio.crs.CRS.from_dict(dataset.attrs["georef"])
    dst_crs = rasterio.crs.CRS.from_dict(from_dataset.attrs["georef"])

    # reproject
    reproject(
        source=source_array,
        destination=dest_array,
        src_transform=src_transform,
        src_crs=src_crs,
        dst_transform=dst_transform,
        dst_crs=dst_crs,
        resampling=interpolation_method,
        src_nodata=dataset.attrs["no_data"],
        dst_nodata=-9999,
    )

    # change output dataset
    dest_array[dest_array == -9999] = np.nan
    reprojected_dataset["im"].data = dest_array
    reprojected_dataset.attrs["no_data"] = dataset.attrs["no_data"]

    return reprojected_dataset
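
Since dataset["trans"] already stores the six coefficients in GDAL order, the explicit indexing above could also be written more compactly (a style note only, same behavior):

src_transform = Affine.from_gdal(*dataset["trans"].data[:6])
dst_transform = Affine.from_gdal(*from_dataset["trans"].data[:6])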
Example #9
def save_tif(dataset: xr.Dataset,
             filename: str,
             new_array=None,
             no_data: float = -32768) -> xr.Dataset:
    """
    Write a Dataset to a TIFF file.
    If new_array is set, new_array is used as data.

    :param dataset: dataset
    :param filename:  output filename
    :param new_array:  new array to write
    :param no_data:  value of nodata to use
    :return: dataset
    """

    # update from dataset
    previous_profile = {}
    previous_profile["crs"] = rasterio.crs.CRS.from_dict(
        dataset.attrs["georef"])
    previous_profile["transform"] = Affine.from_gdal(
        dataset["trans"].data[0],
        dataset["trans"].data[1],
        dataset["trans"].data[2],
        dataset["trans"].data[3],
        dataset["trans"].data[4],
        dataset["trans"].data[5],
    )

    data = dataset["im"].data
    if new_array is not None:
        data = new_array

    if len(dataset["im"].shape) == 2:
        row, col = data.shape
        with rasterio.open(
                filename,
                mode="w+",
                driver="GTiff",
                width=col,
                height=row,
                count=1,
                dtype=data.dtype,
                crs=previous_profile["crs"],
                transform=previous_profile["transform"],
        ) as source_ds:
            source_ds.nodata = no_data
            source_ds.write(data, 1)

    else:
        row, col, depth = data.shape
        with rasterio.open(
                filename,
                mode="w+",
                driver="GTiff",
                width=col,
                height=row,
                count=depth,
                dtype=data.dtype,
                crs=previous_profile["crs"],
                transform=previous_profile["transform"],
        ) as source_ds:
            for dsp in range(1, depth + 1):
                source_ds.write(data[:, :, dsp - 1], dsp)

    new_dataset = copy.deepcopy(dataset)
    # update dataset input_img with new filename
    new_dataset.attrs["input_img"] = filename

    return new_dataset
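
rasterio expects band-major arrays of shape (count, height, width), which is why the multiband branch writes the (row, col, depth) array one band at a time. Assuming the array fits in memory, the loop inside that with block could also be a single write:

# equivalent to the per-band loop for data of shape (row, col, depth)
source_ds.write(np.moveaxis(data, -1, 0))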
Example #10
def translate_to_coregistered_geometry(
    dem: xr.Dataset,
    ref: xr.Dataset,
    dx: int,
    dy: int,
    interpolator: str = "bilinear",
) -> Tuple[xr.Dataset, xr.Dataset]:
    """
    Translate both DSMs to their coregistered geometry.

    Note that :

    The ref georef-origin is assumed to be the reference

    The ref shall be the one resampled at dem's
    georef-grid as it supposedly is the cleaner one.

    Hence, dem is only cropped, and ref is
    projected on dem's georef-grid, so it might be resampled.
    However, the dem's georef-origin is translated
    to the ref's georef-origin, which is considered the reference.

    :param dem: dataset, master dem
    :type dem: xr.Dataset
    :param ref: dataset, slave dem
    :type ref: xr.Dataset
    :param dx: f, dx value in pixels
    :type dx: int
    :param dy: f, dy value in pixels
    :type dy: int
    :param interpolator: gdal interpolator
    :type interpolator: str
    :return: coregistered DEM as datasets
    :rtype: xr.Dataset, xr.Dataset
    """

    #
    # Translate the georef-origin of dem based on dx and dy values
    #   -> this makes dem coregistered on ref
    dem = translate(dem, dx, dy)

    #
    # Intersect and reproject both dsms.
    #   -> intersect them to the biggest common grid
    #       now that they have been shifted
    #   -> dem is then cropped with intersect so that it lies within intersect
    #       but is not resampled in the process
    #   -> reproject ref to dem's georef-grid,
    #       the intersection grid sampled on dem's grid
    #
    transform_dem = Affine.from_gdal(
        dem["trans"].data[0],
        dem["trans"].data[1],
        dem["trans"].data[2],
        dem["trans"].data[3],
        dem["trans"].data[4],
        dem["trans"].data[5],
    )
    # rasterio.transform.array_bounds expects (height, width, transform)
    bounds_dem = rasterio.transform.array_bounds(dem["im"].data.shape[0],
                                                 dem["im"].data.shape[1],
                                                 transform_dem)

    transform_ref = Affine.from_gdal(
        ref["trans"].data[0],
        ref["trans"].data[1],
        ref["trans"].data[2],
        ref["trans"].data[3],
        ref["trans"].data[4],
        ref["trans"].data[5],
    )
    bounds_ref = rasterio.transform.array_bounds(ref["im"].data.shape[0],
                                                 ref["im"].data.shape[1],
                                                 transform_ref)

    intersection_roi = (
        max(bounds_dem[0], bounds_ref[0]),
        max(bounds_dem[1], bounds_ref[1]),
        min(bounds_dem[2], bounds_ref[2]),
        min(bounds_dem[3], bounds_ref[3]),
    )

    # get the crop polygon
    polygon_roi = bounding_box_to_polygon(
        intersection_roi[0],
        intersection_roi[1],
        intersection_roi[2],
        intersection_roi[3],
    )
    geom_like_polygon = {"type": "Polygon", "coordinates": [polygon_roi]}

    # crop dem
    srs_dem = rasterio.open(
        " ",
        mode="w+",
        driver="GTiff",
        width=dem["im"].data.shape[1],
        height=dem["im"].data.shape[0],
        count=1,
        dtype=dem["im"].data.dtype,
        crs=dem.attrs["georef"],
        transform=transform_dem,
    )
    srs_dem.write(dem["im"].data, 1)
    new_cropped_dem, new_cropped_dem_transform = rasterio.mask.mask(
        srs_dem, [geom_like_polygon], all_touched=True, crop=True)

    # create datasets
    reproj_dem = copy.copy(dem)
    reproj_dem["trans"].data = np.array(new_cropped_dem_transform.to_gdal())
    reproj_dem = read_img_from_array(
        new_cropped_dem[0, :, :],
        from_dataset=reproj_dem,
        no_data=dem.attrs["no_data"],
    )

    # Reference DEM is reprojected into the sec DEM's georef-grid
    # Crop and resample are performed on the reference DEM
    reproj_ref = reproject_dataset(ref, reproj_dem, interp=interpolator)

    return reproj_dem, reproj_ref
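
Opening " " in w+ mode creates a scratch file (literally named with a space) on disk; a rasterio MemoryFile keeps the intermediate dataset in memory instead. A sketch of that alternative for the cropping step above, using the same variables (not the package's actual implementation):

from rasterio.io import MemoryFile

with MemoryFile() as memfile:
    with memfile.open(
        driver="GTiff",
        width=dem["im"].data.shape[1],
        height=dem["im"].data.shape[0],
        count=1,
        dtype=dem["im"].data.dtype,
        crs=dem.attrs["georef"],
        transform=transform_dem,
    ) as tmp_ds:
        tmp_ds.write(dem["im"].data, 1)
        new_cropped_dem, new_cropped_dem_transform = rasterio.mask.mask(
            tmp_ds, [geom_like_polygon], all_touched=True, crop=True)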
Example #11
import xarray as xr

#import cartopy.crs as ccrs
#import cartopy.feature as cfeature
#import matplotlib.ticker as mticker

#from pyproj import Proj, transform
#from rasterio import Affine
#from rasterio.warp import reproject, Resampling
#from mpl_toolkits.basemap import Basemap




from affine import Affine as A

geotransform = (-3272421.457337171, 2539.703, 0.0, 3790842.1060354356, 0.0, -2539.703)
fwd = A.from_gdal(*geotransform)
col, row = 0, 100
fwd * (col, row)
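# fwd * (col, row) maps pixel indices to the world coordinates of that pixel's
# upper-left corner; with this geotransform, fwd * (0, 100) is roughly
# (-3272421.46, 3536871.81)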

'PROJCS["unnamed",GEOGCS["Coordinate System imported from GRIB file",DATUM["unknown",SPHEROID["Sphere",6371200,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",25],PARAMETER["standard_parallel_2",25],PARAMETER["latitude_of_origin",25],PARAMETER["central_meridian",265],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Metre",1]]'


import rasterio.crs

  # note: the plain Affine constructor expects coefficients in (a, b, c, d, e, f) order,
  # not GDAL order, so a GDAL geotransform must go through A.from_gdal rather than A(...)
transform = A.from_gdal(*geotransform)


with rasterio.Env():

    # As source: a 512 x 512 raster centered on 0 degrees E and 0
Example #12
def vectorizeRaster(infile, outfile, classes, classfile, weight, nodata,
                    smoothing, band, cartoCSS, axonometrize, nosimple,
                    setNoData, nibbleMask, outvar):
    band = int(band)
    src = gdal.Open(infile)
    bandData = src.GetRasterBand(band)
    inarr = bandData.ReadAsArray()

    if (inarr is None) or (len(inarr) == 0):
        gdal.SetConfigOption('GDAL_NETCDF_BOTTOMUP', 'NO')
        src = gdal.Open(infile)
        bandData = src.GetRasterBand(band)
        inarr = bandData.ReadAsArray()

    oshape = np.shape(inarr)

    if len(src.GetProjectionRef()) > 0:
        new_cs = osr.SpatialReference()
        new_cs.ImportFromEPSG(4326)
        old_cs = osr.SpatialReference()
        old_cs.ImportFromWkt(src.GetProjectionRef())
        transform = osr.CoordinateTransformation(old_cs, new_cs)

    oaff = Affine.from_gdal(*src.GetGeoTransform())
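    # oaff is the affine transform later passed to rasterio.features.shapes to map
    # array indices back to the source raster's georeferenced coordinates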

    bbox = src.GetGeoTransform()
    nodata = None

    if type(bandData.GetNoDataValue()) == float:
        nodata = bandData.GetNoDataValue()

    if (type(setNoData) == int or type(setNoData) == float) and hasattr(
            inarr, 'mask'):
        inarr[np.where(inarr.mask == True)] = setNoData
        nodata = True

    nlat, nlon = np.shape(inarr)
    dataY = np.arange(nlat) * bbox[5] + bbox[3]
    dataX = np.arange(nlon) * bbox[1] + bbox[0]

    if len(src.GetProjectionRef()) > 0:
        ul = transform.TransformPoint(min(dataX), max(dataY))
        ll = transform.TransformPoint(min(dataX), min(dataY))
        ur = transform.TransformPoint(max(dataX), max(dataY))
        lr = transform.TransformPoint(max(dataX), min(dataY))
        simplestY1 = (abs(ul[1] - ll[1]) / float(oshape[0]))
        simplestY2 = (abs(ur[1] - lr[1]) / float(oshape[0]))
        simplestX1 = (abs(ur[0] - ul[0]) / float(oshape[1]))
        simplestX2 = (abs(lr[0] - ll[0]) / float(oshape[1]))
        simplest = 2 * max(simplestX1, simplestY1, simplestX2, simplestY2)
    else:
        simplestY = ((max(dataY) - min(dataY)) / float(oshape[0]))
        simplestX = ((max(dataX) - min(dataX)) / float(oshape[1]))
        simplest = 2 * max(simplestX, simplestY)

    if nodata == 'min':
        maskArr = np.zeros(inarr.shape, dtype=bool)
        maskArr[np.where(inarr == inarr.min())] = True
        inarr = np.ma.array(inarr, mask=maskArr)
        del maskArr
    elif type(nodata) == int or type(nodata) == float:
        maskArr = np.zeros(inarr.shape, dtype=bool)
        maskArr[np.where(inarr == nodata)] = True
        inarr[np.where(inarr == nodata)] = np.nan
        inarr = np.ma.array(inarr, mask=maskArr)
        del maskArr
    elif nodata == None or np.isnan(nodata) or nodata:
        maskArr = np.zeros(inarr.shape, dtype=bool)
        inarr = np.ma.array(inarr, mask=maskArr)
        del maskArr
    elif (type(nodata) == int or type(nodata) == float) and hasattr(
            inarr, 'mask'):
        nodata = True

    if nibbleMask:
        inarr.mask = maximum_filter(inarr.mask, size=3)

    if smoothing and smoothing > 1:
        inarr, oaff = zoomSmooth(inarr, smoothing, oaff)
    else:
        smoothing = 1

    with open(classfile, 'r') as ofile:
        classifiers = ofile.read().split(',')
        classRas, breaks = classifyManual(
            inarr,
            np.array(classifiers).astype(inarr.dtype))

    # filtering for speckling
    classRas = median_filter(classRas, size=2)

    # print out cartocss for classes
    if cartoCSS:
        for i in breaks:
            click.echo('[value = ' + str(breaks[i]) +
                       '] { polygon-fill: @class' + str(i) + '}')

    if outfile:
        outputHandler = tools.dataOutput(True)
    else:
        outputHandler = tools.dataOutput()
    #polys = []
    #vals = []
    for i, br in enumerate(breaks):
        if i == 0:
            continue
        tRas = (classRas == i).astype(np.uint8)
        if nodata:
            tRas[np.where(classRas == 0)] = 0

        for feature, shapes in features.shapes(np.asarray(tRas, order='C'),
                                               transform=oaff):
            if shapes == 1:
                featurelist = []
                for c, f in enumerate(feature['coordinates']):
                    if len(src.GetProjectionRef()) > 0:
                        for ix in range(len(f)):
                            px = transform.TransformPoint(f[ix][0], f[ix][1])
                            lst = list()
                            lst.append(px[0])
                            lst.append(px[1])
                            f[ix] = tuple(lst)
                    if len(f) > 3 or c == 0:
                        if axonometrize:
                            f = np.array(f)
                            f[:, 1] += (axonometrize * br)
                        if nosimple:
                            poly = Polygon(f)
                        else:
                            poly = Polygon(f).simplify(simplest /
                                                       float(smoothing),
                                                       preserve_topology=True)
                            if c == 0:
                                poly = polygon.orient(poly, sign=-1.0)
                            else:
                                poly = polygon.orient(poly, sign=1.0)
                            featurelist.append(poly)
                if len(featurelist) != 0:
                    #polys.append(MultiPolygon(featurelist))
                    #vals.append(breaks[br])
                    outputHandler.out({
                        'type':
                        'Feature',
                        'geometry':
                        mapping(MultiPolygon(featurelist)),
                        'properties': {
                            outvar: breaks[br]
                        }
                    })

    #for pa in range(0,len(polys)):
    #    for pb in range(0,len(polys)):
    #        if pa==pb:
    #            continue
    #        if polys[pa].contains(polys[pb]) & (polys[pa].area>polys[pb].area):
    #            try:
    #                polys[pa] = polys[pa].difference(polys[pb])
    #                print polys[pa].area
    #                print '---'
    #                break
    #            except:
    #                a = 1
    #
    #for pc in range(0,len(polys)):
    #    outputHandler.out({
    #        'type': 'Feature',
    #        'geometry': mapping(polys[pc]),
    #        'properties': {
    #            outvar: vals[pc]
    #        }
    #    })
    if outfile:
        with open(outfile, 'w') as ofile:
            ofile.write(
                json.dumps({
                    "type": "FeatureCollection",
                    "features": outputHandler.data
                }))