Example #1
 def test_assign_large_hazard_subset_pass(self):
     """Test assign_centroids with raster hazard"""
     exp = Exposures()
     exp.set_from_raster(HAZ_DEMO_FL, window=Window(10, 20, 50, 60))
     exp.gdf.latitude[[0, 1]] = exp.gdf.latitude[[1, 0]]
     exp.gdf.longitude[[0, 1]] = exp.gdf.longitude[[1, 0]]
     exp.check()
     haz = Hazard('FL')
     haz.set_raster([HAZ_DEMO_FL])
     haz.raster_to_vector()
     exp.assign_centroids(haz)
     assigned_centroids = haz.centroids.select(
         sel_cen=exp.gdf[INDICATOR_CENTR + 'FL'].values)
     np.testing.assert_array_equal(assigned_centroids.lat, exp.gdf.latitude)
     np.testing.assert_array_equal(assigned_centroids.lon,
                                   exp.gdf.longitude)
Example #2
def detect_data(dataset, shapes, bounds):
    """Detect if any data pixels are found in shapes.

    Typically this is performed against a reduced resolution version of a data
    file as a pre-screening step.

    Parameters
    ----------
    dataset : open rasterio dataset
    shapes : list-like of GeoJSON features
    bounds : list-like of [xmin, ymin, xmax, ymax]

    Returns
    -------
    bool
        Returns True if there are data pixels present
    """
    window = get_window(dataset, bounds)
    raster_window = Window(0, 0, dataset.width, dataset.height)

    try:
        # This will raise a WindowError if windows do not overlap
        window = window.intersection(raster_window)
    except WindowError:
        # no overlap => no data
        return False

    data = dataset.read(1, window=window)
    nodata = int(dataset.nodata)

    if not np.any(data != nodata):
        # entire window is nodata
        return False

    # create mask
    # note: this intentionally uses all_touched=True
    mask = geometry_mask(
        shapes,
        transform=dataset.window_transform(window),
        out_shape=data.shape,
        all_touched=True,
    ) | (data == nodata)

    if np.any(data[~mask]):
        return True

    return False
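A minimal usage sketch for the function above. The file path and polygon are placeholders, and the `get_window` helper plus the `geometry_mask`/`WindowError` imports used inside `detect_data` are assumed to be available in the same module:

import rasterio

# Hypothetical low-resolution pre-screening raster (with a defined nodata value)
# and a GeoJSON-like polygon; coordinates are in the dataset's CRS.
aoi = {"type": "Polygon",
       "coordinates": [[(-120.5, 35.0), (-120.0, 35.0), (-120.0, 35.5),
                        (-120.5, 35.5), (-120.5, 35.0)]]}

with rasterio.open("overview.tif") as dataset:
    if detect_data(dataset, [aoi], bounds=(-120.5, 35.0, -120.0, 35.5)):
        print("data pixels present inside the shapes")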
Example #3
def AggregateMetric(datafile, output, fun, dtype, nodata, processes, **kwargs):

    mask_file = config.filename('subgrid_mask')

    with rio.open(mask_file) as ds:

        mask = ds.read(1)
        height, width = mask.shape

        out = np.zeros_like(mask, dtype=dtype)

        with rio.open(datafile) as datas:

            resolution = int(ds.transform.a // datas.transform.a)
            half_resolution = int(resolution // 2)
            arguments = list()

            for i in range(height):
                for j in range(width):

                    if mask[i, j] == 0:

                        x, y = ds.xy(i, j)
                        di, dj = datas.index(x, y)

                        window = Window(dj - half_resolution,
                                        di - half_resolution, resolution,
                                        resolution)

                        arguments.append((AggregateCell, i, j, datafile,
                                          window, fun, nodata, kwargs))

        with Pool(processes=processes) as pool:

            pooled = pool.imap_unordered(starcall, arguments)

            with click.progressbar(pooled, length=len(arguments)) as iterator:
                for i, j, value in iterator:
                    out[i, j] = value

        out[mask == 1] = nodata

        profile = ds.profile.copy()
        profile.update(dtype=dtype, nodata=nodata, compress='deflate')

        with rio.open(output, 'w', **profile) as dst:
            dst.write(out, 1)
Example #4
def _check_offsets(block, out_data_, window_, oleft, otop, ocols, orows, left_, top_):

    # Check if the data were read at larger
    # extents than the write bounds.

    obottom = otop - (orows * abs(block.gw.celly))
    oright = oleft + (ocols * abs(block.gw.cellx))

    bottom_ = top_ - (window_.height * abs(block.gw.celly))
    right_ = left_ + (window_.width * abs(block.gw.cellx))

    left_diff = 0
    right_diff = 0
    top_diff = 0
    bottom_diff = 0

    if left_ < oleft:
        left_diff = int(abs(oleft - left_) / abs(block.gw.cellx))
        right_diff = out_data_.shape[-1]
    elif right_ > oright:
        left_diff = 0
        right_diff = int(abs(oright - right_) / abs(block.gw.cellx))

    if bottom_ < obottom:
        bottom_diff = int(abs(obottom - bottom_) / abs(block.gw.celly))
        top_diff = 0
    elif top_ > otop:
        bottom_diff = out_data_.shape[-2]
        top_diff = int(abs(otop - top_) / abs(block.gw.celly))

    if (left_diff != 0) or (top_diff != 0) or (bottom_diff != 0) or (right_diff != 0):

        dshape = out_data_.shape

        if len(dshape) == 2:
            out_data_ = out_data_[top_diff:bottom_diff, left_diff:right_diff]
        elif len(dshape) == 3:
            out_data_ = out_data_[:, top_diff:bottom_diff, left_diff:right_diff]
        elif len(dshape) == 4:
            out_data_ = out_data_[:, :, top_diff:bottom_diff, left_diff:right_diff]

        window_ = Window(col_off=window_.col_off,
                         row_off=window_.row_off,
                         width=out_data_.shape[-1],
                         height=out_data_.shape[-2])

    return out_data_, window_
Example #5
def sample_gen(dataset, xy, indexes=None, masked=False):
    """Sample pixels from a dataset

    Parameters
    ----------
    dataset : rasterio Dataset
        Opened in "r" mode.
    xy : iterable
        Pairs of x, y coordinates in the dataset's reference system.
    indexes : int or list of int
        Indexes of dataset bands to sample.
    masked : bool, default: False
        Whether to mask samples that fall outside the extent of the
        dataset.

    Yields
    ------
    array
        An array of length equal to the number of specified indexes
        containing the dataset values for the bands corresponding to
        those indexes.

    """
    index = dataset.index
    read = dataset.read

    if indexes is None:
        indexes = dataset.indexes
    elif isinstance(indexes, int):
        indexes = [indexes]

    for x, y in xy:

        row_off, col_off = index(x, y)

        if row_off < 0 or col_off < 0 or row_off >= dataset.height or col_off >= dataset.width:
            data = numpy.ones((len(indexes),), dtype=dataset.dtypes[0]) * (dataset.nodata or 0)
            if masked:
                mask = [False if MaskFlags.all_valid in dataset.mask_flag_enums[i - 1] else True for i in indexes]
                yield numpy.ma.array(data, mask=mask)
            else:
                yield data

        else:
            window = Window(col_off, row_off, 1, 1)
            data = read(indexes, window=window, masked=masked)
            yield data[:, 0, 0]
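A short usage sketch for this generator. The file name is a placeholder, and `numpy`, `MaskFlags`, and `Window` are assumed to be imported as in the surrounding module:

import rasterio

with rasterio.open("dem.tif") as dataset:                      # hypothetical single-band raster
    points = [(451000.0, 5411000.0), (452500.0, 5412500.0)]    # x, y in the dataset's CRS
    for sample in sample_gen(dataset, points, indexes=1, masked=True):
        print(sample)                                           # one value per requested band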
Example #6
def _process_blocks(
    blocks,
    sources,
    col_size,
    row_size,
    step_width,
    step_height,
    width,
    height,
    calc_area,
):
    """
    Loops over all blocks and reads first input raster to get coordinates.
    Append values from all input rasters
    :param blocks: list of blocks to process
    :param src_rasters: list of input rasters
    :param col_size: pixel width
    :param row_size: pixel height
    :param step_width: block width
    :param step_height: block height
    :param width: image width
    :param height: image height
    :return: Table of Lat/Lon coord and corresponding raster values
    """

    for block in blocks:

        col = block[0]
        row = block[1]

        w_width = _get_window_size(col, step_width, width)
        w_height = _get_window_size(row, step_height, height)
        window = Window(col, row, w_width, w_height)

        src = sources[0]

        left, top, right, bottom = src.window_bounds(window)
        w = src.read(1, window=window)
        lat_lon = _get_lat_lon(w, col_size, row_size, left, bottom, calc_area)
        del w

        if lat_lon.shape[0] > 0:

            values = _get_values(sources, window)

            yield (pd.concat([lat_lon, values], axis=1),
                   )  # need to pack data frame into tuple
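A hedged usage sketch: the file names are placeholders, and the `_get_window_size`, `_get_lat_lon`, and `_get_values` helpers referenced by `_process_blocks` are assumed to be defined in the same module:

import rasterio

with rasterio.open("tcd.tif") as src_a, rasterio.open("loss.tif") as src_b:
    sources = [src_a, src_b]
    step_width, step_height = 256, 256
    # Block origins in (col, row) order, matching how _process_blocks unpacks them.
    blocks = [(col, row)
              for row in range(0, src_a.height, step_height)
              for col in range(0, src_a.width, step_width)]
    col_size, row_size = src_a.transform.a, abs(src_a.transform.e)
    for (table,) in _process_blocks(blocks, sources, col_size, row_size,
                                    step_width, step_height,
                                    src_a.width, src_a.height, calc_area=False):
        print(table.head())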
Example #7
def extract_tile(mosaicdb, lon, lat, tile_size, crs):
    """
    Extracts a tile of tile_size x tile_size pixels from a larger mosaic.
    Params:
        mosaicdb: rasterio DatasetReader
        lon: longitude value in EPSG:4326 or x coordinate in EPSG:3857
        lat: latitude value in EPSG:4326 or y coordinate in EPSG:3857
        tile_size: size of the tile to return, in pixels
        crs: string, EPSG:4326 or EPSG:3857
    """
    assert crs in ['EPSG:3857', 'EPSG:4326']
    if crs == 'EPSG:4326':
        xgeo, ygeo = geodesic2spherical(lon, lat)
    else:
        xgeo, ygeo = lon, lat
    # dataset.index returns (row, col); Window takes (col_off, row_off, width, height)
    idx, idy = mosaicdb.index(xgeo, ygeo)
    return mosaicdb.read(window=Window(idy, idx, tile_size, tile_size))
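A usage sketch with a placeholder mosaic path; `geodesic2spherical`, which reprojects EPSG:4326 coordinates to EPSG:3857, is assumed to be defined alongside `extract_tile`:

import rasterio

with rasterio.open("mosaic.tif") as mosaicdb:    # hypothetical EPSG:3857 mosaic
    tile = extract_tile(mosaicdb, lon=2.35, lat=48.85, tile_size=256, crs="EPSG:4326")
    print(tile.shape)   # (bands, 256, 256) when the window lies fully inside the mosaic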
Example #8
 def test_read_raster_pass(self):
     """from_raster"""
     exp = Exposures.from_raster(HAZ_DEMO_FL, window=Window(10, 20, 50, 60))
     exp.check()
     self.assertTrue(u_coord.equal_crs(exp.crs, DEF_CRS))
     self.assertAlmostEqual(exp.gdf['latitude'].max(),
                            10.248220966978932 - 0.009000000000000341 / 2)
     self.assertAlmostEqual(exp.gdf['latitude'].min(),
                            10.248220966978932 - 0.009000000000000341
                            / 2 - 59 * 0.009000000000000341)
     self.assertAlmostEqual(exp.gdf['longitude'].min(),
                            -69.2471495969998 + 0.009000000000000341 / 2)
     self.assertAlmostEqual(exp.gdf['longitude'].max(),
                            -69.2471495969998 + 0.009000000000000341
                            / 2 + 49 * 0.009000000000000341)
     self.assertEqual(len(exp.gdf), 60 * 50)
     self.assertAlmostEqual(exp.gdf.value.values.reshape((60, 50))[25, 12], 0.056825936)
Example #9
    def _create_patch_window(self, row, size_px=256):
        point = row.geometry
        raster = row.raster

        row, col = raster.index(point.x, point.y)

        # Find the top left corner
        row = row - size_px//2
        col = col - size_px//2

        window = Window(col, row, size_px, size_px)
        window = rasterio.windows.get_data_window(raster).intersection(window)

        if rasterio.windows.shape(window) != (size_px, size_px):
            return None

        return window
Example #10
def extract_cubic(dataset, coords, size=(9, 9)):
    '''Takes an opened rasterio dataset, a list of (lat, lon) coordinate tuples, and the size of the
    extracted cubes, and returns an array of shape (len(coords), size[0], size[1], n_bands).'''
    bands = dataset.count
    output = np.zeros((len(coords), *size, bands))

    trans_coords = convert_from_EPSG4326(coords, dataset)
    for i, (lon, lat) in enumerate(trans_coords):
        py, px = dataset.index(lon, lat)
        window = Window(px - size[0] // 2, py - size[1] // 2, size[0], size[1])
        clip = dataset.read(window=window)
        try:
            output[i] = np.transpose(clip, (1, 2, 0))
        except ValueError:
            output[i] = np.full((*size, bands), np.nan)

    return output
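A usage sketch with a placeholder raster path; `convert_from_EPSG4326`, which reprojects the (lat, lon) pairs into the dataset's CRS, is assumed to be defined in the same module:

import rasterio

with rasterio.open("sentinel_stack.tif") as dataset:   # hypothetical multi-band raster
    coords = [(48.85, 2.35), (48.86, 2.36)]            # (lat, lon) pairs
    cubes = extract_cubic(dataset, coords, size=(9, 9))
    print(cubes.shape)                                  # (2, 9, 9, dataset.count)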
Example #11
 def test_intensity_cat_to_prob(self):
     empty_LS = Landslide()
     window_array = empty_LS._get_window_from_coords(
         path_sourcefile=os.path.join(
             DATA_DIR_TEST, 'test_global_landslide_nowcast_20190501.tif'),
         bbox=[47, 23, 46, 22])
     empty_LS.set_raster([
         os.path.join(DATA_DIR_TEST,
                      'test_global_landslide_nowcast_20190501.tif')
     ],
                         window=Window(window_array[0], window_array[1],
                                       window_array[3], window_array[2]))
     empty_LS._intensity_cat_to_prob(max_prob=0.0001)
     self.assertTrue(max(empty_LS.intensity_cat.data) == 2)
     self.assertTrue(min(empty_LS.intensity_cat.data) == 1)
     self.assertTrue(max(empty_LS.intensity.data) == 0.0001)
     self.assertTrue(min(empty_LS.intensity.data) == 0)
Example #12
def extract_raster_window(in_ds, upperleft, lowerright):
    meta = in_ds.meta.copy()
    start_row, start_col = in_ds.index(upperleft.x, upperleft.y)
    stop_row, stop_col = in_ds.index(lowerright.x, lowerright.y)
    width = stop_col - start_col
    height = stop_row - start_row
    zone = Window(start_col, start_row, width, height)
    data = in_ds.read(1, window=zone)

    meta.update({
        "driver": "GTiff",
        "height": zone.height,
        "width": zone.width,
        "transform": transform(zone, in_ds.transform),
    })

    return data, meta, zone
Example #13
def predict(ds, config, writer, partitions=10):
    feats = {}
    covariates = config.covariates
    process_rows = mpiops.array_split(range(ds.height))

    model = pickle.load(open('local_kriged_regression.model', 'rb'))

    # idea: instead of rows, using tiles may be more efficient due to
    # variogram computation in the LocalRegressionKriging class
    for p, r in enumerate(np.array_split(process_rows, partitions)):
        log.info('Processing partition {}'.format(p+1))
        step = len(r)
        for c in covariates:
            with rio.open(c) as src:
                # Window(col_off, row_off, width, height)
                # assume band one for now
                w = src.read(1, window=Window(0, r[0], src.width, step))
                feats[splitext(basename(c))[0]] = w.flatten()

        feats = OrderedDict(sorted(feats.items()))

        # stack for prediction
        X = np.ma.vstack([v for v in feats.values()]).T

        log.info('predicting rows {r0} through {rl} '
                 'using process {rank}'.format(r0=r[0], rl=r[-1],
                                               rank=mpiops.rank))

        # vectors of rows and cols we need lats and lons for
        rs = np.repeat(r, ds.width)
        cs = np.repeat(np.atleast_2d(np.array(range(ds.width))),
                       step, axis=0).flatten()
        lats, lons = ds.xy(rs, cs)
        # stack with lats and lons
        X = np.ma.hstack([np.atleast_2d(lats).T, np.atleast_2d(lons).T, X])

        # TODO: remove this when we have imputation working
        # just assign nodata when there is nodata in any covariate
        no_data_mask = X.mask.sum(axis=1) != 0
        pred, res = model.predict(X, lats, lons)
        pred[no_data_mask] = DEFAULT_NODATA
        res[no_data_mask] = DEFAULT_NODATA

        writer.write({'data': pred.reshape((step, -1)).astype(rio.float32),
                      'residuals': res.reshape((step, -1)).astype(rio.float32),
                      'window': (0, r[0], ds.width, step)})
Example #14
def borderCropper(src, source_win, savename, oxt):
    pre_crop, crd = CvContourCrop(
        np.array(
            reshape_as_image(src.read(window=source_win)).astype(np.uint8)))
    bx = maxRectContourCrop(pre_crop)
    del pre_crop
    ysize = bx[3] - bx[1]
    xsize = bx[2] - bx[0]
    xoff = crd[0] + bx[0]  #+src_win.width
    yoff = crd[1] + bx[1]  #+src_win.height
    bc_win = Window(xoff, yoff, xsize, ysize)
    src_win = copy(bc_win)
    dst_trs = src.window_transform(src_win)
    src_width = bc_win.width
    src_height = bc_win.height
    savename = savename.split('.' + oxt)[0] + '_cropped'
    return (src_width, src_height, src_win, dst_trs, savename)
Example #15
def sample_gen(dataset, xy, indexes=None):
    """Generator for sampled pixels"""
    index = dataset.index
    read = dataset.read

    if isinstance(indexes, int):
        indexes = [indexes]

    for x, y in xy:
        row_off, col_off = index(x, y)
        if row_off < 0 or col_off < 0:
            yield numpy.ones(
                (dataset.count, ), dtype=dataset.dtypes[0]) * dataset.nodata
        else:
            window = Window(col_off, row_off, 1, 1)
            data = read(indexes, window=window, masked=False)
            yield data[:, 0, 0]
Example #16
def calculate_chunks(width, height, tiles):
    pixels = width * height
    max_pixels = pixels / tiles
    chunk_size = int(math.floor(math.sqrt(max_pixels)))
    ncols = int(math.ceil(width / chunk_size))
    nrows = int(math.ceil(height / chunk_size))
    chunk_windows = []

    for col in range(ncols):
        col_offset = col * chunk_size
        w = min(chunk_size, width - col_offset)
        for row in range(nrows):
            row_offset = row * chunk_size
            h = min(chunk_size, height - row_offset)
            chunk_windows.append(
                ((row, col), Window(col_offset, row_offset, w, h)))
    return chunk_windows
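A quick usage sketch, assuming `math` and `rasterio.windows.Window` are imported:

# Split a 1024 x 768 raster into chunk windows sized so that each holds
# at most roughly 1/8 of the total pixels.
for (row, col), window in calculate_chunks(width=1024, height=768, tiles=8):
    print(row, col, window)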
Example #17
def interpolated_value(x, y, dem, method='bilinear', scaling_factor=1.0):
    '''Given a point (x, y), find the interpolated value in the raster using
    bilinear or spline interpolation.

    '''
    methods = {'spline': bivariate_spline, 'bilinear': bilinear}

    # At this point, we assume that the input DEM is in the same crs as the
    # x y values.

    # The DEM's affine transformation: maps units along its indices to crs
    # coordinates. e.g. if the DEM is 1000x1000, maps xy values in the
    # 0-1000 range to the DEM's CRS, e.g. lon-lat
    aff = dem.transform
    # The inverse of the transform: maps values in the DEM's crs to indices.
    # Note: the output values are floats between the index integers.
    inv = ~aff

    # Get the in-DEM index coordinates
    _x, _y = inv * (x, y)

    # Extract a window of coordinates
    if method == 'bilinear':
        # Get a 2x2 window of pixels surrounding the coordinates
        dim = 2
        offset_x = math.floor(_x)
        offset_y = math.floor(_y)
    elif method == 'spline':
        # Get a 3x3 window of pixels surrounding the coordinates
        dim = 3  # window size (should be odd)
        offset = math.floor(dim / 2.)
        offset_x = int(math.floor(_x) - offset)
        offset_y = int(math.floor(_y) - offset)
    else:
        raise ValueError(
            'Invalid interpolation method {} selected'.format(method))
    dem_arr = dem.read(1, window=Window(offset_x, offset_y, dim, dim))

    dx = _x - offset_x
    dy = _y - offset_y

    interpolator = methods[method]

    interpolated = interpolator(dx, dy, dem_arr)

    return scaling_factor * interpolated
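A usage sketch with a placeholder DEM path; the `bilinear` and `bivariate_spline` helpers referenced in `methods` are assumed to be defined in the same module, and (x, y) must be expressed in the DEM's CRS:

import rasterio

with rasterio.open("dem.tif") as dem:    # hypothetical elevation raster
    elevation = interpolated_value(451234.5, 5419876.2, dem, method='bilinear')
    print(elevation)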
Example #18
async def test_cog_read_internal_tile(infile, create_cog_reader):
    async with create_cog_reader(infile) as cog:
        # Read top left tile at native resolution
        tile = await cog.get_tile(0, 0, 0)
        ifd = cog.ifds[0]

        # Make sure tile is the right size
        assert tile.shape == (
            ifd.SamplesPerPixel.value,
            ifd.TileHeight.value,
            ifd.TileWidth.value,
        )

        with rasterio.open(infile) as src:
            window = Window(0, 0, ifd.TileWidth.value, ifd.TileHeight.value)
            rio_tile = src.read(window=window)
            # Internal mask
            if np.ma.is_masked(tile) and cog.is_masked:
                assert cog.is_masked
                tile_arr = np.ma.getdata(tile)
                tile_mask = np.ma.getmask(tile)
                rio_mask = src.read_masks(1, window=window)

                # Make sure image data is the same
                assert pytest.approx(np.min(rio_tile), 2) == np.min(tile_arr)
                assert pytest.approx(np.mean(rio_tile), 2) == np.mean(tile_arr)
                assert pytest.approx(np.max(rio_tile), 2) == np.max(tile_arr)

                # Make sure mask data is the same
                rio_mask_counts = np.unique(rio_mask, return_counts=True)
                tile_mask_counts = np.unique(tile_mask, return_counts=True)
                assert rio_mask_counts[0].all() == tile_mask_counts[0].all()
                assert rio_mask_counts[1].all() == tile_mask_counts[1].all()
            # Nodata
            elif ifd.nodata is not None:
                # Mask rio array to match aiocogeo output
                rio_tile = np.ma.masked_where(
                    rio_tile == src.profile["nodata"], rio_tile)
                assert pytest.approx(np.min(rio_tile), 2) == np.min(tile)
                assert pytest.approx(np.mean(rio_tile), 2) == np.mean(tile)
                assert pytest.approx(np.max(rio_tile), 2) == np.max(tile)
            else:
                # Make sure image data is the same
                assert pytest.approx(np.min(rio_tile), 2) == np.min(tile)
                assert pytest.approx(np.mean(rio_tile), 2) == np.mean(tile)
                assert pytest.approx(np.max(rio_tile), 2) == np.max(tile)
Example #19
def test_sliding_windows_whole_width_only():
    windows = list(sliding_windows(size=2, step_size=2, width=6, height=5))
    assert windows == [
        # row 0
        Window(col_off=0, row_off=0, width=2, height=2),
        Window(col_off=2, row_off=0, width=2, height=2),
        Window(col_off=4, row_off=0, width=2, height=2),
        # row 1
        Window(col_off=0, row_off=2, width=2, height=2),
        Window(col_off=2, row_off=2, width=2, height=2),
        Window(col_off=4, row_off=2, width=2, height=2),
    ]
Example #20
def get_raster_values(raster, x_raster_proj, y_raster_proj):

    # raster_values = raster.sample(zip(x_raster_proj.tolist(), y_raster_proj.tolist()))
    # raster_values = [x[0] for x in raster_values]

    min_x = min(x_raster_proj)
    max_x = max(x_raster_proj)

    min_y = min(y_raster_proj)
    max_y = max(y_raster_proj)

    y_offset = (max_y -
                raster.meta["transform"].f) / raster.meta["transform"].e
    y_offset = np.floor(y_offset).astype(int)
    assert y_offset > 0

    height = (min_y - raster.meta["transform"].f
              ) / raster.meta["transform"].e - y_offset
    height = np.ceil(height).astype(int)
    assert height > 0

    x_offset = (min_x -
                raster.meta["transform"].c) / raster.meta["transform"].a
    x_offset = np.floor(x_offset).astype(int)
    assert x_offset > 0

    width = (max_x - raster.meta["transform"].c
             ) / raster.meta["transform"].a - x_offset
    width = np.ceil(width).astype(int)
    assert width > 0

    window = Window(x_offset, y_offset, width, height)

    # Note: shape is (band, y, x), i.e. (band, height, width)
    raster_window_values = raster.read(window=window)

    x_window_index = (x_raster_proj - raster.meta["transform"].c
                      ) / raster.meta["transform"].a - x_offset
    x_window_index = x_window_index.astype(int)

    y_window_index = (y_raster_proj - raster.meta["transform"].f
                      ) / raster.meta["transform"].e - y_offset
    y_window_index = y_window_index.astype(int)

    return raster_window_values[0, y_window_index, x_window_index]
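A usage sketch with a placeholder raster path. The coordinate arrays must already be in the raster's projection and, given the asserts above, every point should fall strictly inside the raster extent:

import numpy as np
import rasterio

with rasterio.open("landcover.tif") as raster:      # hypothetical single-band raster
    x = np.array([451000.0, 452000.0])              # projected x coordinates
    y = np.array([5411000.0, 5412000.0])            # projected y coordinates
    print(get_raster_values(raster, x, y))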
Example #21
    def channel_resize(self,
                       imgpath,
                       channel_name,
                       window_x0,
                       window_y0,
                       window_x1,
                       window_y1,
                       IMG_ROW=256,
                       IMG_COL=256):
        #--------------------------------------------------------------
        # other channels
        #--------------------------------------------------------------

        if 'arctic' in channel_name and self.sentinel:
            key = '/home/user/data/projects/research-project/notebooks/Illarionova/usgs_sentinel/Left_shore/krasnoborsk/arctic_dem.tif'
        else:
            key = imgpath + channel_name

        dx = self.data_gdal[key]['dx']
        dy = self.data_gdal[key]['dy']

        x0 = self.data_gdal[key]['x0']
        y0 = self.data_gdal[key]['y0']

        size_x = self.data_gdal[key]['size_x']
        size_y = self.data_gdal[key]['size_y']

        coord_x0 = abs((x0 - window_x0) / dx)
        coord_y0 = abs((y0 - window_y0) / dy)

        coord_x1 = abs((x0 - window_x1) / dx)
        coord_y1 = abs((y0 - window_y1) / dy)

        window = Window(coord_x0, coord_y0, coord_x1 - coord_x0,
                        coord_y1 - coord_y0)

        with rasterio.open(key) as src:  #imgpath + channel_name
            img_crop = src.read(window=window)

        dim = (IMG_ROW, IMG_COL)
        new_crop = img_crop.transpose(1, 2, 0)
        new_crop = np.expand_dims(
            cv2.resize(new_crop, dim, interpolation=cv2.INTER_NEAREST), 0)

        return new_crop
Example #22
def test_chunks_with_pad(raster):
    shape = (256, 256)

    # validate that each chunk is in the right offsets
    for r, offsets in raster.chunks(shape, pad=True):
        expected_raster = raster.get_window(Window(col_off=offsets[0], row_off=offsets[
                                            1], width=shape[0], height=shape[1]))
        assert r == expected_raster

    # validate we can construct the original raster from the chunks
    chunks = [r for r, _ in raster.chunks(pad=True)]
    merged_raster = join(chunks)
    assert merged_raster.crs == raster.crs
    assert merged_raster.affine.almost_equals(raster.affine, precision=1e-3)
    # the merged raster has a bigger shape because of the padded edges
    assert np.array_equal(merged_raster.image[:, :raster.height, :raster.width], raster.image)
    # validating the rest is masked
    assert np.array_equal(merged_raster.image.mask[:, raster.height:, raster.width:].all(), True)
Example #23
def windowed_reads_rasterio(tif, slice_coords):
    """Make a series of windowed reads

    Parameters
    ----------
    tif: open rasterio image
    slice_coords: list
        List of (x, y, width, height) rows in pixel coords to make windowed
        reads.
    """

    # Loop through all windows and save if they don't exist
    windows = []
    for (x_pt, y_pt, width, height) in slice_coords:
        # Rasterio works in `xy` coords, not `ij`
        windows.append(tif.read(1, window=Window(y_pt, x_pt, width, height)))

    return windows
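A usage sketch with a placeholder image path and pixel-space slice coordinates:

import rasterio

with rasterio.open("scene.tif") as tif:              # hypothetical source image
    slice_coords = [(0, 0, 256, 256), (256, 256, 256, 256)]
    tiles = windowed_reads_rasterio(tif, slice_coords)
    print([t.shape for t in tiles])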
Example #24
def _call_tile_endpoint(tile, tile_url, extrema, tilesize=256, retry=0):
    """Tile Worker.

    Call the tile endpoint for each mercator tile.
    """
    url = tile_url.format(z=tile.z, x=tile.x, y=tile.y)
    img = requests.get(url)
    if not img.status_code == 200:
        time.sleep(3)
        if retry == 3:
            raise Exception("Empty")
        return _call_tile_endpoint(tile, tile_url, extrema, tilesize=tilesize, retry=retry + 1)

    row = (tile.y - extrema["y"]["min"]) * tilesize
    col = (tile.x - extrema["x"]["min"]) * tilesize
    window = Window(col_off=col, row_off=row, width=tilesize, height=tilesize)

    return window, img
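A usage sketch with a hypothetical XYZ tile endpoint and tile indices; `extrema` holds the minimum mercator tile indices of the mosaic so window offsets start at (0, 0), and `requests`, `time`, and `Window` are assumed to be imported as in the surrounding module. `mercantile.Tile` is used here only as a convenient stand-in for any object with x/y/z attributes:

import mercantile

tile = mercantile.Tile(x=1205, y=1540, z=12)
extrema = {"x": {"min": 1200}, "y": {"min": 1536}}
window, img = _call_tile_endpoint(
    tile, "https://example.com/tiles/{z}/{x}/{y}.png", extrema, tilesize=256)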
Example #25
    def get_images(self, tiles: List[Tile]) -> Iterator[Tuple[Tile, bytes]]:
        """return bounds of original tile filled with the 4 child tiles 1 zoom level up in bytes"""
        for tile in tiles:
            print('in SuperTileDownloader get images function')
            print(tile)
            w_lst = []
            for i in range(2):
                for j in range(2):
                    window = Window(i * 256, j * 256, 256, 256)
                    w_lst.append(window)
            z = 1 + tile.z
            child_tiles = children(tile,
                                   zoom=z)  #get this from database (tile_zoom)
            child_tiles.sort()
            print('in supertile get_images')
            print(child_tiles)

            with MemoryFile() as memfile:
                with memfile.open(driver='jpeg',
                                  height=512,
                                  width=512,
                                  count=3,
                                  dtype=rasterio.uint8) as dataset:
                    for num, t in enumerate(child_tiles):
                        print(num)
                        print(t)
                        url = self.imagery.format(x=t.x, y=t.y, z=t.z)
                        print(url)
                        r = requests.get(url)
                        img = np.array(Image.open(io.BytesIO(r.content)),
                                       dtype=np.uint8)
                        try:
                            img = img.reshape(
                                (256, 256, 3)
                            )  # 4 channels returned from some endpoints, but not all
                        except ValueError:
                            img = img.reshape((256, 256, 4))
                        img = img[:, :, :3]
                        img = np.rollaxis(img, 2, 0)
                        print(w_lst[num])
                        print()
                        dataset.write(img, window=w_lst[num])
                dataset_b = memfile.read()  #but this fails
                yield (tile, dataset_b)
Example #26
def RWRasterBlocks(rasterFp, dataSave_fp, windowsList):
    newPath = os.path.join(saveRasterByBlock_fp, "reclassify")
    try:
        os.mkdir(newPath)
    except OSError:
        print("Creation of the directory %s failed" % newPath)
    else:
        print("Successfully created the directory %s " % newPath)
    a_T = datetime.datetime.now()
    i = 0
    buildingAmount_epochMultiple = 0
    for win in tqdm(windowsList):
        with rasterio.open(rasterFp, "r+") as src:
            src.nodata = -1
            w = src.read(1, window=win)
            # print("_"*50)
            # print(w.shape)
            profile = src.profile
            win_transform = src.window_transform(win)
        # computation step
        w = computing(w)
        # Configure the raster profile; the compress and dtype parameters in
        # particular can greatly reduce the output raster size.
        profile.update(width=win.width,
                       height=win.height,
                       count=1,
                       transform=win_transform,
                       compress='lzw',
                       dtype=rasterio.int8)
        # with rasterio.open(os.path.join(saveRasterByBlock_fp,"testingBlock.tif"), 'w', driver='GTiff' width=512, height=256, count=1,dtype=w.dtype,**profile) as dst:
        with rasterio.open(
                os.path.join(newPath, "buildingHeight_reclassify_%d.tif" % i),
                'w', **profile) as dst:
            dst.write(w, window=Window(0, 0, win.width, win.height), indexes=1)

        i += 1
        # if i ==2:
        #     break
        # np.save(os.path.join(dataSave_fp,"data_%d.npy"%zValue),buildingAmount_epochMultiple)
        # buildingAmount[zValue]=buildingAmount_epochMultiple
        # print("_"*50)
        # print("%d---zValue has completed!!!"%zValue)
    b_T = datetime.datetime.now()
    print("time span:", b_T - a_T)
    print("_" * 50)
Example #27
def test_sliding_windows_odd_size():
    windows = list(sliding_windows(size=4, step_size=2, width=7, height=9))
    assert windows == [
        # row 0
        Window(col_off=0, row_off=0, width=4, height=4),
        Window(col_off=2, row_off=0, width=4, height=4),
        # row 1
        Window(col_off=0, row_off=2, width=4, height=4),
        Window(col_off=2, row_off=2, width=4, height=4),
        # row 2
        Window(col_off=0, row_off=4, width=4, height=4),
        Window(col_off=2, row_off=4, width=4, height=4),
    ]
Example #28
def clip_raster_to_another(to_clip, template, dstfile):
    with rasterio.open(to_clip, "r") as ds_to_clip:
        with rasterio.open(template, "r") as ds_template:
            ul_row, ul_col = ds_to_clip.index(ds_template.bounds.left,
                                              ds_template.bounds.top)
            lr_row, lr_col = ds_to_clip.index(ds_template.bounds.right,
                                              ds_template.bounds.bottom)
            window = Window(ul_col, ul_row, (lr_col - ul_col),
                            (lr_row - ul_row))

            options = ds_template.meta.copy()
            clipped_data = ds_to_clip.read(1, window=window)
            assert ds_template.meta["crs"] == ds_to_clip.meta["crs"]

    options["dtype"] = rasterio.dtypes.uint8
    with rasterio.open(dstfile, "w", **options) as ds_clipped:
        ds_clipped.write(clipped_data, 1)

    return 0
Example #29
    def __setitem__(self, key, item):
        """Put the data chunk in the image"""
        if len(key) == 3:
            index_range, y, x = key
            indexes = list(
                range(index_range.start + 1, index_range.stop + 1,
                      index_range.step or 1))
        else:
            indexes = 1
            y, x = key

        chy_off = y.start
        chy = y.stop - y.start
        chx_off = x.start
        chx = x.stop - x.start

        self.dataset.write(item,
                           window=Window(chx_off, chy_off, chx, chy),
                           indexes=indexes)
Example #30
 def test_read_all_pass(self):
     """Test centr_ras data"""
     centr_ras = Centroids()
     inten_ras = centr_ras.set_raster_file(HAZ_DEMO_FL,
                                           window=Window(0, 0, 50, 60))
     self.assertAlmostEqual(centr_ras.meta['crs'], DEF_CRS)
     self.assertAlmostEqual(centr_ras.meta['transform'].c,
                            -69.33714959699981)
     self.assertAlmostEqual(centr_ras.meta['transform'].a,
                            0.009000000000000341)
     self.assertAlmostEqual(centr_ras.meta['transform'].b, 0.0)
     self.assertAlmostEqual(centr_ras.meta['transform'].f,
                            10.42822096697894)
     self.assertAlmostEqual(centr_ras.meta['transform'].d, 0.0)
     self.assertAlmostEqual(centr_ras.meta['transform'].e,
                            -0.009000000000000341)
     self.assertEqual(centr_ras.meta['height'], 60)
     self.assertEqual(centr_ras.meta['width'], 50)
     self.assertEqual(inten_ras.shape, (1, 60 * 50))