Example #1
File: utils.py Project: iva-dtis/DISIR
def polygonize(input_file, output_file, proj):
    """
    Polygonise a raster file
    Parameters
    ----------
    input_file: str Path to input raster
    output_file: str Path to output vector
    proj: str Projection
    """
    with buzz.Dataset(sr_work=proj, sr_fallback="WGS84").close as ds:
        ds.open_raster("raster", input_file)
        if os.path.isfile(output_file):
            os.remove(output_file)
        fields = [{"name": "class", "type": np.int32}]
        ds.create_vector("vector",
                         output_file,
                         "polygon",
                         driver="geojson",
                         fields=fields)
        fp = ds["raster"].fp
        mask = ds["raster"].get_data()
        for class_idx in np.unique(mask):
            if class_idx != 0:
                polygons = fp.find_polygons(mask == class_idx)
                if not polygons:
                    continue
                for poly in polygons:
                    ds["vector"].insert_data(poly, {"class": class_idx})
Example #2
def predict_from_file(rgb_path,
                      model,
                      downsampling_factor=1,
                      tile_size=256,
                      no_of_gpu=1,
                      batch_size=2):

    ds_rgb = buzz.Dataset(allow_interpolation=True)
    ds_rgb.open_raster('rgb', rgb_path)

    fp = buzz.Footprint(
        tl=ds_rgb.rgb.fp.tl,
        size=ds_rgb.rgb.fp.size,
        rsize=ds_rgb.rgb.fp.rsize / downsampling_factor,
    )  # downsampling

    overlapx = int(tile_size / 2)
    overlapy = int(tile_size / 2)

    try:
        predicted_probamap = predict_map(model, ds_rgb, fp, tile_size,
                                         overlapx, overlapy, batch_size)
    except Exception as e:
        print(e)
        print("Retrying prediction with reduced batch size")
        predicted_probamap = predict_map(model, ds_rgb, fp, tile_size,
                                         overlapx, overlapy, batch_size // 2)

    return predicted_probamap, fp
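The Footprint built above keeps the spatial extent of the source raster while dividing the pixel count per axis, which is what makes `get_data` return a downsampled array. A standalone sketch of the same idea, assuming a hypothetical input path:

import buzzard as buzz

ds = buzz.Dataset(allow_interpolation=True)  # interpolation is required to resample on read
ds.open_raster('rgb', 'orthoimage.tif')      # hypothetical path
src_fp = ds.rgb.fp
# Same top-left corner and spatial size, half as many pixels per axis,
# i.e. pixels twice as large.
half_fp = buzz.Footprint(tl=src_fp.tl, size=src_fp.size, rsize=src_fp.rsize / 2)
arr = ds.rgb.get_data(fp=half_fp)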
Example #3
def write_tif_from_fp(array,
                      tile_id,
                      build_dataset_dir,
                      output_dir,
                      prefix=""):
    """

    Args:
        array:
        tile_id:
        build_dataset_dir:
        output_dir:
        prefix:

    Returns:

    """
    assert ".tif" in tile_id, "wrong tile id should en with tif"
    ds_tile = buzz.Dataset()
    output_path = "{}{}_image{}".format(output_dir, prefix, tile_id)
    fp_tile = get_fp(tile_id, build_dataset_dir)
    with ds_tile.acreate_raster(
            output_path,
            fp_tile,
            'float32',
            channel_count=DICT_SHAPE[LABEL_DIR][-1]).close as cache:
        cache.set_data(array)

    return output_path
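A hedged usage sketch; `DICT_SHAPE` and `LABEL_DIR` are module-level names from the original project, so the shape and paths below are hypothetical:

import numpy as np

# The array must match the tile's footprint and the channel count taken
# from DICT_SHAPE[LABEL_DIR][-1] inside the function.
probas = np.zeros((256, 256, 5), dtype='float32')  # hypothetical shape
out = write_tif_from_fp(probas, "42.tif", "dataset/", "output/", prefix="proba")
print(out)  # output/proba_image42.tif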
Example #4
def generate_dsm(rsize=(16000, 16000),
                 resolution=0.03,
                 delta_z=(10, 110),
                 roughness=0.45,
                 nb_houses=5,
                 verbose=False):
    """generates a `.tif` file containing an artificial dsm.
    the generated dsm does not have a projection, nor no_data values.

    Parameters
    ----------
    - rsize: (int, int)
        desired raster size
    - resolution: (float)
        desired resolution in meters
    - delta_z: (float, float)
        span of elevations on generated dsm in meters
    - roughness: float
        value ranging from 0 to 1. 0 will generate a very smooth dsm (a plane). 0.5 generates
        mildly rough landscapes (imagine a valley in the Swiss Alps). 1 generates very sharp results.
    - nb_houses: int
        number of houses to be added on the dsm
    - verbose: boolean
        if True, some information about the generated dsm will be printed.
    """

    w, l = rsize
    min_z, max_z = delta_z

    if verbose:
        print("==== Metrics on generated dsm ====")
        print(f"  w, l = {w}, {l}")
        print(f"  resolution = {resolution}")
        print(f"  roughness = {roughness}")
        print(f"  min_z, max_z = {min_z}, {max_z}")
        print(f"  nb_houses = {nb_houses}")

    dsm = diamond_square((l, w), min_z, max_z, roughness)
    for _ in range(nb_houses):
        _put_house_on_dsm(dsm, resolution, verbose)

    tlx = np.random.uniform(42, 1337)
    tly = np.random.uniform(32, 111)
    fp = buzz.Footprint(tl=(tlx, tly),
                        size=(w * resolution, l * resolution),
                        rsize=(w, l))

    ds = buzz.Dataset(allow_interpolation=False)
    filename = f'{uuid.uuid4()}.tif'
    if verbose:
        print(f'  {fp}')
        print('  filename = ' + filename)

    with ds.acreate_raster(filename,
                           fp,
                           dtype='float32',
                           channel_count=1,
                           sr=None).close as out:
        out.set_data(dsm)
    return filename
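A usage sketch for the generator above; parameter values are illustrative:

import os
import buzzard as buzz

path = generate_dsm(rsize=(512, 512), roughness=0.3, nb_houses=2, verbose=True)
ds = buzz.Dataset()
with ds.aopen_raster(path).close as dsm:
    print(dsm.fp)                # footprint: extent, resolution, raster size
    print(dsm.get_data().shape)  # (512, 512)
os.remove(path)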
Example #5
def main():
    path = example_tools.create_random_elevation_gtiff()
    ds = buzz.Dataset(allow_interpolation=True)

    print('Classic opening')
    # Features:
    # - Disk reads are not tiled
    # - Resampling operations are not tiled
    with ds.aopen_raster(path).close as r:
        test_raster(r)

    return # The NEXT features are not yet implemented

    print('Opening within scheduler')
    # Features:
    # - Disk reads are automatically tiled and parallelized
    # - Resampling operations are automatically tiled and parallelized
    # - `iter_data()` method is available
    with ds.aopen_raster(path, async_=True).close as r:
        # `async_=True` is equivalent to
        # `async_={}`, and also equivalent to
        # `async_={'io_pool': 'io', 'resample_pool': 'cpu', 'max_resampling_size': 512, 'max_read_size': 512}`
        test_raster(r)

    # `Dataset.close()` closes all rasters, the scheduler, and the pools.
    # If you let the garbage collector collect the `Dataset`, the rasters and
    # the scheduler will be correctly closed, but the pools will leak memory.
    ds.close()

    os.remove(path)
Example #6
    def _reproj_and_clip_vector(srcpath, dstpath):
        print("Transforming: {}".format(srcpath), flush=True)
        if os.path.isfile(dstpath):
            print("  {} already exits".format(dstpath), flush=True)
            return
        with buzz.Env(significant=10, allow_complex_footprint=True):
            ds = buzz.Dataset(sr_work='WGS84', analyse_transformation=False)
            ds.open_vector('src', srcpath, driver='GeoJSON')
            ds.open_raster('raster', rgb_wgs84_path)
            ds.create_vector(
                'dst',
                dstpath,
                'polygon',
                driver='ESRI Shapefile',
                sr=EPSG29100,
            )

            # Iterate over all geoJSON geometries overlapping with raster
            #   Ignoring geoJSON fields
            #   Save all polygons to Shapefile
            for geom in ds.src.iter_data(None, mask=ds.raster.fp, clip=True):
                if isinstance(geom, shapely.geometry.Polygon):
                    ds.dst.insert_data(geom)
                elif isinstance(geom, shapely.geometry.MultiPolygon):
                    for poly in geom.geoms:
                        ds.dst.insert_data(poly)
        print("Transformed: {}\n  to: {}".format(srcpath, dstpath), flush=True)
Example #7
def get_fp(tile_id, build_dataset_dir):
    """:param str the image id
    :returns a buzzard fp"""
    image_path = find_path(build_dataset_dir + XDIR + DICT_ORGA[XDIR][0],
                           tile_id)
    ds = buzz.Dataset(allow_interpolation=True)
    ds.open_raster('tile', image_path)
    return ds.tile.fp
Example #8
def load_tile_classif(input_dataset, list_id_tile, path_classif_tif, max_im):
    """:param input_dataset: str, path to the dataset which contains the .tif tiles
    :param list_id_tile: list of tile ids
    :param path_classif_tif: str, path to the reprojected land-class tiff; be careful
        that its projection matches the tiles' tiff
    :param max_im: int; if < len(list_id_tile), truncates the output to max_im tiles
    :returns: a list of arrays, the land classification for each tile"""
    assert os.path.isfile(path_classif_tif), "No tif at {}".format(path_classif_tif)
    if max_im < len(list_id_tile):
        list_id_tile = list_id_tile[:max_im]
    batch_landclass = []
    ds = buzz.Dataset(allow_interpolation=True)
    ds_tile = buzz.Dataset()
    ds.open_raster('land_class', path_classif_tif)
    for image_id in list_id_tile:
        path = find_path(input_dataset + XDIR + DICT_ORGA[XDIR][0], image_id)
        with ds_tile.open_raster('tile', path).close:
            fp_tile = ds_tile.tile.fp
            tile_land_class = ds.land_class.get_data(fp=fp_tile)
            batch_landclass += [tile_land_class]
    return batch_landclass
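The key move in the loop above is reading one raster on another raster's footprint, letting buzzard crop and resample on the fly. A minimal sketch with hypothetical paths:

import buzzard as buzz

ds = buzz.Dataset(allow_interpolation=True)
ds.open_raster('land_class', 'landclass.tif')  # hypothetical path
ds_tile = buzz.Dataset()
with ds_tile.open_raster('tile', 'tile_0042.tif').close:  # hypothetical path
    # Resample/crop the land classification onto the tile's exact grid.
    tile_land_class = ds.land_class.get_data(fp=ds_tile.tile.fp)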
Example #9
def tif2_path(fps):
    """Create a tif in SR2 with all single letter footprints from `fps` fixture burnt in it"""
    path = '{}/{}.tif'.format(tempfile.gettempdir(), uuid.uuid4())

    ds = buzz.Dataset()
    with ds.acreate_raster(path, fps.AI, 'int32', 1, sr=SR2['wkt']).close as r:
        for letter in string.ascii_uppercase[:9]:
            fp = fps[letter]
            arr = np.full(fp.shape, ord(letter), dtype=int)
            r.set_data(arr, fp=fp)
    yield path
    gdal.GetDriverByName('GTiff').Delete(path)
Example #10
def shp2_path(fps):
    """Create a shapefile in SR2 containing all single letter polygons from `fps` fixture"""
    path = '{}/{}.shp'.format(tempfile.gettempdir(), uuid.uuid4())

    ds = buzz.Dataset()
    ds.create_vector('poly', path, 'polygon', sr=SR2['wkt'])
    for letter in string.ascii_uppercase[:9]:
        ds.poly.insert_data(fps[letter].poly)
    ds.poly.close()
    del ds
    yield path
    gdal.GetDriverByName('ESRI Shapefile').Delete(path)
Example #11
    def _save_output(self, data, output, nodata_mask, fp):
        input_file = data["input_files"][0]
        output_file = data["output_file"]
        if self.ssh_server:
            input_file = self.ssh_server.tmp_file(input_file)
            output_file = self.ssh_server.tmp_file(output_file)
        with buzz.Dataset().close as ds:
            proj = ds.aopen_raster(input_file).proj4_virtual

            if os.path.isfile(output_file):
                ds.open_raster("output", output_file, mode="w")
                if not ds.output.fp.poly.contains(fp.poly):
                    ds.output.close()
                    os.remove(output_file)
                    ds.create_raster(
                        "output",
                        output_file,
                        self.original_fp,
                        dtype=np.uint8,
                        channel_count=1,
                        sr=proj,
                        channels_schema={"nodata": self.n_classes},
                    )
            else:
                ds.create_raster(
                    "output",
                    output_file,
                    self.original_fp,
                    dtype=np.uint8,
                    channel_count=1,
                    sr=proj,
                    channels_schema={"nodata": self.n_classes},
                )
            if nodata_mask:
                subslice = fp.slice_in(self.original_fp)
                nodata_mask = nodata_mask[subslice]
                output[nodata_mask] = self.n_classes

            ds.output.set_data(output.astype(np.uint8), fp, channels=0)
            ds.output.close()
        if self.ssh_server:
            self.ssh_server.put(data["output_file"])
        if data["polygonize"]:
            polygon_file = (
                data["polygonize"]
                if not self.ssh_server
                else self.ssh_server.tmp_file(data["polygonize"])
            )
            polygonize(output_file, polygon_file, proj)
            if self.ssh_server:
                self.ssh_server.put(data["polygonize"])
Example #12
    def _test():
        ds = buzz.Dataset()
        assert len(ds) == len(ws) == 0

        prox = ds.create_vector('test', random_path_shp, 'polygon')
        ws.add(prox)
        assert len(ds) == len(ws) == 1

        prox = ds.acreate_vector('', 'polygon', driver='Memory')
        ws.add(prox)
        assert len(ds) == len(ws) == 2

        ws.add(ds)
        assert len(ws) == 3
Example #13
File: utils.py Project: iva-dtis/DISIR
def vec_to_list(input_vec, n_classes, fp, dist_map=False):
    """
    Rasterize a vector file.
    Assume that geometries have a field `class`.
    Return a list of one hot encoding rasters for each class.
    input_ortho is used to delimit the boundaries of the raster.
    Only accept shapefiles and geojson as input for the vector.

    --------------
    Parameters:
        dist_map: bool Dilate points  and apply distance transform
    """
    with buzz.Dataset(allow_none_geometry=True).close as ds:
        ext = input_vec.split(".")[-1]
        if ext == "geojson":
            driver = "geojson"
        elif ext == "shp":
            driver = "ESRI Shapefile"
        else:
            raise ValueError(
                "Wrong kind of input vector. Expected either geojson or shapefile."
            )
        ds.open_vector("poly", input_vec, driver=driver)
        rasters = [
            np.zeros(fp.shape, dtype=np.uint8) for i in range(n_classes)
        ]
        # TODO iter_data warning
        for geometry, class_idx in ds.poly.iter_data("class"):
            if isinstance(geometry, shapely.geometry.point.Point):
                burned_point = np.asarray(
                    fp.spatial_to_raster(
                        np.asarray(geometry.xy).transpose((1, 0)))[0])
                if (0 < burned_point[1] < rasters[0].shape[0]
                        and 0 < burned_point[0] < rasters[0].shape[1]):
                    rasters[class_idx][burned_point[1], burned_point[0]] = 1
            elif isinstance(geometry, shapely.geometry.polygon.Polygon):
                burned_polygon = fp.burn_polygons(geometry)
                rasters[class_idx][burned_polygon] = 1
        if dist_map:
            kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (10, 10))
        else:
            kernel = np.ones((3, 3), dtype=np.uint8)
        rasters = [cv.dilate(raster, kernel) for raster in rasters]
        if dist_map:
            rasters = [
                cv.distanceTransform(raster.astype(np.uint8), cv.DIST_L2, 3)
                for raster in rasters
            ]
        rasters = [raster[np.newaxis] for raster in rasters]
        return rasters
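A standalone sketch of the two rasterization primitives used above, on a synthetic footprint:

import numpy as np
import shapely.geometry as sg
import buzzard as buzz

fp = buzz.Footprint(tl=(0, 100), size=(100, 100), rsize=(100, 100))  # 1m pixels

# Polygon -> boolean mask of the covered pixels
mask = fp.burn_polygons(sg.Polygon([(10, 90), (40, 90), (40, 60), (10, 60)]))
print(mask.sum())  # number of burnt pixels

# Spatial coordinates -> raster (col, row) indices
col, row = fp.spatial_to_raster(np.asarray([[25.0, 75.0]]))[0]
assert mask[row, col]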
Example #14
def tiling_fromgrid(path_tif, path_grid_geojson, output_dir):

    grid = load_grid_geojson(path_grid_geojson)
    ds = buzz.Dataset()
    r = ds.open_raster("raster", path_tif)
    v = ds.open_vector("vect", path_grid_geojson, driver='GeoJSON')
    print(type(v))
    print(v.get_keys())
    for i, poly in enumerate(v.iter_data()):
        # Compute the Footprint bounding `poly`
        print(poly)
        print(type(poly))
        print(grid)
        #print(poly["location"])
        fp = r.fp.intersection(poly)
        land_class_tile = r.get_data(fp=fp)
Example #15
def test_value_error(path):
    ds = buzz.Dataset()
    v = ds.acreate_vector(path, type='polygon')

    with pytest.raises(ValueError, match='geom_type'):
        list(v.iter_data(geom_type=''))
    with pytest.raises(TypeError, match='slicing'):
        list(v.iter_data(slicing=0))
    with pytest.raises(ValueError, match='clip'):
        list(v.iter_data(clip=True))
    with pytest.raises(TypeError, match='slicing'):
        list(v.iter_geojson(slicing=0))
    with pytest.raises(ValueError, match='clip'):
        list(v.iter_geojson(clip=True))
    with pytest.raises(TypeError, match='a'):
        v.insert_data(42)
Example #16
def test_run(path, driver, fps, test_fields, test_coords_insertion):
    # Step 1 - Build file according to fixture parameters **********************
    ds = buzz.Dataset()

    if test_fields:
        fields = FIELDS
    else:
        fields = []

    geom_type = 'polygon'
    v = ds.acreate_vector(path,
                          geom_type,
                          fields,
                          driver=driver,
                          sr=SRS[0]['wkt'])

    def _build_test_data():
        """Create test data of random format"""
        rng = np.random.RandomState(42)
        for fpname, fp in fps.items():
            geom = _geom_of_fp(rng, fp, test_coords_insertion)

            # Keep `_fields_of_fp` invocation before `if` for rng side effect
            fields = _fields_of_fp(rng, fp, fpname)

            if test_fields:
                yield geom, fields
            else:
                yield geom,

    data = list(_build_test_data())
    for dat in data:
        v.insert_data(*dat)
    if driver in {'GeoJson', 'DXF'}:
        # Flushing to disk for geojson
        v.deactivate()
        v.activate()

    # Step 2 - Test geometries read routines ***********************************
    _test_geom_read(v, fps, data, test_fields)

    # Step 3 - Test field read routines ****************************************
    if test_fields:
        _test_fields_read(v, data)

    v.close()
Example #17
    def _test():
        ds = buzz.Dataset()
        assert len(ds) == len(ws) == 0

        prox = ds.create_raster('test', random_path_tif, fps.A, float, 1)
        ws.add(prox)
        assert len(ds) == len(ws) == 1

        prox = ds.acreate_raster('', fps.A, float, 1, driver='MEM')
        ws.add(prox)
        assert len(ds) == len(ws) == 2

        prox = ds.awrap_numpy_raster(fps.A, np.zeros(fps.A.shape))
        ws.add(prox)
        assert len(ds) == len(ws) == 3

        ws.add(ds)
        assert len(ws) == 4
Example #18
def test_iter_data_fields_behavior(path):
    ds = buzz.Dataset()
    with ds.acreate_vector(path, fields=[], type='point').remove as v:
        pt0, pt1 = sg.Point(1, 1), sg.Point(2, 2)
        v.insert_data(pt0)
        v.insert_data(pt1)
        assert list(v.iter_data(None)) == [pt0, pt1]
        assert list(v.iter_data(-1)) == [pt0, pt1]
        assert list(v.iter_data('')) == [(pt0, ), (pt1, )]
        assert list(v.iter_data([])) == [(pt0, ), (pt1, )]
        assert list(v.iter_data([-1])) == [(pt0, ), (pt1, )]
    with ds.acreate_vector(path,
                           fields=[dict(name='toto', type=int)],
                           type='point').remove as v:
        pt0, pt1 = sg.Point(1, 1), sg.Point(2, 2)
        v.insert_data(pt0, [42])
        v.insert_data(pt1, [43])
        assert list(v.iter_data(None)) == [pt0, pt1]
        assert list(v.iter_data(-1)) == [(pt0, 42), (pt1, 43)]
        assert list(v.iter_data('')) == [(pt0, ), (pt1, )]
        assert list(v.iter_data([])) == [(pt0, ), (pt1, )]
        assert list(v.iter_data([-1])) == [(pt0, 42), (pt1, 43)]
Example #19
def test_vector_concurrent():
    def _work(i):
        point, = r1.iter_data(None)
        return point

    ds = buzz.Dataset(max_active=4)
    meta = dict(type='point', )

    p = mp.pool.ThreadPool(4)
    with ds.acreate_vector('/tmp/v1.shp', **meta).delete as r1:
        pt = sg.Point([42, 45])
        r1.insert_data(pt)
        r1.deactivate()

        points = list(p.map(_work, range(1000)))
        assert all(p == pt for p in points)
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (4, 0, 4)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (4, 0, 4, True)

    p.terminate()
Example #20
def test_(pools, test_prefix, cache_tiles, test_prefix2):
    def _open(**kwargs):
        d = dict(fp=fp,
                 dtype='float32',
                 channel_count=2,
                 compute_array=functools.partial(_meshgrid_raster_in,
                                                 reffp=fp),
                 cache_dir=test_prefix,
                 cache_tiles=cache_tiles,
                 **dict(
                     itertools.chain(
                         pools['merge'].items(),
                         pools['resample'].items(),
                         pools['computation'].items(),
                         pools['io'].items(),
                     )))
        d.update(kwargs)
        return ds.acreate_cached_raster_recipe(**d)

    def _test_get():
        arrs = r.get_data(band=-1)
        assert arrs.shape == tuple(np.r_[fp.shape, 2])
        x, y = arrs[..., 0], arrs[..., 1]
        xref, yref = fp.meshgrid_raster
        assert np.all(x == xref)
        assert np.all(y == yref)

    def _test_resampling(fp):
        arr = r.get_data(band=-1, fp=fp)
        ref = npr.get_data(band=-1, fp=fp)
        assert np.allclose(arr, ref)

    def _corrupt_files(files):
        for path in files:
            with open(path, 'wb') as stream:
                stream.write(b'42')

    print()  # debug line
    fp = buzz.Footprint(
        rsize=(100, 100),
        size=(100, 100),
        tl=(1000, 1100),
    )
    compute_same_address_space = type(
        pools['computation']['computation_pool']
    ) in {str, mp.pool.ThreadPool, type(None)}

    with buzz.Dataset(allow_interpolation=1).close as ds:
        # Create a numpy raster with the same data, useful to compare resampling
        npr = ds.awrap_numpy_raster(
            fp,
            np.stack(fp.meshgrid_raster, axis=2).astype('float32'))

        # Test lazyness of cache
        r = _open()
        files = glob.glob(os.path.join(test_prefix, '*.tif'))
        assert len(files) == 0

        # Test get_data results
        _test_get()
        files = glob.glob(os.path.join(test_prefix, '*.tif'))
        assert len(files) > 0
        mtimes0 = {f: os.stat(f).st_mtime for f in files}

        # Test persistence of cache
        # Test get_data results
        r.close()
        r = _open(compute_array=_should_not_be_called)
        _test_get()
        files = glob.glob(os.path.join(test_prefix, '*.tif'))
        assert len(files) > 0
        mtimes1 = {f: os.stat(f).st_mtime for f in files}
        assert mtimes0 == mtimes1

        # Test overwrite parameter
        # Test get_data results
        r.close()
        r = _open(ow=True)
        _test_get()
        files = glob.glob(os.path.join(test_prefix, '*.tif'))
        assert len(files) > 0
        mtimes1 = {f: os.stat(f).st_mtime for f in files}
        assert mtimes0.keys() == mtimes1.keys()
        for k, t0 in mtimes0.items():
            t1 = mtimes1[k]
            assert t0 < t1

        # Test remapping #1 - Interpolation - Fully Inside
        fp_within_upscaled = fp.intersection(
            fp, scale=fp.scale / 2) & fp.erode(fp.rsemiminoraxis // 4)
        _test_resampling(fp_within_upscaled)

        # Test remapping #2 - Interpolation - Fully Outside
        _test_resampling(fp_within_upscaled.move(fp.br + fp.diagvec))

        # Test remapping #3 - No Interpolation - Fully Outside
        _test_resampling(fp.move(fp.br + fp.diagvec))

        # Test remapping #4 - Interpolation - Both in and out
        _test_resampling(
            fp_within_upscaled.move(fp.br - fp_within_upscaled.diagvec / 2))

        # Test remapping #5 - No Interpolation - Both in and out
        _test_resampling(fp.move(fp.br - fp.pxvec * fp.rsemiminoraxis))

        # Test remapping #6 - Interpolation - Fully Inside - Tiled
        r.close()
        r = _open(max_resampling_size=20)
        _test_resampling(fp_within_upscaled)

        # Concurrent queries that need a cache file checksum
        r.close()
        r = _open()
        for it in [r.iter_data(fps=[fp], band=-1) for _ in range(10)]:
            next(it)

        # Concurrent queries that need a cache file missing, all but one computation aborted
        # because already launched
        r.close()
        r = _open(ow=True)
        for it in [r.iter_data(fps=[fp], band=-1) for _ in range(10)]:
            next(it)

        # Query garbage collected
        it1 = r.iter_data(fps=[fp] * 2,
                          max_queue_size=1)  # 2/2 ready, 1/2 sinked
        it2 = r.iter_data(fps=[fp] * 1,
                          max_queue_size=1)  # 1/1 ready, 0/1 sinked
        it3 = r.iter_data(fps=[fp] * 2,
                          max_queue_size=1)  # 1/2 ready, 0/2 sinked
        next(it1)
        time.sleep(1 / 2)

        del it1, it2, it3
        gc.collect()
        time.sleep(1 / 2)
        r.get_data()  # This line will reraise any exception from scheduler

        # Raster closing during query
        it1 = r.iter_data(fps=[fp] * 2,
                          max_queue_size=1)  # 2/2 ready, 1/2 sinked
        it2 = r.iter_data(fps=[fp] * 1,
                          max_queue_size=1)  # 1/1 ready, 0/1 sinked
        it3 = r.iter_data(fps=[fp] * 2,
                          max_queue_size=1)  # 1/2 ready, 0/2 sinked
        next(it1)
        time.sleep(1 / 2)
        # Close Dataset instead of Raster, because Dataset.close is currently blocking

    with buzz.Dataset(allow_interpolation=1).close as ds:
        npr = ds.awrap_numpy_raster(
            fp,
            np.stack(fp.meshgrid_raster, axis=2).astype('float32'))

        # Corrupted cache file
        files = glob.glob(os.path.join(test_prefix, '*.tif'))
        mtimes0 = {f: os.stat(f).st_mtime for f in files}
        corrupted_path = files[0]
        _corrupt_files([corrupted_path])
        r = _open()

        r.get_data()
        mtimes1 = {f: os.stat(f).st_mtime for f in files}
        assert mtimes0.keys() == mtimes1.keys()
        for path in files:
            if path == corrupted_path:
                assert mtimes0[path] != mtimes1[path]
            else:
                assert mtimes0[path] == mtimes1[path]

    with buzz.Dataset(allow_interpolation=1).close as ds:
        npr = ds.awrap_numpy_raster(
            fp,
            np.stack(fp.meshgrid_raster, axis=2).astype('float32'))

        # In iter_data, the first one(s) don't need cache, the next ones need cache file checking and then recomputation
        _corrupt_files(glob.glob(os.path.join(test_prefix, '*.tif')))
        r = _open()
        fps = [
            fp.move(fp.br + fp.diagvec),  # Outside
        ] + [fp] * 12
        arrs = list(r.iter_data(band=-1, fps=fps))
        assert len(arrs) == 13
        for tile, arr in zip(fps, arrs):
            assert np.all(arr == npr.get_data(band=-1, fp=tile))

    with buzz.Dataset(allow_interpolation=1).close as ds:
        npr = ds.awrap_numpy_raster(
            fp,
            np.stack(fp.meshgrid_raster, axis=2).astype('float32'))

        # Test channels order versus numpy raster
        r = _open()
        for channels in [
                0,
                1,
                None,
                slice(None),
            [0, 1],
            [1, 0],
            [1, 0, 1],
        ]:
            assert np.all(
                r.get_data(channels=channels) == npr.get_data(
                    channels=channels))

    with buzz.Dataset(allow_interpolation=1).close as ds:
        # Derived and primitive rasters not computed
        if compute_same_address_space:
            ac0, ac1 = _AreaCounter(fp), _AreaCounter(fp)
        else:
            ac0, ac1 = None, None
        r0 = _open(
            compute_array=functools.partial(_base_computation,
                                            area_counter=ac0,
                                            reffp=fp),
            ow=True,
        )
        r1 = _open(
            compute_array=functools.partial(_derived_computation,
                                            area_counter=ac1,
                                            reffp=fp),
            queue_data_per_primitive={
                'prim': functools.partial(r0.queue_data, band=-1)
            },
            cache_dir=test_prefix2,
            ow=True,
        )
        assert len(r0.primitives) == 0
        assert len(r1.primitives) == 1
        assert r1.primitives['prim'] is r0

        r1.get_data()
        if compute_same_address_space:
            ac0.check_done()
            ac1.check_done()
        r0.close()
        r1.close()

        # Derived raster not computed
        if compute_same_address_space:
            ac0, ac1 = _AreaCounter(fp), _AreaCounter(fp)
        else:
            ac0, ac1 = None, None
        r0 = _open(
            compute_array=functools.partial(_base_computation,
                                            area_counter=ac0,
                                            reffp=fp),
            ow=False,
        )
        r1 = _open(
            compute_array=functools.partial(_derived_computation,
                                            area_counter=ac1,
                                            reffp=fp),
            queue_data_per_primitive={
                'prim': functools.partial(r0.queue_data, band=-1)
            },
            cache_dir=test_prefix2,
            ow=True,
        )
        r1.get_data()
        if compute_same_address_space:
            ac0.check_not_done()
            ac1.check_done()
        r0.close()
        r1.close()

        # Primitive raster not computed
        if compute_same_address_space:
            ac0, ac1 = _AreaCounter(fp), _AreaCounter(fp)
        else:
            ac0, ac1 = None, None
        r0 = _open(
            compute_array=functools.partial(_base_computation,
                                            area_counter=ac0,
                                            reffp=fp),
            ow=True,
        )
        r1 = _open(
            compute_array=functools.partial(_derived_computation,
                                            area_counter=ac1,
                                            reffp=fp),
            queue_data_per_primitive={
                'prim': functools.partial(r0.queue_data, band=-1)
            },
            cache_dir=test_prefix2,
            ow=False,
        )
        r1.get_data()
        if compute_same_address_space:
            ac0.check_not_done()
            ac1.check_not_done()
        r0.close()
        r1.close()

        # Test computation tiles
        if compute_same_address_space:
            ac0, ac1 = _AreaCounter(fp), _AreaCounter(fp)
        else:
            ac0, ac1 = None, None
        r0 = _open(
            compute_array=functools.partial(_base_computation,
                                            area_counter=ac0,
                                            reffp=fp),
            computation_tiles=(11, 11),
            ow=True,
        )
        r1 = _open(
            compute_array=functools.partial(_derived_computation,
                                            area_counter=ac1,
                                            reffp=fp),
            queue_data_per_primitive={
                'prim': functools.partial(r0.queue_data, band=-1)
            },
            cache_dir=test_prefix2,
            computation_tiles=(22, 22),
            ow=True,
        )
        r1.get_data()
        if compute_same_address_space:
            ac0.check_done()
            ac1.check_done()
        r0.close()
        r1.close()

        # Several queries, one is dropped, the rest is still working
        r0 = _open(
            compute_array=functools.partial(_base_computation, reffp=fp),
            ow=True,
        )
        r1 = _open(
            compute_array=functools.partial(_derived_computation, reffp=fp),
            queue_data_per_primitive={
                'prim': functools.partial(r0.queue_data, band=-1)
            },
            cache_dir=test_prefix2,
            ow=True,
        )
        t = r1.cache_tiles.flatten()
        fps0 = t.tolist() * 2
        fps1 = fps0[::-1]
        fps2 = np.roll(t, t.size // 2).tolist() * 2
        fps3 = fps2[::-1]

        it0 = r1.iter_data(fps=fps0)
        it1 = r1.iter_data(fps=fps1)
        it2 = r1.iter_data(fps=fps2)
        it3 = r1.iter_data(fps=fps3)
        del it1

        assert len(list(it3)) == t.size * 2
        assert len(list(it0)) == t.size * 2
        assert len(list(it2)) == t.size * 2

        r0.close()
        r1.close()

        # Computation function crashes, we catch error in main thread
        r = _open(ow=True, compute_array=_please_crash)
        with pytest.raises(NecessaryCrash):
            r.get_data()
Example #21
def test_vector():
    ds = buzz.Dataset(max_active=2)
    meta = dict(type='point', )

    def statuses(*args):
        l = tuple([(ds._back.idle_count(prox._back.uid),
                    ds._back.used_count(prox._back.uid), prox.active_count,
                    prox.active) for prox in args])
        return l

    assert (ds._back.idle_count(), ds._back.used_count(),
            ds.active_count) == (0, 0, 0)
    with ds.acreate_vector('/tmp/v1.shp', **meta).delete as r1:
        r1.insert_data([0, 0])

        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (1, 0, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (1, 0, 1, True)

        # Iteration 1 - exn
        it = r1.iter_data()
        next(it)
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 1, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 1, 1, True)

        try:
            next(it)
        except StopIteration:
            pass
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (1, 0, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (1, 0, 1, True)

        # Iteration 2 - close
        it = r1.iter_data()
        next(it)
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 1, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 1, 1, True)

        it.close()
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (1, 0, 1, True)

        # Iteration 3 - del
        it = r1.iter_data()
        next(it)
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 1, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 1, 1, True)

        del it
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (1, 0, 1, True)

        # Iteration 4 - try source.close
        it = r1.iter_data()
        next(it)
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 1, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 1, 1, True)

        with pytest.raises(Exception, match='deactivate'):
            r1.close()
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 1, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 1, 1, True)

        del it
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (1, 0, 1)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (1, 0, 1, True)

        # Iteration 5 - multi
        it0 = r1.iter_data()
        next(it0)
        it1 = r1.iter_data()
        next(it1)
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 2, 2)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 2, 2, True)

        it2 = r1.iter_data()
        with pytest.raises(RuntimeError, match='simultaneous'):
            next(it2)

        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 2, 2)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 2, 2, True)

        r1.activate()  # no effect
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (0, 2, 2)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (0, 2, 2, True)

        del it0
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (1, 1, 2)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (1, 1, 2, True)

        del it1
        assert (ds._back.idle_count(), ds._back.used_count(),
                ds.active_count) == (2, 0, 2)
        assert (ds._back.idle_count(r1._back.uid),
                ds._back.used_count(r1._back.uid), r1.active_count,
                r1.active) == (2, 0, 2, True)
Example #22
def test_raster(fps, random_path_tif):

    def _asserts(should_exist, should_be_open, is_anonymous=False):

        exist = os.path.isfile(random_path_tif)
        assert should_exist == exist

        if not is_anonymous:
            is_open_key = 'test' in ds
            is_open_prox = test in ds
            assert is_open_key == is_open_prox
            assert is_open_key == should_be_open
            if is_open_key:
                assert test is ds.test is ds['test']
        else:
            is_open = test in ds
            assert is_open == should_be_open

        if should_be_open and not is_anonymous:
            assert ds['test'] == ds.test
        if should_be_open:
            assert len(ds) == 1
        else:
            assert len(ds) == 0

    ds = buzz.Dataset()

    # Raster test 1
    test = None
    _asserts(False, False)
    test = ds.create_raster('test', random_path_tif, fps.A, float, 1)
    _asserts(True, True)
    ds.test.close()
    _asserts(True, False)

    test = ds.aopen_raster(random_path_tif)
    _asserts(True, True, True)
    test.close()
    _asserts(True, False)

    test = ds.open_raster('test', random_path_tif, mode='w')
    _asserts(True, True)
    test.remove()
    _asserts(False, False)

    # Raster test 2 - context/close
    with ds.create_raster('test', random_path_tif, fps.A, float, 1).close as test:
        _asserts(True, True)
    _asserts(True, False)
    with ds.open_raster('test', random_path_tif, mode='w').delete as test:
        _asserts(True, True)
    _asserts(False, False)

    # Raster test 3 - context/close/anonymous
    with ds.acreate_raster(random_path_tif, fps.A, float, 1).delete as test:
        _asserts(True, True, True)
    _asserts(False, False)

    # Raster test 4 - context/delete
    with ds.create_raster('test', random_path_tif, fps.A, float, 1).delete as test:
        _asserts(True, True)
    _asserts(False, False)

    # Raster test 5 - MEM
    with ds.create_raster('test', '', fps.A, float, 1, driver='MEM').close as test:
        _asserts(False, True)

    # Raster test 6 - numpy
    with ds.wrap_numpy_raster('test', fps.A, np.zeros(fps.A.shape)).close as test:
        _asserts(False, True)

    # Raster test 7 - gc
    del ds
    ws = weakref.WeakSet()
    def _test():
        ds = buzz.Dataset()
        assert len(ds) == len(ws) == 0

        prox = ds.create_raster('test', random_path_tif, fps.A, float, 1)
        ws.add(prox)
        assert len(ds) == len(ws) == 1

        prox = ds.acreate_raster('', fps.A, float, 1, driver='MEM')
        ws.add(prox)
        assert len(ds) == len(ws) == 2

        prox = ds.awrap_numpy_raster(fps.A, np.zeros(fps.A.shape))
        ws.add(prox)
        assert len(ds) == len(ws) == 3

        ws.add(ds)
        assert len(ws) == 4

    _test()
    gc.collect()
    assert len(ws) == 0
Example #23
def test_mode1(fps, shp1_path, tif1_path, shp2_path, tif2_path, shp3_path,
               tif3_path):
    ds = buzz.Dataset()
    ds.open_raster('tif1', tif1_path)
    ds.open_vector('shp1', shp1_path)
    ds.open_raster('tif2', tif2_path)
    ds.open_vector('shp2', shp2_path)
    ds.open_raster('tif3', tif3_path)
    ds.open_vector('shp3', shp3_path)

    # Test SR equality
    assert sreq(
        ds.tif1.wkt_virtual,
        ds.tif1.wkt_stored,
        ds.tif1.proj4_virtual,
        ds.tif1.proj4_stored,
        ds.shp1.wkt_virtual,
        ds.shp1.wkt_stored,
        ds.shp1.proj4_virtual,
        ds.shp1.proj4_stored,
    )
    assert sreq(
        ds.tif2.wkt_virtual,
        ds.tif2.wkt_stored,
        ds.tif2.proj4_virtual,
        ds.tif2.proj4_stored,
        ds.shp2.wkt_virtual,
        ds.shp2.wkt_stored,
        ds.shp2.proj4_virtual,
        ds.shp2.proj4_stored,
    )
    assert not sreq(ds.tif1.wkt_stored, ds.tif2.wkt_stored)
    assert (None == ds.wkt == ds.proj4 == ds.tif3.wkt_virtual ==
            ds.tif3.wkt_stored == ds.tif3.proj4_virtual == ds.tif3.proj4_stored
            == ds.shp3.wkt_virtual == ds.shp3.wkt_stored ==
            ds.shp3.proj4_virtual == ds.shp3.proj4_stored)

    # Test footprints equality
    assert fpeq(
        fps.AI,
        # tif/shp 1
        ds.tif1.fp,
        ds.tif1.fp_origin,
        buzz.Footprint.of_extent(ds.shp1.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.bounds[[0, 2, 1, 3]], fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),
        # tif/shp 2
        ds.tif2.fp,
        ds.tif2.fp_origin,
        buzz.Footprint.of_extent(ds.shp2.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp2.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp2.bounds[[0, 2, 1, 3]], fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp2.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),
        # tif/shp 3
        ds.tif3.fp,
        ds.tif3.fp_origin,
        buzz.Footprint.of_extent(ds.shp3.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp3.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp3.bounds[[0, 2, 1, 3]], fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp3.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),
    )

    # Test what's written in all 6 files
    tif1 = ds.tif1.get_data()
    tif2 = ds.tif2.get_data()
    tif3 = ds.tif3.get_data()
    assert np.all(tif1 == tif2)
    assert np.all(tif1 == tif3)
    for i, letter in enumerate(string.ascii_uppercase[:9]):
        # tif/shp 1
        shp1 = ds.shp1.get_data(i, None)
        raster1_polys = ds.tif1.fp.find_polygons(tif1 == ord(letter))
        assert len(raster1_polys) == 1
        assert (shp1 ^ raster1_polys[0]).is_empty

        # tif/shp 2
        shp2 = ds.shp2.get_data(i, None)
        raster2_polys = ds.tif2.fp.find_polygons(tif2 == ord(letter))
        assert len(raster2_polys) == 1
        assert (shp2 ^ raster2_polys[0]).is_empty

        # tif/shp 3
        shp3 = ds.shp3.get_data(i, None)
        raster3_polys = ds.tif3.fp.find_polygons(tif3 == ord(letter))
        assert len(raster3_polys) == 1
        assert (shp3 ^ raster3_polys[0]).is_empty
Example #24
def main():
    return  # None of the features shown here are implemented yet
    ds = buzz.Dataset(allow_interpolation=True)
    pixel_per_line = {
        'mand_100px': 10,
        'mand_10kpx': 100,
        'mand_1mpx': 1_000,
        'mand_100mpx': 10_000,
        'mand_10gpx': 100_000,
        'mand_1tpx': 1_000_000,
    }

    # Instantiate 6 fixed scale mandelbrot rasters
    for key, rwidth in pixel_per_line.items():
        # Create a Footprint that ranges from -2 to 2 on both x and y axes
        fp = buzz.Footprint(
            gt=(-2, 4 / rwidth, 0, -2, 0, 4 / rwidth),
            rsize=(rwidth, rwidth),
        )
        ds.create_raster_recipe(
            key,
            fp=fp,
            dtype='float32',
            channel_count=1,
            compute_array=mandelbrot_of_footprint,
            automatic_remapping=True,  # True is the default value
            max_computation_size=128,
        )

    # Instantiate 1 flexible scale mandelbrot raster
    # The fp parameter does not mean much when `automatic_remapping=False`, but
    # it is still mandatory.
    ds.create_raster_recipe(
        'mand',
        fp=ds.mand_10kpx.fp,
        dtype='float32',
        channel_count=1,
        compute_array=mandelbrot_of_footprint,
        automatic_remapping=False,
        max_computation_size=128,
    )

    # Test 1 - Perform basic tests ****************************************** **
    test_raster(ds.mand_100px)
    test_raster(ds.mand_10kpx)
    test_raster(ds.mand_1mpx)

    # Test 2 - Play with resampling with the non-flexible scale rasters ***** **
    fp100k = buzz.Footprint(
        gt=(-2, 4 / 316, 0, -2, 0, 4 / 316),
        rsize=(316, 316),
    )
    example_tools.show_several_images(
        ('10kpx', ds.mand_10kpx.fp, ds.mand_10kpx.get_data()),
        ('1mpx', ds.mand_1mpx.fp, ds.mand_1mpx.get_data()),
        ('10kpx to 100kpx', fp100k,
         ds.mand_10kpx.get_data(fp=fp100k)),  # upsample * 10
        ('1mpx to 100kpx', fp100k,
         ds.mand_1mpx.get_data(fp=fp100k)),  # downsample * 10
    )

    # Test 3 - Play with the flexible scale raster ********************* **
    example_tools.show_several_images(
        ('10kpx', ds.mand_10kpx.fp, ds.mand.get_data(fp=ds.mand_10kpx.fp)),
        ('1mpx', ds.mand_1mpx.fp, ds.mand.get_data(fp=ds.mand_1mpx.fp)),
    )

    # Test 4 - Zoom to a point ********************************************** **
    focus = shapely.geometry.Point(-1.1172, -0.221103)
    for key in pixel_per_line:
        fp = ds[key].fp
        fp = fp.dilate(250) & focus.buffer(fp.pxsizex * 250)
        arr = ds[key].get_data(fp=fp)
        title = f'{fp.rw}x{fp.rh} rect of the {key} image'
        example_tools.show_several_images((title, fp, arr))

    ds.close()
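The `gt` tuples above follow the GDAL geotransform convention: (top-left x, pixel width, row rotation, top-left y, column rotation, pixel height). A short sketch decoding the one used for the mandelbrot rasters; the positive y pixel size makes the footprint "complex" for buzzard, hence the Env flag (see also the reprojection example above):

import buzzard as buzz

rwidth = 100
with buzz.Env(allow_complex_footprint=True):
    # Origin at (-2, -2), square pixels of 4/rwidth, no rotation:
    # the footprint covers [-2, 2] on both axes.
    fp = buzz.Footprint(gt=(-2, 4 / rwidth, 0, -2, 0, 4 / rwidth),
                        rsize=(rwidth, rwidth))
print(fp.tl, fp.size)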
Example #25
def test_vector(fps, random_path_shp):

    def _asserts(should_exist, should_be_open, is_anonymous=False):

        exist = os.path.isfile(random_path_shp)
        assert should_exist == exist

        if not is_anonymous:
            is_open_key = 'test' in ds
            is_open_prox = test in ds
            assert is_open_key == is_open_prox
            assert is_open_key == should_be_open
            if is_open_key:
                assert test is ds.test is ds['test']
        else:
            is_open = test in ds
            assert is_open == should_be_open

        if should_be_open and not is_anonymous:
            assert ds['test'] == ds.test
        if should_be_open:
            assert len(ds) == 1
        else:
            assert len(ds) == 0

    ds = buzz.Dataset()

    # Vector test 1
    test = None
    _asserts(False, False)
    test = ds.create_vector('test', random_path_shp, 'polygon')
    _asserts(True, True)
    ds.test.close()
    _asserts(True, False)

    test = ds.aopen_vector(random_path_shp)
    _asserts(True, True, True)
    test.close()
    _asserts(True, False)

    test = ds.open_vector('test', random_path_shp, mode='w')
    _asserts(True, True)
    test.remove()
    _asserts(False, False)

    # Vector test 2 - context/close
    with ds.create_vector('test', random_path_shp, 'polygon').close as test:
        _asserts(True, True)
    _asserts(True, False)
    with ds.open_vector('test', random_path_shp, mode='w').delete as test:
        _asserts(True, True)
    _asserts(False, False)

    # Vector test 3 - context/close/anonymous
    with ds.acreate_vector(random_path_shp, 'polygon').delete as test:
        _asserts(True, True, True)
    _asserts(False, False)

    # Vector test 4 - context/delete
    with ds.create_vector('test', random_path_shp, 'polygon').delete as test:
        _asserts(True, True)
    _asserts(False, False)

    # Vector test 5 - MEM
    with ds.create_vector('test', '', 'polygon', driver='Memory').close as test:
        _asserts(False, True)

    # Vector test 6 - gc
    del ds
    ws = weakref.WeakSet()
    def _test():
        ds = buzz.Dataset()
        assert len(ds) == len(ws) == 0

        prox = ds.create_vector('test', random_path_shp, 'polygon')
        ws.add(prox)
        assert len(ds) == len(ws) == 1

        prox = ds.acreate_vector('', 'polygon', driver='Memory')
        ws.add(prox)
        assert len(ds) == len(ws) == 2

        ws.add(ds)
        assert len(ws) == 3

    _test()
    gc.collect()
    assert len(ws) == 0
Example #26
def main():
    return # None of the features shown here are implemented yet
    path = example_tools.create_random_elevation_gtiff()
    ds = buzz.Dataset()

    # Pool to parallelize:
    # - `ds.slopes` computations
    # - `ds.elevation` resamplings
    cpu_pool = mp.pool.ThreadPool(mp.cpu_count())

    # Pool to parallelize:
    # - `ds.elevation` disk reads
    io_pool = mp.pool.ThreadPool(4)

    ds.open_raster(
        'elevation',
        path=path,
        async_={'io_pool': io_pool, 'resample_pool': cpu_pool},
    )
    ds.create_raster_recipe(
        'slopes',
        computation_pool=cpu_pool,

        # The next 6 lines can be replaced by **buzz.algo.slopes(ds.elevation)
        fp=ds.elevation.fp,
        dtype='float32',
        channel_count=1,
        compute_array=slopes_of_elevation,
        queue_data_per_primitive={'dem': ds.elevation.queue_data},
        convert_footprint_per_primitive={'dem': lambda fp: fp.dilate(1)},
    )

    # Test 1 - Perform basic tests ****************************************** **
    # `test_raster` will request `slopes`'s' pixels. `elevation`'s' pixels will
    # be requested in cascade and then used to compute the `slopes`.
    test_raster(ds.slopes)

    # Test 2 - Multiple iterations at the same time ************************* **
    # Here the `elevation` raster is directly requested and also requested by
    # the `slopes`, the Dataset's scheduler is made to handle simultaneous
    # queries.
    tiles = ds.elevation.fp.tile_count(2, 2).flatten()
    dem_iterator = ds.elevation.iter_data(tiles)
    slopes_iterator = ds.slopes.iter_data(tiles)
    for tile, dem, slopes in zip(tiles, dem_iterator, slopes_iterator):
        print(f'Showing dem and slopes at:\n {tile}')
        example_tools.show_several_images(
            ('elevation (dem)', tile, dem),
            ('slopes', tile, slopes),
        )

    # Test 3 - Backpressure prevention ************************************** **
    tiles = ds.slopes.fp.tile_count(3, 3).flatten()

    print('Creating a slopes iterator on 9 tiles')
    it = ds.slopes.iter_data(tiles, max_queue_size=1)
    print('  At most 5 dem arrays can be ready between `ds.elevation` and '
          '`ds.slopes`')
    print('  At most 1 slopes array can be ready out of the slopes iterator')

    print('Sleeping several seconds to let the scheduler create 6 of the 9 '
          'dem arrays, and 1 of the 9 slopes arrays.')
    time.sleep(4)

    with example_tools.Timer() as t:
        arr = next(it)
    print(f'Getting the first array took {t}, this was instant because it was '
          'ready')

    with example_tools.Timer() as t:
        for _ in range(5):
            next(it)
    print(f'Getting the next 5 arrays took {t}, it was quick because the dems '
          'were ready')

    with example_tools.Timer() as t:
        for _ in range(3):
            next(it)
    print(f'Getting the last 3 arrays took {t}, it was long because nothing was'
          ' ready')

    # Cleanup *************************************************************** **
    ds.close()
    os.remove(path)
Example #27
def test_mode4(fps, shp1_path, tif1_path, shp2_path, tif2_path, shp3_path,
               tif3_path, random_path_shp, random_path_tif, env):
    ds = buzz.Dataset(sr_work=SR1['wkt'], sr_forced=SR2['wkt'])
    ds.open_raster('tif1', tif1_path)
    ds.open_vector('shp1', shp1_path)
    ds.open_raster('tif2', tif2_path)
    ds.open_vector('shp2', shp2_path)
    ds.open_raster('tif3', tif3_path)
    ds.open_vector('shp3', shp3_path)

    # Test file creation without spatial reference
    with buzz.Env(allow_complex_footprint=True):
        with ds.acreate_vector(random_path_shp, 'polygon', [],
                               sr=None).close as r:
            assert r.wkt_stored == None
            assert sreq(r.wkt_virtual, SR2['wkt'])
        with ds.acreate_raster(random_path_tif,
                               fps.AI,
                               'int32',
                               1, {},
                               sr=None).close as v:
            assert v.wkt_stored == None
            assert sreq(v.wkt_virtual, SR2['wkt'])

    # Test SR equality
    assert sreq(
        ds.wkt,
        ds.proj4,
        ds.tif1.wkt_stored,
        ds.tif1.proj4_stored,
        ds.shp1.wkt_stored,
        ds.shp1.proj4_stored,
    )
    assert sreq(
        ds.tif1.wkt_virtual,
        ds.tif1.proj4_virtual,
        ds.shp1.wkt_virtual,
        ds.shp1.proj4_virtual,
        ds.tif2.wkt_virtual,
        ds.tif2.proj4_virtual,
        ds.shp2.wkt_virtual,
        ds.shp2.proj4_virtual,
        ds.tif2.wkt_stored,
        ds.tif2.proj4_stored,
        ds.shp2.wkt_stored,
        ds.shp2.proj4_stored,
        ds.tif3.wkt_virtual,
        ds.tif3.proj4_virtual,
        ds.shp3.wkt_virtual,
        ds.shp3.proj4_virtual,
    )
    assert not sreq(ds.tif1.wkt_stored, ds.tif2.wkt_stored)
    assert (None == ds.tif3.wkt_stored == ds.tif3.proj4_stored ==
            ds.shp3.wkt_stored == ds.shp3.proj4_stored)

    # Test footprints equality
    assert fpeq(
        fps.AI,
        # tif/shp 1
        ds.tif1.fp_origin,
        buzz.Footprint.of_extent(ds.shp1.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),

        # tif/shp 2
        ds.tif2.fp_origin,
        buzz.Footprint.of_extent(ds.shp2.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp2.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),

        # tif/shp 3
        ds.tif3.fp_origin,
        buzz.Footprint.of_extent(ds.shp3.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp3.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),
    )
    assert fpeq(
        # tif/shp 1
        ds.tif1.fp,
        buzz.Footprint.of_extent(ds.shp1.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.bounds[[0, 2, 1, 3]], fps.AI.scale),

        # tif/shp 2
        ds.tif2.fp,
        buzz.Footprint.of_extent(ds.shp2.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp2.bounds[[0, 2, 1, 3]], fps.AI.scale),

        # tif/shp 3
        ds.tif3.fp,
        buzz.Footprint.of_extent(ds.shp3.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp3.bounds[[0, 2, 1, 3]], fps.AI.scale),
    )
    assert ds.tif1.fp != ds.tif1.fp_stored

    # Test what's written in all 6 files
    tif1 = ds.tif1.get_data()
    tif2 = ds.tif2.get_data()
    tif3 = ds.tif3.get_data()
    assert np.all(tif1 == tif2)
    assert np.all(tif1 == tif3)

    def f(x, y, z=None):
        return np.around(x, 6), np.around(y, 6)

    for i, letter in enumerate(string.ascii_uppercase[:9]):
        # tif/shp 1
        shp1 = ds.shp1.get_data(i, None)
        shp1 = shapely.ops.transform(f, shp1)

        raster1_polys = ds.tif1.fp.find_polygons(tif1 == ord(letter))
        assert len(raster1_polys) == 1
        raster1_poly = raster1_polys[0]
        raster1_poly = shapely.ops.transform(f, raster1_poly)

        assert (shp1 ^ raster1_poly).is_empty

        # tif/shp 2
        shp2 = ds.shp2.get_data(i, None)
        shp2 = shapely.ops.transform(f, shp2)

        raster2_polys = ds.tif2.fp.find_polygons(tif2 == ord(letter))
        assert len(raster2_polys) == 1
        raster2_poly = raster2_polys[0]
        raster2_poly = shapely.ops.transform(f, raster2_poly)

        assert (shp2 ^ raster2_poly).is_empty

        # tif/shp 3
        shp3 = ds.shp3.get_data(i, None)
        shp3 = shapely.ops.transform(f, shp3)

        raster3_polys = ds.tif3.fp.find_polygons(tif3 == ord(letter))
        assert len(raster3_polys) == 1
        raster3_poly = raster3_polys[0]
        raster3_poly = shapely.ops.transform(f, raster3_poly)

        assert (shp3 ^ raster3_poly).is_empty
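
The `(shp ^ raster_poly).is_empty` assertions above use shapely's `^` operator, i.e. the symmetric difference of two geometries, which is empty exactly when both shapes cover the same region. A standalone illustration:

import shapely.geometry

a = shapely.geometry.box(0, 0, 1, 1)
b = shapely.geometry.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
assert (a ^ b).is_empty  # identical shapes -> empty symmetric difference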
Example #28
def example():
    ds = buzz.Dataset()

    # Create a Footprint that ranges from -2 to 2 on both x and y axes
    fp = buzz.Footprint(
        gt=(-2, 4 / 10000, 0, -2, 0, 4 / 10000),
        rsize=(10000, 10000),
    )
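    # Assuming the GDAL geotransform convention
    # (tl_x, dx, rotation, tl_y, rotation, dy): the top-left corner sits at
    # (-2, -2) with a pixel size of 4/10000, so 10000 pixels span [-2, 2].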
    cache_tiling = fp.tile((512, 512), boundary_effect='shrink')
    computation_tiling = fp.tile((128, 128), boundary_effect='shrink')
    cached_recipe_params = dict(
        key='mand_100mpx',
        fp=fp,
        dtype='float32',
        channel_count=1,
        compute_array=mandelbrot_of_footprint,
        cache_dir=CACHE_DIR,
        cache_tiles=cache_tiling,
        computation_tiles=computation_tiling,
    )
    ds.create_cached_raster_recipe(**cached_recipe_params)

    # Test 1 - Timings before and after caching ***************************** **
    print('Test 1 - Read mandelbrot 100mpx twice and compare timings')
    fp = ds.mand_100mpx.fp
    fp = fp & shapely.geometry.Point(-1.1172, -0.221103).buffer(
        fp.pxsizex * 300)
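    # `fp & geometry` presumably intersects the Footprint with the shapely
    # buffer; a radius of 300 pixels yields a window of roughly 600x600 pixels
    # around the point of interest.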
    print(f'Getting Footprint at {fp.c}...')
    with example_tools.Timer() as t:
        ds.mand_100mpx.get_data(fp=fp)
    print(f'  took {t}')

    print(f'Getting Footprint at {fp.c}...')
    with example_tools.Timer() as t:
        ds.mand_100mpx.get_data(fp=fp)
    print(f'  took {t}')
    print('Tiles in `{}` directory:\n- {}'.format(
        CACHE_DIR,
        '\n- '.join(example_tools.list_cache_files_path_in_dir(CACHE_DIR)),
    ))
    print()

    # Test 2 - Corrupt one cache file and try to reuse the file ************* **
    print('Test 2 - Corrupt one cache file and try to reuse the file')
    ds.mand_100mpx.close()

    # Pick one cache file and append one byte to it
    one_cache_tile_path = example_tools.list_cache_files_path_in_dir(
        CACHE_DIR)[0]
    print(f'Corrupting {one_cache_tile_path}...')
    with open(one_cache_tile_path, 'ab') as f:
        f.write(b'\x42')
    ds.create_cached_raster_recipe(**cached_recipe_params)

    print(f'Getting Footprint at {fp.c}...')
    with example_tools.Timer() as t:
        arr = ds.mand_100mpx.get_data(fp=fp)
    print(f'  took {t}')

    example_tools.show_several_images((
        'part of mandelbrot 100 mega pixels',
        fp,
        arr,
    ))

    return  # The next features are not yet implemented

    # Test 4 - Colorize mandelbrot 100mpx ************************************ **
    ds.create_raster_recipe(
        'mand_red',
        fp=fp,
        dtype='uint8',
        channel_count=3,
        compute_array=colorize_mandelbrot,
        queue_data_per_primitive={'mand': ds.mand_100mpx.queue_data},
        computation_tiles=computation_tiling,
        automatic_remapping=False,
    )
    example_tools.show_several_images((
        'part of mandelbrot 100 mega pixels in red',
        fp,
        ds.mand_red.get_data(fp=fp),
    ))
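
`mandelbrot_of_footprint` is used as the `compute_array` callback above but is not shown here. A minimal sketch under two assumptions: buzzard passes the Footprint to compute as the first argument, and `fp.meshgrid_spatial` yields the world coordinates of pixel centers:

import numpy as np

def mandelbrot_of_footprint(fp, *_):
    """Hypothetical escape-time Mandelbrot renderer for a Footprint."""
    x, y = fp.meshgrid_spatial            # world coordinates, shape == fp.shape
    c = x + 1j * y
    z = np.zeros_like(c)
    out = np.zeros(fp.shape, 'float32')
    for _ in range(64):                   # fixed iteration budget for the sketch
        z = np.where(np.abs(z) <= 2.0, z * z + c, z)  # freeze diverged points
        out += np.abs(z) <= 2.0           # count iterations before divergence
    return out / 64.0                     # normalize to [0, 1]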
Example #29
def main():
    print("All images shown here belong to ESA/Hubble. See spacetelescope.org.\n")

    ds = buzz.Dataset(allow_interpolation=True)
    open_zoomable_rasters(ds, 'andromeda', overwrite=True)

    # Test 1 - Perform basic tests ****************************************** **
    print()
    print('Test 1 - Show andromeda with 3 resolutions')
    test_raster(ds.andromeda_zoom0)
    example_tools.show_several_images((
        'andromeda_zoom0', ds.andromeda_zoom0.fp,
        ds.andromeda_zoom0.get_data()
    ))
    print()

    test_raster(ds.andromeda_zoom1)
    example_tools.show_several_images((
        'andromeda_zoom1', ds.andromeda_zoom1.fp,
        ds.andromeda_zoom1.get_data()
    ))
    print()

    test_raster(ds.andromeda_zoom2)
    example_tools.show_several_images((
        'andromeda_zoom2', ds.andromeda_zoom2.fp,
        ds.andromeda_zoom2.get_data()
    ))
    print()

    # Test 2 - Test `get_data` timings ************************************** **
    print()
    print('Test 2 - Read andromeda 4 times and compare timings')
    with example_tools.Timer() as t:
        ds.andromeda_zoom5.get_data()
    print(f'Getting andromeda_zoom5 took {t}, download was performed')

    with example_tools.Timer() as t:
        ds.andromeda_zoom5.get_data()
    print(f'Getting andromeda_zoom5 took {t}, data was directly fetched from cache')

    print('Closing and opening andromeda rasters again...')
    ds.close()
    ds = buzz.Dataset(allow_interpolation=True)
    open_zoomable_rasters(ds, 'andromeda', overwrite=False)

    with example_tools.Timer() as t:
        ds.andromeda_zoom5.get_data()
    print(f'Getting andromeda_zoom5 took {t}, cache files validity was checked'
          ' and data was fetched from cache')

    with example_tools.Timer() as t:
        ds.andromeda_zoom5.get_data()
    print(f'Getting andromeda_zoom5 took {t}, data was directly fetched from cache')

    example_tools.show_several_images((
        'andromeda_zoom5', ds.andromeda_zoom5.fp,
        ds.andromeda_zoom5.get_data()
    ))

    # Test 3 **************************************************************** **
    print()
    print('Test 3 - Show monocerotis')
    open_zoomable_rasters(ds, 'monocerotis', overwrite=False)
    example_tools.show_several_images((
        'monocerotis_zoom3', ds.monocerotis_zoom3.fp,
        ds.monocerotis_zoom3.get_data()
    ))
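
`example_tools.Timer` is used throughout these examples but not shown. A minimal stand-in, assuming it is a simple wall-clock context manager that formats nicely in f-strings:

import time

class Timer:
    """Hypothetical stand-in for example_tools.Timer."""
    def __enter__(self):
        self._t0 = time.perf_counter()
        return self

    def __exit__(self, *exc):
        self._dt = time.perf_counter() - self._t0
        return False  # don't swallow exceptions

    def __str__(self):
        return f'{self._dt:.3f}s'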
Example #30
def test_mode2(fps, shp1_path, tif1_path, shp2_path, tif2_path, shp3_path,
               tif3_path, random_path_shp, random_path_tif, env):
    ds = buzz.Dataset(sr_work=SR1['wkt'])
    ds.open_raster('tif1', tif1_path)
    ds.open_vector('shp1', shp1_path)
    ds.open_raster('tif2', tif2_path)
    ds.open_vector('shp2', shp2_path)

    # Test file creation/opening without spatial reference
    with buzz.Env(allow_complex_footprint=True):
        with pytest.raises(ValueError, match='spatial refe'):
            ds.acreate_vector(random_path_shp, 'polygon', [], sr=None)
        with pytest.raises(ValueError, match='spatial refe'):
            ds.acreate_raster(random_path_tif, fps.AI, 'int32', 1, {}, sr=None)
        with pytest.raises(ValueError, match='spatial refe'):
            ds.aopen_raster(tif3_path)
        with pytest.raises(ValueError, match='spatial refe'):
            ds.aopen_vector(shp3_path)

    # Test SR equality
    assert sreq(
        ds.wkt,
        ds.proj4,
        ds.tif1.wkt_virtual,
        ds.tif1.wkt_stored,
        ds.tif1.proj4_virtual,
        ds.tif1.proj4_stored,
        ds.shp1.wkt_virtual,
        ds.shp1.wkt_stored,
        ds.shp1.proj4_virtual,
        ds.shp1.proj4_stored,
    )
    assert sreq(
        ds.tif2.wkt_virtual,
        ds.tif2.proj4_virtual,
        ds.shp2.wkt_virtual,
        ds.shp2.proj4_virtual,
        ds.tif2.wkt_stored,
        ds.tif2.proj4_stored,
        ds.shp2.wkt_stored,
        ds.shp2.proj4_stored,
    )
    assert not sreq(ds.tif1.wkt_stored, ds.tif2.wkt_stored)

    # Test footprints equality
    assert fpeq(
        fps.AI,
        # tif/shp 1
        ds.tif1.fp,
        ds.tif1.fp_origin,
        buzz.Footprint.of_extent(ds.shp1.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.bounds[[0, 2, 1, 3]], fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp1.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),

        # tif/shp 2
        ds.tif2.fp_origin,
        buzz.Footprint.of_extent(ds.shp2.extent_stored, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp2.bounds_stored[[0, 2, 1, 3]],
                                 fps.AI.scale),
    )
    assert fpeq(
        ds.tif2.fp,
        buzz.Footprint.of_extent(ds.shp2.extent, fps.AI.scale),
        buzz.Footprint.of_extent(ds.shp2.bounds[[0, 2, 1, 3]], fps.AI.scale),
    )
    assert ds.tif2.fp != ds.tif1.fp

    # Test file creation with/without conversion of footprint
    with buzz.Env(allow_complex_footprint=True):
        with ds.acreate_raster(random_path_tif,
                               fps.AI,
                               'int32',
                               1,
                               sr=SR1['wkt'],
                               ow=1).delete as r:
            assert fpeq(fps.AI, r.fp, r.fp_origin)
        with ds.acreate_raster(random_path_tif,
                               fps.AI,
                               'int32',
                               1,
                               sr=SR2['wkt'],
                               ow=1).delete as r:
            assert fpeq(
                fps.AI,
                r.fp,
            )
            assert fps.AI != r.fp_origin
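            # Presumably: created with SR2 while the Dataset works in SR1, the
            # raster's on-disk footprint (`fp_origin`) is reprojected, while
            # `r.fp` stays in the working frame and still equals fps.AI.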

    # Test what's written in all 4 files
    tif1 = ds.tif1.get_data()
    tif2 = ds.tif2.get_data()
    assert np.all(tif1 == tif2)

    def f(x, y, z=None):
        return np.around(x, 6), np.around(y, 6)

    for i, letter in enumerate(string.ascii_uppercase[:9]):
        # tif/shp 1
        shp1 = ds.shp1.get_data(i, None)
        shp1 = shapely.ops.transform(f, shp1)

        raster1_polys = ds.tif1.fp.find_polygons(tif1 == ord(letter))
        assert len(raster1_polys) == 1
        raster1_poly = raster1_polys[0]
        raster1_poly = shapely.ops.transform(f, raster1_poly)

        assert (shp1 ^ raster1_poly).is_empty

        # tif/shp 2
        shp2 = ds.shp2.get_data(i, None)
        shp2 = shapely.ops.transform(f, shp2)

        raster2_polys = ds.tif2.fp.find_polygons(tif2 == ord(letter))
        assert len(raster2_polys) == 1
        raster2_poly = raster2_polys[0]
        raster2_poly = shapely.ops.transform(f, raster2_poly)

        assert (shp2 ^ raster2_poly).is_empty
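
The `sreq` and `fpeq` helpers used by these tests are not shown. Hypothetical minimal versions, assuming `sreq` checks that all given spatial references (WKT or proj4 strings) are equivalent and `fpeq` checks that all given footprints compare equal:

from osgeo import osr

def sreq(*srs):
    """True if all spatial references (WKT or proj4 strings) are equivalent."""
    refs = []
    for s in srs:
        ref = osr.SpatialReference()
        if s.strip().startswith('+'):   # crude proj4-vs-WKT heuristic
            ref.ImportFromProj4(s)
        else:
            ref.ImportFromWkt(s)
        refs.append(ref)
    return all(bool(refs[0].IsSame(ref)) for ref in refs[1:])

def fpeq(*fps):
    """True if all footprints are equal to the first one."""
    return all(fp == fps[0] for fp in fps[1:])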