def test_new_xr_load(data_folder):
    """Load two bands of ``test.tif`` through the new rio driver via
    ``xr_load`` and check the result matches a direct ``rio_slurp`` read.

    Also verifies (via ``tee_new_load_context``) that the loader reports
    exactly one band-info record per requested measurement.
    """
    base = "file://" + str(data_folder) + "/metadata.yml"

    rdr = mk_rio_driver()
    assert rdr is not None

    _bands = []

    def band_info_collector(bands, ctx):
        # Record every band the loader touches so we can assert on the count.
        _bands.extend(bands)

    tee_new_load_context(rdr, band_info_collector)

    # Band 'a' defaults to band index 1; band 'b' explicitly reads band 2.
    band_a = dict(name='a', path='test.tif')
    band_b = dict(name='b', band=2, path='test.tif')

    ds = mk_sample_dataset([band_a, band_b], base)

    sources = Datacube.group_datasets([ds], 'time')

    # Reference read straight off disk; meta.gbox supplies the load geobox.
    im, meta = rio_slurp(str(data_folder) + '/test.tif')
    measurements = [ds.type.measurements[n] for n in ('a', 'b')]

    xx, _ = xr_load(sources, meta.gbox, measurements, rdr)

    # One band-info record per requested measurement.
    assert len(_bands) == 2

    assert im[0].shape == xx.a.isel(time=0).shape
    assert im[1].shape == xx.b.isel(time=0).shape

    np.testing.assert_array_equal(im[0], xx.a.values[0])
    np.testing.assert_array_equal(im[1], xx.b.values[0])
# Example 2
def check_legacy_open(index):
    """Smoke-test the legacy Datacube load path against an indexed scene.

    Loads the 'blue' band of a single ls5_nbar_albers acquisition, then
    forces the fusing code path by duplicating the dataset, and checks
    that eager and dask-lazy loads agree.
    """
    from datacube.api.core import Datacube
    dc = Datacube(index=index)

    data_array = dc.load(product='ls5_nbar_albers',
                         measurements=['blue'],
                         time='1992-03-23T23:14:25.500000',
                         use_threads=True)

    # Exactly one time slice, and at least some valid (non-nodata) pixels.
    assert data_array['blue'].shape[0] == 1
    assert (data_array.blue != -999).any()

    datasets = dc.find_datasets(product='ls5_nbar_albers',
                                time='1992-03-23T23:14:25.500000')
    assert len(datasets) == 1

    # force fusing load by duplicating dataset
    datasets = datasets * 2
    grouped = dc.group_datasets(datasets, query_group_by('time'))

    geobox = data_array.geobox
    measurements = [datasets[0].type.measurements['blue']]

    eager = dc.load_data(grouped, geobox, measurements)
    assert (eager == data_array).all()

    with rasterio.Env():
        lazy = dc.load_data(grouped, geobox, measurements,
                            dask_chunks={'time': 1})
        # Lazy result must actually be dask-backed, and match the eager load.
        assert lazy['blue'].data.dask
        assert lazy.blue[0, :, :].equals(eager.blue[0, :, :])