def check_open_with_dc(index):
    """Smoke-test ``Datacube.load`` over the ingested ``ls5_nbar_albers``
    product: stacked loads, spatial subsetting, ``group_by='solar_day'``,
    and the product/measurement listings.

    Fix: band selection must be passed as ``measurements=`` — ``Datacube.load``
    has no ``variables`` parameter (the rest of this file uses ``measurements``),
    so the old ``variables=['blue']`` kwarg was silently consumed as a search
    filter and never restricted the load.

    :param index: opened datacube index with ls5_nbar_albers ingested
    """
    from datacube.api.core import Datacube
    dc = Datacube(index=index)

    # stacked load of a single band -> DataArray with a 'variable' dimension
    data_array = dc.load(product='ls5_nbar_albers', measurements=['blue'],
                         stack='variable')
    assert data_array.shape

    # stacked load restricted to a lat/lon window
    data_array = dc.load(product='ls5_nbar_albers',
                         latitude=(-34, -35), longitude=(149, 150),
                         stack='variable')
    assert data_array.shape

    # plain Dataset loads
    dataset = dc.load(product='ls5_nbar_albers', measurements=['blue'])
    assert dataset['blue'].size

    dataset = dc.load(product='ls5_nbar_albers',
                      latitude=(-35.2, -35.3), longitude=(149.1, 149.2))
    assert dataset['blue'].size

    # grouping observations by solar day should at least succeed
    data_array = dc.load(product='ls5_nbar_albers',
                         latitude=(-34, -35), longitude=(149, 150),
                         measurements=['blue'], group_by='solar_day')

    # index listings should contain both ingested products
    products_df = dc.list_products()
    assert len(products_df)
    assert len(products_df[products_df['name'].isin(['ls5_nbar_albers'])])
    assert len(products_df[products_df['name'].isin(['ls5_pq_albers'])])

    assert len(dc.list_measurements())
def check_legacy_open(index):
    """Exercise the low-level find_datasets / group_datasets / load_data path
    and check it reproduces a plain ``Datacube.load``, both eagerly and lazily.

    :param index: opened datacube index with ls5_nbar_albers ingested
    """
    from datacube.api.core import Datacube
    dc = Datacube(index=index)

    # reference result via the high-level API
    reference = dc.load(product='ls5_nbar_albers',
                        measurements=['blue'],
                        time='1992-03-23T23:14:25.500000',
                        use_threads=True)
    assert reference['blue'].shape[0] == 1
    assert (reference.blue != -999).any()

    # force fusing load by duplicating dataset
    datasets = dc.find_datasets(product='ls5_nbar_albers',
                                time='1992-03-23T23:14:25.500000')
    assert len(datasets) == 1
    datasets = datasets * 2

    grouped = dc.group_datasets(datasets, query_group_by('time'))
    geobox = reference.geobox
    bands = [datasets[0].type.measurements['blue']]

    # eager low-level load must match the high-level result exactly
    fused = dc.load_data(grouped, geobox, bands)
    assert (fused == reference).all()

    # lazy (dask-chunked) low-level load must match as well
    with rasterio.Env():
        fused_lazy = dc.load_data(grouped, geobox, bands,
                                  dask_chunks={'time': 1})
        assert fused_lazy['blue'].data.dask
        assert fused_lazy.blue[0, :, :].equals(fused.blue[0, :, :])
def check_load_via_dss(index):
    """Loading via an explicit ``datasets=`` argument (list or iterator) must
    reproduce the equivalent product-query load, and omitting both ``product``
    and ``datasets`` must raise ``ValueError``.

    :param index: opened datacube index with ls5_nbar_albers ingested
    """
    dc = Datacube(index=index)

    dss = dc.find_datasets(product='ls5_nbar_albers')
    assert len(dss) > 0

    by_product = dc.load(product='ls5_nbar_albers', measurements=['blue'])

    # both a concrete list and a one-shot iterator of datasets must work
    for datasets in (dss, iter(dss)):
        by_datasets = dc.load(datasets=datasets, measurements=['blue'])
        assert by_product.blue.shape
        assert (by_product.blue != -999).any()
        assert (by_product.blue == by_datasets.blue).all()

    # no product and no datasets -> nothing to load
    with pytest.raises(ValueError):
        dc.load(measurements=['blue'])
def check_open_with_dc(index):
    """End-to-end checks of ``Datacube.load`` over the ingested
    ``ls5_nbar_albers`` product: stacked loads, temporal/spatial subsetting,
    lazy (dask) loads vs eager results, ``like=``/``align=`` reloads,
    product/measurement listings, and relative smoothness of the available
    resampling methods.

    :param index: opened datacube index with ls5_nbar_albers (and
        ls5_pq_albers) ingested
    """
    from datacube.api.core import Datacube
    dc = Datacube(index=index)

    # -999 is presumably the band's nodata fill — "!= -999" asserts that some
    # real pixels were actually read (TODO confirm against product definition)
    data_array = dc.load(product='ls5_nbar_albers', measurements=['blue'],
                         stack='variable')
    assert data_array.shape
    assert (data_array != -999).any()

    # an exact-timestamp query should match exactly one time slice
    data_array = dc.load(product='ls5_nbar_albers', measurements=['blue'],
                         time='1992-03-23T23:14:25.500000')
    assert data_array['blue'].shape[0] == 1
    assert (data_array.blue != -999).any()

    # a point (scalar lat/lon) query yields a single pixel in y/x
    data_array = dc.load(product='ls5_nbar_albers', measurements=['blue'],
                         latitude=-35.3, longitude=149.1)
    assert data_array['blue'].shape[1:] == (1, 1)
    assert (data_array.blue != -999).any()

    # stacked load without a measurement list: 4-D with a 'variable' dimension
    data_array = dc.load(product='ls5_nbar_albers',
                         latitude=(-35, -36), longitude=(149, 150),
                         stack='variable')
    assert data_array.ndim == 4
    assert 'variable' in data_array.dims
    assert (data_array != -999).any()

    # lazy dask-backed stacked load must agree with the eager result above
    with rasterio.Env():
        lazy_data_array = dc.load(product='ls5_nbar_albers',
                                  latitude=(-35, -36), longitude=(149, 150),
                                  stack='variable',
                                  dask_chunks={'time': 1, 'x': 1000, 'y': 1000})
        assert lazy_data_array.data.dask
        assert lazy_data_array.ndim == data_array.ndim
        assert 'variable' in lazy_data_array.dims
        # window straddles the 1000-pixel chunk boundary to exercise stitching
        assert lazy_data_array[1, :2, 950:1050, 950:1050].equals(
            data_array[1, :2, 950:1050, 950:1050])

    # plain Dataset loads
    dataset = dc.load(product='ls5_nbar_albers', measurements=['blue'])
    assert dataset['blue'].size

    dataset = dc.load(product='ls5_nbar_albers',
                      latitude=(-35.2, -35.3), longitude=(149.1, 149.2))
    assert dataset['blue'].size

    # lazy Dataset load must agree with the eager Dataset load
    with rasterio.Env():
        lazy_dataset = dc.load(product='ls5_nbar_albers',
                               latitude=(-35.2, -35.3), longitude=(149.1, 149.2),
                               dask_chunks={'time': 1})
        assert lazy_dataset['blue'].data.dask
        assert lazy_dataset.blue[:2, :100, :100].equals(dataset.blue[:2, :100, :100])
        assert lazy_dataset.isel(time=slice(0, 2),
                                 x=slice(950, 1050),
                                 y=slice(950, 1050)).equals(
            dataset.isel(time=slice(0, 2), x=slice(950, 1050), y=slice(950, 1050)))

    # like= reloads onto the grid/grouping of an existing result
    dataset_like = dc.load(product='ls5_nbar_albers', measurements=['blue'],
                           like=dataset)
    assert (dataset.blue == dataset_like.blue).all()

    # group_by='solar_day' load should at least succeed (result unused)
    data_array = dc.load(product='ls5_nbar_albers',
                         latitude=(-35, -36), longitude=(149, 150),
                         measurements=['blue'], group_by='solar_day')

    # align=(5, 20): pixel-grid origin offset — y translation (affine.f) is
    # congruent to 5 mod the pixel height, x translation (affine.c) to 20
    # mod the pixel width
    dataset = dc.load(product='ls5_nbar_albers',
                      latitude=(-35.2, -35.3), longitude=(149.1, 149.2),
                      align=(5, 20))
    assert dataset.geobox.affine.f % abs(dataset.geobox.affine.e) == 5
    assert dataset.geobox.affine.c % abs(dataset.geobox.affine.a) == 20
    dataset_like = dc.load(product='ls5_nbar_albers', measurements=['blue'],
                           like=dataset)
    assert (dataset.blue == dataset_like.blue).all()

    # index listings should contain both ingested products
    products_df = dc.list_products()
    assert len(products_df)
    assert len(products_df[products_df['name'].isin(['ls5_nbar_albers'])])
    assert len(products_df[products_df['name'].isin(['ls5_pq_albers'])])

    assert len(dc.list_measurements())

    resamp = ['nearest', 'cubic', 'bilinear', 'cubic_spline', 'lanczos', 'average']
    results = {}

    def calc_max_change(da):
        # Roughness metric: largest pixel-to-pixel jump along the horizontal
        # midline plus the largest jump along the vertical centerline.
        # Smoother resampling methods should yield smaller values.
        midline = int(da.shape[0] * 0.5)
        a = int(abs(da[midline, :-1].data - da[midline, 1:].data).max())

        centerline = int(da.shape[1] * 0.5)
        b = int(abs(da[:-1, centerline].data - da[1:, centerline].data).max())
        return a + b

    # Reproject a tiny window to EPSG:4326 at very fine resolution with each
    # resampling method and record its roughness.
    for resamp_meth in resamp:
        dataset = dc.load(product='ls5_nbar_albers', measurements=['blue'],
                          latitude=(-35.28, -35.285), longitude=(149.15, 149.155),
                          output_crs='EPSG:4326',
                          resolution=(-0.0000125, 0.0000125),
                          resampling=resamp_meth)
        results[resamp_meth] = calc_max_change(dataset.blue.isel(time=0))

    # interpolating methods must be smoother than their blockier counterparts
    assert results['cubic_spline'] < results['nearest']
    assert results['lanczos'] < results['average']