def test_new_datasource_fallback():
    """new_datasource() should fall back to RasterDatasetDataSource for file URIs,
    both with an explicit format and with format=None."""
    bands = [dict(name='green', path='')]

    ds = mk_sample_dataset(bands, 'file:///foo', format='GeoTiff')
    assert ds.uri_scheme == 'file'

    reader = new_datasource(BandInfo(ds, 'green'))
    assert reader is not None
    assert isinstance(reader, RasterDatasetDataSource)

    # A dataset whose format is None should still resolve to the raster source
    ds_no_fmt = mk_sample_dataset(bands, 'file:///file', format=None)
    reader = new_datasource(BandInfo(ds_no_fmt, 'green'))
    assert reader is not None
    assert isinstance(reader, RasterDatasetDataSource)
# Example #2
# 0
def read_data(datasets, measurements, geobox, use_overviews=False, **kwargs):
    """Load measurement data for ``datasets`` over ``geobox``.

    :param datasets: a single dataset or an iterable of datasets
    :param measurements: measurement descriptors (support ``['name']``,
        ``['nodata']``, ``['dtype']`` item access and ``.dataarray_attrs()``)
    :param geobox: target geobox supplying coordinates, dimensions and crs
    :param use_overviews: when True read via per-band datasources
        (overview-capable path); otherwise defer to ``Datacube.load_data``
    :param kwargs: passed through to ``Datacube.load_data`` (non-overview path)
    :return: an ``xarray.Dataset`` of the requested measurements
    """
    # pylint: disable=too-many-locals, dict-keys-not-iterating
    if not hasattr(datasets, "__iter__"):
        datasets = [datasets]

    # Wrap the dataset list in a 0-d object array so it can act as the
    # "sources" DataArray that datacube's loading machinery expects.
    holder = numpy.empty(shape=tuple(), dtype=object)
    holder[()] = datasets
    sources = xarray.DataArray(holder)

    if not use_overviews:
        return datacube.Datacube.load_data(sources, geobox, measurements,
                                           **kwargs)

    all_bands = xarray.Dataset()
    for name, coord in geobox.coordinates.items():
        all_bands[name] = (name, coord.values, {'units': coord.units})

    for measurement in measurements:
        datasources = [
            new_datasource(d, measurement['name']) for d in datasets
        ]
        # Sort by dataset id for a deterministic read/merge order.
        datasources = sorted(datasources, key=lambda x: x._dataset.id)
        data = _get_measurement(datasources, geobox, measurement['nodata'],
                                measurement['dtype'])
        coords = OrderedDict(
            (dim, sources.coords[dim]) for dim in sources.dims)
        dims = tuple(coords.keys()) + tuple(geobox.dimensions)
        all_bands[measurement['name']] = (dims, data,
                                          measurement.dataarray_attrs())

    all_bands.attrs['crs'] = geobox.crs
    # Fix: the original called .load() twice in a row, discarding the first
    # result; one call suffices to materialize the data.
    return all_bands.load()
# Example #3
# 0
def test_new_datasource_s3():
    """new_datasource() should hand back an S3DataSource for s3-format datasets."""
    pytest.importorskip('datacube.drivers.s3.storage.s3aio.s3lio')

    from datacube.drivers.s3 import driver as s3_driver
    from datacube.drivers.s3.datasource import S3DataSource

    band_defs = [dict(name='green', path='')]
    sample = mk_sample_dataset(band_defs,
                               s3_driver.PROTOCOL + ':///foo',
                               format=s3_driver.FORMAT)

    # Attach fake s3 metadata so the datasource can be constructed
    fake_s3 = S3_dataset(macro_shape=(10, 12))
    sample.s3_metadata = {'green': {'s3_dataset': fake_s3}}

    assert sample.format == s3_driver.FORMAT
    assert sample.uri_scheme == s3_driver.PROTOCOL

    source = new_datasource(sample, 'green')
    assert source is not None
    assert isinstance(source, S3DataSource)