Code example #1
def check_legacy_open(index):
    import rasterio
    from datacube.api.core import Datacube
    from datacube.api.query import query_group_by

    dc = Datacube(index=index)

    # Load a single time slice of the 'blue' band, using threaded I/O
    data_array = dc.load(product='ls5_nbar_albers',
                         measurements=['blue'],
                         time='1992-03-23T23:14:25.500000',
                         use_threads=True)
    assert data_array['blue'].shape[0] == 1
    assert (data_array.blue != -999).any()  # -999 is the nodata fill value

    # force fusing load by duplicating dataset
    dss = dc.find_datasets(product='ls5_nbar_albers',
                           time='1992-03-23T23:14:25.500000')

    assert len(dss) == 1

    dss = dss * 2  # two copies of the same dataset end up in one time group
    sources = dc.group_datasets(dss, query_group_by('time'))

    gbox = data_array.geobox
    mm = [dss[0].type.measurements['blue']]
    # The low-level load over duplicated sources must fuse back to the
    # same pixels as the high-level dc.load() above
    xx = dc.load_data(sources, gbox, mm)
    assert (xx == data_array).all()

    with rasterio.Env():
        # Same load, but lazy: one dask chunk per time slice
        xx_lazy = dc.load_data(sources, gbox, mm, dask_chunks={'time': 1})
        assert xx_lazy['blue'].data.dask  # dask-backed, nothing read yet
        assert xx_lazy.blue[0, :, :].equals(xx.blue[0, :, :])
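
The lazy variant above defers all pixel reads: with dask_chunks supplied,
load_data builds one dask chunk per time slice instead of reading pixels
up front. A minimal sketch of how such a result is typically consumed,
reusing dc, sources, gbox and mm from the example (the names lazy, subset
and eager are illustrative):

# Sketch: consuming a lazy, dask-backed load (assumes dc, sources, gbox
# and mm from the example above)
lazy = dc.load_data(sources, gbox, mm, dask_chunks={'time': 1})
subset = lazy.blue[0, :, :]  # slicing stays lazy, still no I/O
eager = subset.compute()     # triggers the read, returns numpy-backed data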
Code example #2
def check_load_via_dss(index):
    import pytest
    from datacube.api.core import Datacube

    dc = Datacube(index=index)

    dss = dc.find_datasets(product='ls5_nbar_albers')
    assert len(dss) > 0

    # Loading by product= and by an explicit datasets= list must agree
    xx1 = dc.load(product='ls5_nbar_albers', measurements=['blue'])
    xx2 = dc.load(datasets=dss, measurements=['blue'])
    assert xx1.blue.shape
    assert (xx1.blue != -999).any()
    assert (xx1.blue == xx2.blue).all()

    # datasets= also accepts a lazy iterable, not just a list
    xx2 = dc.load(datasets=iter(dss), measurements=['blue'])
    assert xx1.blue.shape
    assert (xx1.blue != -999).any()
    assert (xx1.blue == xx2.blue).all()

    # Supplying neither product= nor datasets= is an error
    with pytest.raises(ValueError):
        dc.load(measurements=['blue'])
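
Passing datasets= lets the caller pre-filter exactly which datasets get
loaded, which product= alone cannot express. A minimal sketch of that
pattern, with product and band names taken from the test above (the
center_time year filter is illustrative):

# Sketch: pre-filter datasets before loading (assumes dc as above)
dss = dc.find_datasets(product='ls5_nbar_albers')
picked = [ds for ds in dss if ds.center_time.year == 1992]  # custom filter
xx = dc.load(datasets=picked, measurements=['blue'])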
Code example #3
def test_end_to_end(clirunner, index, testdata_dir, ingest_configs,
                    datacube_env_name):
    """
    Loads two dataset configurations, then ingests a sample Landsat 5 scene

    One dataset configuration specifies Australian Albers Equal Area Projection,
    the other is simply latitude/longitude.

    The input dataset should be recorded in the index, and two sets of storage units
    should be created on disk and recorded in the index.
    """

    # LBG_NBAR, LBG_PQ and LS5_DATASET_TYPES are module-level constants
    # (test-data names / config path) defined elsewhere in the test module
    lbg_nbar = testdata_dir / 'lbg' / LBG_NBAR
    lbg_pq = testdata_dir / 'lbg' / LBG_PQ
    ls5_nbar_albers_ingest_config = testdata_dir / ingest_configs[
        'ls5_nbar_albers']
    ls5_pq_albers_ingest_config = testdata_dir / ingest_configs['ls5_pq_albers']

    # Add the LS5 Dataset Types
    clirunner(['-v', 'product', 'add', str(LS5_DATASET_TYPES)])

    # Index the Datasets
    #  - do a dry run first to increase test coverage
    clirunner(
        ['-v', 'dataset', 'add', '--dry-run',
         str(lbg_nbar),
         str(lbg_pq)])

    #  - do actual indexing
    clirunner(['-v', 'dataset', 'add', str(lbg_nbar), str(lbg_pq)])

    #  - re-adding is a no-op, but exercises the --confirm-ignore-lineage path
    clirunner([
        '-v', 'dataset', 'add', '--confirm-ignore-lineage',
        str(lbg_nbar),
        str(lbg_pq)
    ])

    # Test no-op update
    for policy in ['archive', 'forget', 'keep']:
        clirunner([
            '-v', 'dataset', 'update', '--dry-run', '--location-policy',
            policy,
            str(lbg_nbar),
            str(lbg_pq)
        ])

        # Test an update where no changes are needed
        clirunner([
            '-v', 'dataset', 'update', '--location-policy', policy,
            str(lbg_nbar),
            str(lbg_pq)
        ])

    # TODO: test location update
    # 1. Make a copy of a file
    # 2. Call dataset update with archive/forget
    # 3. Check location

    # Ingest NBAR
    clirunner(['-v', 'ingest', '-c', str(ls5_nbar_albers_ingest_config)])

    # Ingest PQ
    clirunner(['-v', 'ingest', '-c', str(ls5_pq_albers_ingest_config)])

    dc = Datacube(index=index)
    assert isinstance(str(dc), str)
    assert isinstance(repr(dc), str)

    with pytest.raises(ValueError):
        dc.find_datasets(time='2019')  # no product supplied, raises exception

    # check_load_via_dss is code example #2 above; the other helpers are
    # defined elsewhere in the test module
    check_open_with_dc(index)
    check_open_with_grid_workflow(index)
    check_load_via_dss(index)
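
The CLI steps in this test are order-dependent: 'product add' must run
before 'dataset add' (each dataset references a product definition), and
both before 'ingest'. A minimal post-ingest sanity check in plain Python,
with the product name taken from the ingest configs above:

# Sketch: verify the ingested product is present and queryable
# (assumes the same index as in the test)
from datacube import Datacube

dc = Datacube(index=index)
assert dc.index.products.get_by_name('ls5_nbar_albers') is not None
assert len(dc.find_datasets(product='ls5_nbar_albers')) > 0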