Example 1
def foo():
    """Build the WOfS virtual product over ls8_ard and fetch one dataset lazily."""
    # Bands the WOfS transform consumes internally: [green, red, nir, swir1, swir2]
    recipe = yaml.safe_load('''
    transform: wofs.virtualproduct.Wofs
    input:
        product: ls8_ard
        measurements: [nbart_blue, nbart_green, nbart_red, nbart_nir, nbart_swir_1, nbart_swir_2, fmask]
    ''')
    wofs_product = construct(**recipe)

    # [odc_conf_test] -
    # db_hostname: agdcdev-db.nci.org.au
    # db_port: 6432
    # db_database: odc_conf_test

    # [ard_interop] - collection upgrade DB
    # db_hostname: agdcstaging-db.nci.org.au
    # db_port:     6432
    # db_database: ard_interop

    dc = datacube.Datacube(env="odc_conf_test")

    # Resolve a single dataset by id, then grid it onto the target CRS/resolution.
    bag = wofs_product.query(dc=dc, id='be43b7ce-c421-4c16-826d-a508f3e3d984')
    grouped = wofs_product.group(bag,
                                 output_crs='EPSG:28355',
                                 resolution=(-25, 25))

    # NOTE(review): return value was discarded in the original as well —
    # presumably called for its recipe-validation side effect; confirm.
    wofs_product.output_measurements(bag.product_definitions)

    # Dask-backed load: nothing is computed until the data is actually used.
    loaded = wofs_product.fetch(grouped, dask_chunks={'x': 1000, 'y': 1000})

    print(loaded)
Example 2
def test_to_float(dc, query):
    """The to_float transform must cast the band to float32 with NaN nodata."""
    # Fix: yaml.load without an explicit Loader is deprecated (unsafe before
    # PyYAML 5.1, a TypeError from PyYAML 6); safe_load suffices for this
    # plain-scalar recipe.
    to_float_recipe = yaml.safe_load("""
        transform: to_float
        input:
            product: ls8_nbar_albers
            measurements: [blue]
    """)

    to_float = construct(**to_float_recipe)

    # Patch the Datacube used inside the virtual-product machinery so loading
    # goes through the stubbed load_data/group_datasets fixtures.
    with mock.patch('datacube.virtual.impl.Datacube') as mock_datacube:
        mock_datacube.load_data = load_data
        mock_datacube.group_datasets = group_datasets
        data = to_float.load(dc, **query)

    # The stubbed pixels are all nodata, so every value is NaN after the cast.
    assert numpy.all(numpy.isnan(data.blue.values))
    assert data.blue.dtype == 'float32'
Example 3
def test_rename(dc, query):
    """The rename transform must rename 'green' to 'verde' and leave 'blue' alone."""
    # Fix: yaml.load without an explicit Loader is deprecated (unsafe before
    # PyYAML 5.1, a TypeError from PyYAML 6); safe_load suffices here.
    rename_recipe = yaml.safe_load("""
        transform: rename
        measurement_names:
            green: verde
        input:
            product: ls8_nbar_albers
            measurements: [blue, green]
    """)

    rename = construct(**rename_recipe)

    # Patch the Datacube used inside the virtual-product machinery so loading
    # goes through the stubbed load_data/group_datasets fixtures.
    with mock.patch('datacube.virtual.impl.Datacube') as mock_datacube:
        mock_datacube.load_data = load_data
        mock_datacube.group_datasets = group_datasets
        data = rename.load(dc, **query)

    # Only the mapped name changes; unmapped bands keep their original names.
    assert 'verde' in data
    assert 'blue' in data
    assert 'green' not in data
Example 4
def cal_mean_std(query_poly):
    """Compute per-time-slice darkest means over *query_poly*, fanned out over MPI.

    Loads the Landsat virtual product described by nbart_ld.yaml for 1987-2000,
    masks to the polygon intersected with the coastline, submits one load_cal
    task per time slice to an MPIPoolExecutor, and collects the results.

    Returns a pair of parallel lists ``(time_mark, darkest_mean)``; both empty
    when the combined mask has no nonzero pixels.

    NOTE(review): query_poly is presumably a GeoJSON-like geometry in
    EPSG:3577 (it is wrapped in Geometry(..., CRS("EPSG:3577")) below and fed
    to generate_raster) — confirm against callers.
    """
    # Recipe describing the Landsat virtual product to load.
    landsat_yaml = 'nbart_ld.yaml'
    with open(landsat_yaml, 'r') as f:
        recipe = yaml.safe_load(f)
    landsat_product = construct(**recipe)
    query = {'time': ('1987-01-01', '2000-01-01')}
    location = {'geopolygon': Geometry(query_poly, CRS("EPSG:3577"))}
    query.update(location)

    # Find matching datasets, then group them into time slices on a shared geobox.
    dc = Datacube()
    datasets = landsat_product.query(dc, **query)
    grouped = landsat_product.group(datasets, **query)
    _LOG.debug("datasets %s", grouped)

    # Rasterize the polygon (1 inside), then zero out anything off the coastline.
    mask = generate_raster([(query_poly, 1)], grouped.geobox)
    coastline_mask = clip_coastline(grouped.geobox)
    mask[coastline_mask == 0] = 0
    _LOG.debug("mask size %s none zero %s", mask.size, np.count_nonzero(mask))
    # Nothing left to measure: the polygon does not intersect usable pixels.
    if np.count_nonzero(mask) == 0:
        return [], []

    darkest_mean = []
    time_mark = []
    future_list = []

    # One task per time slice; each slice is wrapped in its own single-time
    # VirtualDatasetBox so workers can load it independently.
    with MPIPoolExecutor() as executor:
        for i in range(grouped.box.time.shape[0]):
            time_slice = VirtualDatasetBox(grouped.box.sel(time=grouped.box.time.data[i:i+1]), grouped.geobox,
                            grouped.load_natively, grouped.product_definitions, grouped.geopolygon)
            future = executor.submit(load_cal, landsat_product, time_slice, mask)
            future_list.append(future)

    # Executor shutdown (end of the with-block) waits for completion; results
    # remain retrievable afterwards. Slices with no valid data return None.
    for future in future_list:
        r = future.result()
        if r[1] is not None:
            _LOG.debug("darkest time %s", r[0])
            _LOG.debug("darkest mean %s", r[1])
            time_mark.append(r[0])
            darkest_mean.append(r[1])
    return time_mark, darkest_mean
Example 5
def cloud_free_nbar():
    """Build a virtual product collating cloud-free LS8 and LS7 NBAR (blue, green).

    Each arm juxtaposes an NBAR product with its PQ product, derives a
    pixel-quality mask via make_mask, and applies it; the collate tags each
    observation's origin in the 'source_index' measurement.
    """
    # Fix: yaml.load without an explicit Loader is deprecated (unsafe before
    # PyYAML 5.1, a TypeError from PyYAML 6). safe_load fully supports the
    # anchors (&mask) and merge keys (<<: *mask) used in this recipe.
    recipe = yaml.safe_load("""
    collate:
      - transform: apply_mask
        mask_measurement_name: pixelquality
        input:
          &mask
          transform: make_mask
          flags:
              blue_saturated: false
              cloud_acca: no_cloud
              cloud_fmask: no_cloud
              cloud_shadow_acca: no_cloud_shadow
              cloud_shadow_fmask: no_cloud_shadow
              contiguous: true
              green_saturated: false
              nir_saturated: false
              red_saturated: false
              swir1_saturated: false
              swir2_saturated: false
          mask_measurement_name: pixelquality
          input:
            juxtapose:
              - product: ls8_nbar_albers
                measurements: ['blue', 'green']
              - product: ls8_pq_albers
      - transform: datacube.virtual.transformations.ApplyMask
        mask_measurement_name: pixelquality
        input:
          <<: *mask
          input:
            juxtapose:
              - product: ls7_nbar_albers
                measurements: ['blue', 'green']
              - product: ls7_pq_albers
    index_measurement_name: source_index
    """)

    return construct(**recipe)
Example 6
def create_process(process, input, **settings):
    """Instantiate the process class named by *process* (dotted path).

    The *input* recipe dict is turned into a virtual product via construct();
    remaining keyword arguments are forwarded to the process constructor.
    """
    process_class = import_function(process)
    virtual_input = construct(**input)
    return process_class(input=virtual_input, **settings)
Example 7
        tmad['sdev'].data = container['sdev']
        tmad['bcdev'].data = container['bcdev']

        # Calculate the mean of all the tmad inputs
        tmad_mean = np.mean(np.stack([tmad.edev.data, tmad.sdev.data, tmad.bcdev.data], axis=-1), axis=-1)
        # Convert type to float64 (required for quickshift)
        ## tmad_mean = np.float64(tmad_mean)
        # Segment
        ## tmad_seg = quickshift(tmad_mean, kernel_size=5, convert2lab=False, max_dist=500, ratio=0.5)
        # Calculate the median for each segment
        ## tmad_median_seg = scipy.ndimage.median(input=tmad_mean, labels=tmad_seg, index=tmad_seg)
        # Set threshold as 10th percentile of mean TMAD
        thresh = np.percentile(tmad_mean.ravel(), 10)
        # Create boolean layer using threshold
        ## tmad_thresh = tmad_median_seg < thresh
        tmad_thresh = tmad_mean < thresh
        # Convert from boolean to binary
        tmad_thresh = tmad_thresh * 1
        #tmad_thresh = tmad_thresh.astype(float)
        out = xr.Dataset({'cultman_agr_cat': (tmad.dims, tmad_thresh)}, coords=tmad.coords, attrs=tmad.attrs)
        return out
        #return tv_summary_filt.to_dataset(name='cultman_agr_cat')

    def measurements(self, input_measurements):
        """Declare the single output band produced by this transformation."""
        output_band = Measurement(name='cultman_agr_cat', dtype='float32',
                                  nodata=float('nan'), units='1')
        return {'cultman_agr_cat': output_band}

# Fix: the original line was missing the closing parenthesis of construct(...)
# and would raise a SyntaxError.
vegetat_veg_cat = construct(transform=Cultivated, input=dict(product='ls8_nbart_tmad_annual'))

#vegetat_veg_cat_data = FC_summary.load(dc, **search_terms)

Example 8
from datacube.virtual import Transformation, Measurement, construct, DEFAULT_RESOLVER
import datacube

import yaml

from wofs.vp_wofs import woffles_ard_no_terrain_filter

# Bands the WOfS transform consumes internally: [green, red, nir, swir1, swir2]
# Recipe for a WOfS virtual product reading the listed ls8_ard measurements.
virtual_product_defn = yaml.safe_load('''
transform: wofs.virtualproduct.Wofs
input:
    product: ls8_ard
    measurements: [nbart_blue, nbart_green, nbart_red, nbart_nir, nbart_swir_1, nbart_swir_2, fmask]
''')
virtual_product = construct(**virtual_product_defn)

# Candidate database environments (selected below via Datacube(env=...)):
# [odc_conf_test] -
# db_hostname: agdcdev-db.nci.org.au
# db_port: 6432
# db_database: odc_conf_test

# [ard_interop] - collection upgrade DB
# db_hostname: agdcstaging-db.nci.org.au
# db_port:     6432
# db_database: ard_interop

dc = datacube.Datacube(env="odc_conf_test")

# Resolve a single dataset by id; the result is grouped/gridded further below.
vdbag = virtual_product.query(dc=dc, id='be43b7ce-c421-4c16-826d-a508f3e3d984')

box = virtual_product.group(vdbag,
Example 9
                         coords=meta_d.coords,
                         attrs=meta_d.attrs)
        return out.to_dataset(name='vegetat_veg_cat')

    def measurements(self, input_measurements):
        """Declare the single float32 output band 'vegetat_veg_cat'."""
        band = Measurement(name='vegetat_veg_cat',
                           dtype='float32',
                           nodata=float('nan'),
                           units='1')
        return {'vegetat_veg_cat': band}


# Time-grouped aggregate of the fractional-cover product via FC_summary.
vsvg = construct(aggregate=FC_summary,
                 group_by='time',
                 input=dict(product='fc'))

# Pair fractional cover with the WOfS water observations pixel-for-pixel.
fc_with_wofs = construct_from_yaml("""
    juxtapose:
      - product: ls8_fc_albers
      - product: wofs_albers
""")

# FC_summary applied as a transform over the juxtaposed FC+WOfS input.
vegetat_veg_cat = construct(transform=FC_summary, input=fc_with_wofs)

vegetat_veg_cat_data = vegetat_veg_cat.load(dc,
                                            dask_chunks={
                                                'x': 512,
                                                'y': 512,
                                                'time': -1