def check_analytics_pixel_drill(index):
    """Pixel-drill smoke check: request three single-point arrays (NBAR nir/red
    and PQ) and assert each produces a non-empty cached result."""
    from datetime import datetime
    from datacube.analytics.analytics_engine import AnalyticsEngine
    from datacube.execution.execution_engine import ExecutionEngine

    analytics = AnalyticsEngine(index=index)
    execution = ExecutionEngine(index=index)

    nbar_storage_type = 'ls5_nbar_albers'
    var1 = 'nir'
    var2 = 'red'
    pq_storage_type = 'ls5_pq_albers'
    pq_var = 'pixelquality'

    # Lake Burley Griffin -- scalar x/y ranges select a single pixel, year 1992.
    dimensions = {
        'x': {'range': (149.12)},
        'y': {'range': (-35.30)},
        'time': {'range': (datetime(1992, 1, 1), datetime(1992, 12, 31))},
    }

    b40 = analytics.create_array(nbar_storage_type, [var1], dimensions, 'b40')
    b30 = analytics.create_array(nbar_storage_type, [var2], dimensions, 'b30')
    pq = analytics.create_array(pq_storage_type, [pq_var], dimensions, 'pq')

    result = execution.execute_plan(analytics.plan)

    assert execution.cache['b40']
    assert execution.cache['b30']
    assert execution.cache['pq']
    assert execution.cache['b40']['array_result'][var1].size > 0
    assert execution.cache['b30']['array_result'][var2].size > 0
    assert execution.cache['pq']['array_result'][pq_var].size > 0
# Example 2
def check_analytics_create_array(index):
    """Create a two-variable NBAR array and check the result is cached."""
    from datetime import datetime
    from datacube.analytics.analytics_engine import AnalyticsEngine
    from datacube.execution.execution_engine import ExecutionEngine

    analytics = AnalyticsEngine(index=index)
    execution = ExecutionEngine(index=index)

    platform = 'LANDSAT_5'
    product = 'nbar'
    var1 = 'red'
    var2 = 'nir'

    # Lake Burley Griffin, full calendar year 1992.
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1992, 1, 1), datetime(1992, 12, 31))}}

    arrays = analytics.create_array((platform, product), [var1, var2],
                                    dimensions, 'get_data')

    execution.execute_plan(analytics.plan)

    assert execution.cache['get_data']
# Example 3
def test_perform_ndvi(mock_api):
    """NDVI via apply_expression on two mocked NBAR bands."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    mock_api.get_descriptor.side_effect = mock_get_descriptor

    # Lake Burley Griffin, 1990.
    dimensions = {'longitude': {'range': (149.07, 149.18)},
                  'latitude': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    b40 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'b40')
    b30 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30'], dimensions, 'b30')

    ndvi = analytics.apply_expression(
        [b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')

    res = execution.execute_plan(analytics.plan)

    print(res)
# Example 4
def test_get_data(mock_api):
    """Fetch two mocked bands and verify keys and shape of the cached result."""
    # mock_api.get_data.return_value = mock_get_data(('band_30', 'band_40'))

    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {'longitude': {'range': (149.07, 149.18)},
                  'latitude': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30', 'band_40'],
                                    dimensions, 'get_data')

    execution.execute_plan(analytics.plan)

    result = execution.cache['get_data']
    assert 'array_result' in result
    assert 'band_30' in result['array_result']
    assert 'band_40' in result['array_result']
    # Mocked data is expected to be 2 time slices of 400x400 pixels.
    assert result['array_result']['band_30'].shape == (2, 400, 400)
# Example 5
def test_median_reduction_over_time_old_version(mock_api):
    """Median over time using the legacy apply_reduction API."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {'longitude': {'range': (149.07, 149.18)},
                  'latitude': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'],
                                    dimensions, 'get_data')

    median_t = analytics.apply_reduction(arrays, ['time'], 'median', 'medianT')

    result = execution.execute_plan(analytics.plan)
def main():
    """Compute sensor-specific NDVI and browse the results in Glue."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    ndvi = engine.apply_sensor_specific_bandmath('LANDSAT_5', 'nbar', 'ndvi', dimensions, 'get_data', 'ndvi')

    result = executor.execute_plan(engine.plan)

    plot(executor.cache['ndvi'])

    b30_result = executor.cache['get_data']['array_result']['red']
    b40_result = executor.cache['get_data']['array_result']['nir']
    ndvi_result = executor.cache['ndvi']['array_result']['ndvi']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=ndvi_result[:, ::-1, :], label='ndvi'),
        Data(x=b30_result[:, ::-1, :], label='B30'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    app = GlueApplication(DataCollection(datasets))
    app.start()
# Example 7
def test_median_reduction_over_lat_long(mock_api):
    """Median over the two spatial axes via an expression."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {'longitude': {'range': (149.07, 149.18)},
                  'latitude': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'],
                                    dimensions, 'get_data')

    # Axes 1 and 2 are the spatial dimensions of the (time, y, x) array.
    median = analytics.apply_expression(arrays, 'median(array1, 1, 2)', 'medianXY')

    execution.execute_plan(analytics.plan)
# Example 8
def test_perform_old_ndvi_version(mock_api):
    """NDVI via the legacy apply_bandmath API."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {'longitude': {'range': (149.07, 149.18)},
                  'latitude': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40', 'band_30'],
                                    dimensions, 'get_data')
    ndvi = analytics.apply_bandmath(arrays, '((array1 - array2) / (array1 + array2))',
                                    'ndvi')

    execution.execute_plan(analytics.plan)
def check_analytics_pixel_drill(index):
    """Drill a single pixel for NBAR and PQ variables and assert that each
    cached result carries data."""
    from datetime import datetime
    from datacube.analytics.analytics_engine import AnalyticsEngine
    from datacube.execution.execution_engine import ExecutionEngine

    engine = AnalyticsEngine(index=index)
    executor = ExecutionEngine(index=index)

    nbar_storage_type = 'ls5_nbar_albers'
    var1 = 'nir'
    var2 = 'red'
    pq_storage_type = 'ls5_pq_albers'
    pq_var = 'pixelquality'

    # Lake Burley Griffin; scalar x/y ranges pick out one pixel, year 1992.
    dimensions = {
        'x': {'range': (149.12)},
        'y': {'range': (-35.30)},
        'time': {'range': (datetime(1992, 1, 1), datetime(1992, 12, 31))},
    }

    b40 = engine.create_array(nbar_storage_type, [var1], dimensions, 'b40')
    b30 = engine.create_array(nbar_storage_type, [var2], dimensions, 'b30')
    pq = engine.create_array(pq_storage_type, [pq_var], dimensions, 'pq')

    result = executor.execute_plan(engine.plan)

    assert executor.cache['b40']
    assert executor.cache['b30']
    assert executor.cache['pq']
    assert executor.cache['b40']['array_result'][var1].size > 0
    assert executor.cache['b30']['array_result'][var2].size > 0
    assert executor.cache['pq']['array_result'][pq_var].size > 0
def main():
    """NDVI from separately created NIR/red arrays, visualised with Glue."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    b40 = engine.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'b40')
    b30 = engine.create_array(('LANDSAT_5', 'nbar'), ['red'], dimensions, 'b30')

    ndvi = engine.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')

    executor.execute_plan(engine.plan)
    plot(executor.cache['ndvi'])

    b30_result = executor.cache['b30']['array_result']['red']
    b40_result = executor.cache['b40']['array_result']['nir']
    ndvi_result = executor.cache['ndvi']['array_result']['ndvi']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=ndvi_result[:, ::-1, :], label='ndvi'),
        Data(x=b30_result[:, ::-1, :], label='B30'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    app = GlueApplication(DataCollection(datasets))
    app.start()
# Example 11
def main():
    """NDVI via bandmath on a single two-band array, visualised with Glue."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = engine.create_array(('LANDSAT_5', 'nbar'), ['nir', 'red'], dimensions, 'get_data')

    ndvi = engine.apply_bandmath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')

    executor.execute_plan(engine.plan)

    plot(executor.cache['ndvi'])

    b30_result = executor.cache['get_data']['array_result']['red']
    b40_result = executor.cache['get_data']['array_result']['nir']
    ndvi_result = executor.cache['ndvi']['array_result']['ndvi']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=ndvi_result[:, ::-1, :], label='ndvi'),
        Data(x=b30_result[:, ::-1, :], label='B30'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    GlueApplication(DataCollection(datasets)).start()
# Example 12
def main():
    """Median over the spatial axes via a generic reduction, then plot."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = engine.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'get_data')

    median_xy = engine.apply_generic_reduction(arrays, ['y', 'x'], 'median(array1)', 'medianXY')

    result = executor.execute_plan(engine.plan)

    plot(executor.cache['medianXY'])
# Example 13
def main():
    """Median over time via a generic reduction, browsed in Glue."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = engine.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'get_data')

    median_t = engine.apply_generic_reduction(arrays, ['time'], 'median(array1)', 'medianT')

    result = executor.execute_plan(engine.plan)

    plot(executor.cache['medianT'])

    b40_result = executor.cache['get_data']['array_result']['nir']
    median_result = executor.cache['medianT']['array_result']['medianT']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=median_result[::-1, :], label='medianT'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    GlueApplication(DataCollection(datasets)).start()
# Example 14
def test_bit_of_everything(mock_api):
    """Chain create/expression/mask/reduction operations in one plan."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {'longitude': {'range': (149.07, 149.18)},
                  'latitude': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    b40 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'b40')
    b30 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30'], dimensions, 'b30')
    pq = analytics.create_array(('LANDSAT_5', 'PQ'), ['band_pixelquality'], dimensions, 'pq')

    ndvi = analytics.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    adjusted_ndvi = analytics.apply_expression(ndvi, '(ndvi*0.5)', 'adjusted_ndvi')
    mask = analytics.apply_expression([adjusted_ndvi, pq], 'array1{array2}', 'mask')
    median_t = analytics.apply_expression(mask, 'median(array1, 0)', 'medianT')

    result = execution.execute_plan(analytics.plan)
def check_analytics_list_searchables(index):
    """Every searchable storage type must expose bands, dimensions and the
    core metadata fields."""
    from datacube.analytics.analytics_engine import AnalyticsEngine

    engine = AnalyticsEngine(index=index)
    searchables = engine.list_searchables()

    assert len(searchables) > 0
    for storage_type in searchables:
        entry = searchables[storage_type]
        assert len(entry['bands']) > 0
        assert len(list(entry['dimensions'])) > 0
        assert entry['instrument']
        assert entry['platform']
        assert entry['product_type']
        assert entry['storage_type']
# Example 16
def test_sensor_specific_bandmath_old_version(mock_api):
    """Legacy sensor-specific bandmath entry point."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    ndvi = analytics.apply_sensor_specific_bandmath('LANDSAT_5', 'NBAR', 'ndvi', dimensions, 'get_data', 'ndvi')

    result = execution.execute_plan(analytics.plan)
def check_analytics_list_searchables(index):
    """list_searchables() must return at least one storage type, each carrying
    bands, dimensions and populated metadata fields."""
    from datacube.analytics.analytics_engine import AnalyticsEngine

    engine = AnalyticsEngine(index=index)
    searchables = engine.list_searchables()

    assert len(searchables) > 0
    for storage_type in searchables:
        entry = searchables[storage_type]
        assert len(entry['bands']) > 0
        assert len(list(entry['dimensions'])) > 0
        assert entry['instrument']
        assert entry['platform']
        assert entry['product_type']
        assert entry['storage_type']
def test_perform_old_ndvi_version(mock_api):
    """NDVI via the legacy apply_bandmath entry point."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40', 'band_30'], dimensions, 'get_data')
    ndvi = analytics.apply_bandmath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')

    execution.execute_plan(analytics.plan)
def test_median_reduction_over_time(mock_api):
    """Median over the time axis (axis 0) via an expression."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'get_data')

    median = analytics.apply_expression(arrays, 'median(array1, 0)', 'medianT')

    execution.execute_plan(analytics.plan)
def test_median_reduction_over_time_old_version(mock_api):
    """Median over time via the legacy apply_reduction API."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'get_data')

    median_t = analytics.apply_reduction(arrays, ['time'], 'median', 'medianT')

    result = execution.execute_plan(analytics.plan)
def main():
    """Median over the spatial axes via an expression, then plot."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = engine.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'get_data')

    # Axes 1 and 2 are the spatial dimensions of the (time, y, x) array.
    median = engine.apply_expression(arrays, 'median(array1, 1, 2)', 'medianXY')

    executor.execute_plan(engine.plan)

    plot(executor.cache['medianXY'])
# Example 22
def main():
    """Median over time via a generic reduction, browsed in Glue."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = engine.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'get_data')

    median_t = engine.apply_generic_reduction(arrays, ['time'], 'median(array1)', 'medianT')

    result = executor.execute_plan(engine.plan)

    plot(executor.cache['medianT'])

    b40_result = executor.cache['get_data']['array_result']['nir']
    median_result = executor.cache['medianT']['array_result']['medianT']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=median_result[::-1, :], label='medianT'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    GlueApplication(DataCollection(datasets)).start()
def TVI(product, x1, x2, y1, y2, year):
    """Compute and plot the Transformed Vegetation Index for *product* over
    the box (x1..x2, y1..y2) during calendar *year*.

    Relies on the module-level ``dc`` Datacube handle and the AnalyticsEngine /
    ExecutionEngine prototype API.
    """
    a = AnalyticsEngine()
    e = ExecutionEngine()

    # dc.list_products() is a pandas DataFrame; .loc[:]['name'] selects the
    # product-name column as a Series, .values gives an ndarray.
    assert (product in dc.list_products().loc[:]['name'].values), "Product not in database"

    # Row layout: index 0 is the name, 2 the platform, 3 the product type.
    platform = None
    product_type = None
    for prod in dc.list_products().loc[:].values:
        if product == prod[0]:
            platform = prod[2]
            product_type = prod[3]

    la = dc.load(product=product, x=(x1, x2), y=(y1, y2))
    # First data variable's first timestamp (a numpy.datetime64).
    # BUG FIX: dict views are not indexable in Python 3, so la.items()[0]
    # raised TypeError -- take the first item via an iterator instead.
    date_of_prod = next(iter(la.items()))[1].values[0]

    # TODO: compare numpy.datetime64 with datetime.datetime before enabling:
    # assert (date_of_prod >= time1 and date_of_prod <= time2), "Product not in the provided time"

    time1 = datetime(year, 1, 1)
    time2 = datetime(year, 12, 31)
    dimensions = {'x':    {'range': (x1, x2)},
                  'y':    {'range': (y1, y2)},
                  'time': {'range': (time1, time2)}}

    # Create NIR and red arrays for the band math below.
    b40 = a.create_array((platform, product_type), ['nir'], dimensions, 'b40')
    b30 = a.create_array((platform, product_type), ['red'], dimensions, 'b30')

    # Transformed vegetation index: sqrt(NDVI + 0.5) * 100.
    tvi = a.apply_expression([b40, b30], '(sqrt(((array1 - array2) / (array1 + array2)) + 0.5) * 100)', 'tvi')

    e.execute_plan(a.plan)

    # Result is an xarray object; use its built-in plotting.
    res = e.cache['tvi']['array_result']['tvi']
    res.plot()
def test_perform_ndvi_mask(mock_api):
    """NDVI followed by a PQ mask expression."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    b40 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'b40')
    b30 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30'], dimensions, 'b30')
    pq = analytics.create_array(('LANDSAT_5', 'PQ'), ['band_pixelquality'], dimensions, 'pq')

    ndvi = analytics.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    mask = analytics.apply_expression([ndvi, pq], 'array1{array2}', 'mask')

    execution.execute_plan(analytics.plan)
def test_perform_ndvi(mock_api):
    """NDVI via apply_expression on two mocked NBAR bands."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    mock_api.get_descriptor.side_effect = mock_get_descriptor

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    b40 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'b40')
    b30 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30'], dimensions, 'b30')

    ndvi = analytics.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')

    res = execution.execute_plan(analytics.plan)

    print(res)
# Example 26
def test_perform_ndvi_mask(mock_api):
    """NDVI masked by a PQ array via expressions."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {'longitude': {'range': (149.07, 149.18)},
                  'latitude': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    b40 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'b40')
    b30 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30'], dimensions, 'b30')
    pq = analytics.create_array(('LANDSAT_5', 'PQ'), ['band_pixelquality'], dimensions, 'pq')

    ndvi = analytics.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    mask = analytics.apply_expression([ndvi, pq], 'array1{array2}', 'mask')

    execution.execute_plan(analytics.plan)
# Example 27
def main():
    """NDVI with a cloud mask applied from PQ, browsed in Glue."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = engine.create_array(('LANDSAT_5', 'nbar'), ['nir', 'red'], dimensions, 'get_data')

    ndvi = engine.apply_bandmath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')
    pq = engine.create_array(('LANDSAT_5', 'pqa'), ['pixelquality'], dimensions, 'pq')
    mask = engine.apply_cloud_mask(ndvi, pq, 'mask')

    executor.execute_plan(engine.plan)

    plot(executor.cache['mask'])

    b30_result = executor.cache['get_data']['array_result']['red']
    b40_result = executor.cache['get_data']['array_result']['nir']
    ndvi_result = executor.cache['ndvi']['array_result']['ndvi']
    pq_result = executor.cache['pq']['array_result']['pixelquality']
    mask_result = executor.cache['mask']['array_result']['mask']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=mask_result[:, ::-1, :], label='mask'),
        Data(x=pq_result[:, ::-1, :], label='pq'),
        Data(x=ndvi_result[:, ::-1, :], label='ndvi'),
        Data(x=b30_result[:, ::-1, :], label='B30'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    GlueApplication(DataCollection(datasets)).start()
# Example 28
def check_analytics_ndvi_mask_median_expression(index):
    """NDVI -> PQ mask -> median-over-time pipeline; verify all cached steps."""
    from datetime import datetime
    from datacube.analytics.analytics_engine import AnalyticsEngine
    from datacube.execution.execution_engine import ExecutionEngine

    engine = AnalyticsEngine(index=index)
    executor = ExecutionEngine(index=index)

    platform = 'LANDSAT_5'
    product = 'nbar'
    var1 = 'nir'
    var2 = 'red'
    pq_product = 'pqa'
    pq_var = 'pixelquality'

    # Lake Burley Griffin, 1992.
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1992, 1, 1), datetime(1992, 12, 31))}}

    b40 = engine.create_array((platform, product), [var1], dimensions, 'b40')
    b30 = engine.create_array((platform, product), [var2], dimensions, 'b30')
    pq = engine.create_array((platform, pq_product), [pq_var], dimensions, 'pq')

    ndvi = engine.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    # PQ bitmask values treated as clear pixels (presumably all-tests-pass
    # codes -- confirm against the PQ documentation).
    mask = engine.apply_expression([ndvi, pq], 'array1{(array2 == 32767) | (array2 == 16383) | (array2 == 2457)}', 'mask')
    median_t = engine.apply_expression(mask, 'median(array1, 0)', 'medianT')

    result = executor.execute_plan(engine.plan)

    assert executor.cache['b40']
    assert executor.cache['b30']
    assert executor.cache['pq']
    assert executor.cache['b40']['array_result'][var1].size > 0
    assert executor.cache['b30']['array_result'][var2].size > 0
    assert executor.cache['pq']['array_result'][pq_var].size > 0

    assert executor.cache['ndvi']
    assert executor.cache['mask']
    assert executor.cache['medianT']
def test_get_data(mock_api):
    """Fetch two mocked bands and verify keys and shape of the cached result."""
    # mock_api.get_data.return_value = mock_get_data(('band_30', 'band_40'))

    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    arrays = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30', 'band_40'], dimensions, 'get_data')

    execution.execute_plan(analytics.plan)

    result = execution.cache['get_data']
    assert 'array_result' in result
    assert 'band_30' in result['array_result']
    assert 'band_40' in result['array_result']
    # Mocked data is expected to be 2 time slices of 400x400 pixels.
    assert result['array_result']['band_30'].shape == (2, 400, 400)
def check_analytics_create_array(index):
    """Create a two-variable array through the engines and check the cache."""
    from datetime import datetime
    from datacube.analytics.analytics_engine import AnalyticsEngine
    from datacube.execution.execution_engine import ExecutionEngine

    engine = AnalyticsEngine(index=index)
    executor = ExecutionEngine(index=index)

    platform = 'LANDSAT_5'
    product = 'nbar'
    var1 = 'red'
    var2 = 'nir'

    # Lake Burley Griffin, 1992.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1992, 1, 1), datetime(1992, 12, 31))},
    }

    arrays = engine.create_array((platform, product), [var1, var2], dimensions, 'get_data')

    executor.execute_plan(engine.plan)

    assert executor.cache['get_data']
def main():
    """NDVI masked by PQ then reduced to a median composite, browsed in Glue."""
    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    b40 = engine.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'b40')
    b30 = engine.create_array(('LANDSAT_5', 'nbar'), ['red'], dimensions, 'b30')
    pq = engine.create_array(('LANDSAT_5', 'pqa'), ['pixelquality'], dimensions, 'pq')

    ndvi = engine.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    # PQ bitmask values treated as clear pixels -- confirm against PQ docs.
    mask = engine.apply_expression([ndvi, pq], 'array1{(array2 == 32767) | (array2 == 16383) | (array2 == 2457)}', 'mask')
    median_t = engine.apply_expression(mask, 'median(array1, 0)', 'medianT')

    result = executor.execute_plan(engine.plan)

    plot(executor.cache['medianT'])

    b30_result = executor.cache['b30']['array_result']['red']
    b40_result = executor.cache['b40']['array_result']['nir']
    ndvi_result = executor.cache['ndvi']['array_result']['ndvi']
    pq_result = executor.cache['pq']['array_result']['pixelquality']
    mask_result = executor.cache['mask']['array_result']['mask']
    median_result = executor.cache['medianT']['array_result']['medianT']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=median_result[::-1, :], label='median'),
        Data(x=mask_result[:, ::-1, :], label='mask'),
        Data(x=pq_result[:, ::-1, :], label='pq'),
        Data(x=ndvi_result[:, ::-1, :], label='ndvi'),
        Data(x=b30_result[:, ::-1, :], label='B30'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    GlueApplication(DataCollection(datasets)).start()
def test_bit_of_everything(mock_api):
    """Exercise create/expression/mask/reduction in a single plan."""
    analytics = AnalyticsEngine(api=mock_api)
    execution = ExecutionEngine(api=mock_api)

    # Lake Burley Griffin, 1990.
    dimensions = {
        'longitude': {'range': (149.07, 149.18)},
        'latitude': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    b40 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_40'], dimensions, 'b40')
    b30 = analytics.create_array(('LANDSAT_5', 'NBAR'), ['band_30'], dimensions, 'b30')
    pq = analytics.create_array(('LANDSAT_5', 'PQ'), ['band_pixelquality'], dimensions, 'pq')

    ndvi = analytics.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    adjusted_ndvi = analytics.apply_expression(ndvi, '(ndvi*0.5)', 'adjusted_ndvi')
    mask = analytics.apply_expression([adjusted_ndvi, pq], 'array1{array2}', 'mask')
    median_t = analytics.apply_expression(mask, 'median(array1, 0)', 'medianT')

    result = execution.execute_plan(analytics.plan)
def main():
    """Build a PQ-masked NDVI median composite and explore it in Glue."""
    analytics = AnalyticsEngine()
    execution = ExecutionEngine()

    # Lake Burley Griffin, 1990.
    dimensions = {
        'x': {'range': (149.07, 149.18)},
        'y': {'range': (-35.32, -35.28)},
        'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))},
    }

    b40 = analytics.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'b40')
    b30 = analytics.create_array(('LANDSAT_5', 'nbar'), ['red'], dimensions, 'b30')
    pq = analytics.create_array(('LANDSAT_5', 'pqa'), ['pixelquality'], dimensions, 'pq')

    ndvi = analytics.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    # PQ bitmask values treated as clear pixels -- confirm against PQ docs.
    mask = analytics.apply_expression([ndvi, pq], 'array1{(array2 == 32767) | (array2 == 16383) | (array2 == 2457)}', 'mask')
    median_t = analytics.apply_expression(mask, 'median(array1, 0)', 'medianT')

    result = execution.execute_plan(analytics.plan)

    plot(execution.cache['medianT'])

    b30_result = execution.cache['b30']['array_result']['red']
    b40_result = execution.cache['b40']['array_result']['nir']
    ndvi_result = execution.cache['ndvi']['array_result']['ndvi']
    pq_result = execution.cache['pq']['array_result']['pixelquality']
    mask_result = execution.cache['mask']['array_result']['mask']
    median_result = execution.cache['medianT']['array_result']['medianT']

    # y axis reversed for display (presumably north-up orientation -- confirm).
    datasets = [
        Data(x=median_result[::-1, :], label='median'),
        Data(x=mask_result[:, ::-1, :], label='mask'),
        Data(x=pq_result[:, ::-1, :], label='pq'),
        Data(x=ndvi_result[:, ::-1, :], label='ndvi'),
        Data(x=b30_result[:, ::-1, :], label='B30'),
        Data(x=b40_result[:, ::-1, :], label='B40'),
        Data(x=b40_result.coords['x'], label='long'),
        Data(x=b40_result.coords['y'], label='lat'),
        Data(x=b40_result.coords['time'], label='time'),
    ]

    GlueApplication(DataCollection(datasets)).start()
def check_analytics_ndvi_mask_median_expression(index):
    """Integration check: NDVI -> pixel-quality mask -> time-median pipeline.

    Builds the plan against the given datacube ``index``, executes it, and
    asserts that every source band and every derived result landed in the
    execution-engine cache with non-empty data.
    """
    from datetime import datetime
    from datacube.analytics.analytics_engine import AnalyticsEngine
    from datacube.execution.execution_engine import ExecutionEngine

    a = AnalyticsEngine(index=index)
    e = ExecutionEngine(index=index)

    platform = 'LANDSAT_5'
    product = 'nbar'
    var1 = 'nir'
    var2 = 'red'
    pq_product = 'pqa'
    pq_var = 'pixelquality'

    # Lake Burley Griffin, calendar year 1992.
    dimensions = {'x':    {'range': (149.07, 149.18)},
                  'y':    {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1992, 1, 1), datetime(1992, 12, 31))}}

    b40 = a.create_array((platform, product), [var1], dimensions, 'b40')
    b30 = a.create_array((platform, product), [var2], dimensions, 'b30')
    pq = a.create_array((platform, pq_product), [pq_var], dimensions, 'pq')

    # NDVI, then mask by pixel-quality flag values, then median over time.
    ndvi = a.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    mask = a.apply_expression([ndvi, pq], 'array1{(array2 == 32767) | (array2 == 16383) | (array2 == 2457)}', 'mask')
    # Registered under 'medianT'; the returned handle is not needed here.
    a.apply_expression(mask, 'median(array1, 0)', 'medianT')

    # Return value was assigned but never used in the original.
    e.execute_plan(a.plan)

    assert e.cache['b40']
    assert e.cache['b30']
    assert e.cache['pq']
    assert e.cache['b40']['array_result'][var1].size > 0
    assert e.cache['b30']['array_result'][var2].size > 0
    assert e.cache['pq']['array_result'][pq_var].size > 0

    assert e.cache['ndvi']
    assert e.cache['mask']
    assert e.cache['medianT']
# Example #35
def algos(x1, x2, y1, y2, year, algo):
    """Run a vegetation-index algorithm over the given extent and year and
    save the plotted result as a PNG.

    Parameters:
        x1, x2: longitude range of the query.
        y1, y2: latitude range of the query.
        year:   calendar year; selects the reflectance product and the
                time range (Jan 1 .. Dec 31).
        algo:   one of 'RVI', 'NDVI', 'TVI'.

    Returns:
        Path of the saved image under media/images/.

    Raises:
        ValueError: for an unknown ``algo``.  (The original fell through an
        ``else: pass`` and crashed with NameError on an unbound ``path``.)
        RuntimeError: if the product row cannot be found in list_products().
    """
    dc = datacube.Datacube(config="/home/rishabh/.datacube.conf")

    a = AnalyticsEngine()
    e = ExecutionEngine()

    product = get_reflectance_product(year)
    # dc.list_products() is a pandas DataFrame; .loc[:]['name'] selects the
    # 'name' column as a Series, and .values is its underlying array.
    assert (product in dc.list_products().loc[:]['name'].values), "Product not in database"

    # Row layout assumption: index 0 holds the product name, index 2 the
    # platform, index 3 the product type -- TODO confirm against the
    # dc.list_products() column order.
    platform = None
    product_type = None
    for prod in dc.list_products().loc[:].values:
        if product == prod[0]:
            platform = prod[2]
            product_type = prod[3]
    if platform is None:
        # Previously this fell through and NameError'd on create_array.
        raise RuntimeError("Could not resolve platform/type for product %r" % product)

    # NOTE(review): loaded only so the acquisition date is available for a
    # still-unimplemented time-range check; see TODO below.
    la = dc.load(product=product, x=(x1, x2), y=(y1, y2))
    date_of_prod = (la.items()[0])[1].values[0]  # numpy.datetime64
    # TODO: compare numpy.datetime64 with datetime.datetime
    # assert (date_of_prod >= time1 and date_of_prod <= time2), "Product not in the provided time"

    time1 = datetime(year, 1, 1)
    time2 = datetime(year, 12, 31)
    dimensions = {'x':    {'range': (x1, x2)},
                  'y':    {'range': (y1, y2)},
                  'time': {'range': (time1, time2)}}

    # Bands: b40 = near-infrared, b30 = red.
    b40 = a.create_array((platform, product_type), ['nir'], dimensions, 'b40')
    b30 = a.create_array((platform, product_type), ['red'], dimensions, 'b30')

    # Algorithm name -> expression over [b40, b30] (array1=nir, array2=red).
    expressions = {
        'RVI': '(array1 / array2)',                                            # ratio vegetation index
        'NDVI': '((array1 - array2) / (array1 + array2))',                     # normalized difference
        'TVI': '(sqrt(((array1 - array2) / (array1 + array2)) + 0.5) * 100)',  # transformed vegetation index
    }
    if algo not in expressions:
        raise ValueError("Unknown algorithm: %r (expected one of %s)"
                         % (algo, sorted(expressions)))

    key = algo.lower()  # cache key and file-name stem ('rvi', 'ndvi', 'tvi')
    a.apply_expression([b40, b30], expressions[algo], key)
    e.execute_plan(a.plan)

    # Result is an xarray in the execution-engine cache.
    res = e.cache[key]['array_result'][key]
    get_right_layer(res).plot()

    path = 'media/images/' + key + '-' + str(year) + '.png'
    plt.savefig(path)
    plt.clf()
    return path