def init_dea(index: Index, with_permissions: bool, log_header=print_header, log=print_):
    """
    Create (or bring up to date) a DEA-configured ODC instance.

    Initialises the database schema, refreshes indexes/views, then loads the
    DEA metadata types, products and ingestion output definitions, logging
    each item as it is checked.
    """
    log_header(f"ODC init of {index.url}")

    freshly_created = index.init_db(with_default_types=False,
                                    with_permissions=with_permissions)
    log('Created.' if freshly_created else 'Updated.')

    log('Checking indexes/views.')
    index.metadata_types.check_field_indexes(
        allow_table_lock=True,
        rebuild_indexes=False,
        rebuild_views=True,
    )

    log_header('Checking DEA metadata types')
    # Register (or update) each DEA metadata type document.
    for _, type_doc in read_documents(DEA_MD_TYPES):
        registered_type = index.metadata_types.add(
            index.metadata_types.from_doc(type_doc))
        log(f"{registered_type.name}")

    log_header('Checking DEA products')
    for _, product_doc in read_documents(*DEA_PRODUCTS_DIR.glob('*.yaml')):
        registered_product = index.products.add_document(product_doc)
        log(f"{registered_product.name}")

    log_header('Checking DEA ingested definitions')
    for config_path in DEA_INGESTION_DIR.glob('*.yaml'):
        ingest_config = ingest.load_config_from_file(index, config_path)
        source_type, output_type = ingest.ensure_output_type(
            index, ingest_config, allow_product_changes=True)
        log(f"{output_type.name:<20}\t\t← {source_type.name}")
def index(db):
    """Wrap an open database connection in an :class:`Index`.

    :type db: datacube.index.postgres._api.PostgresDb
    """
    odc_index = Index(db)
    return odc_index
import os

from datacube.index.postgres._connections import PostgresDb
from datacube.index._api import Index
from datacube.api import GridWorkflow
from datacube.storage.storage import write_dataset_to_netcdf
from pprint import pprint
import numpy

# Compute the temporal mean NDVI for one grid cell and write it to NetCDF.
#
# Cell index, year label and product name are defined once so the output
# filename and the queries below cannot drift apart (previously the same
# magic values were repeated in several places).
CELL = (12, -16)
YEAR = '1987'
PRODUCT = 'ls8_espa_mexico'

nc_filename = os.path.expanduser(
    '~/datacube_ingest/recipes/ndvi_mean/ndvi_mean_%d_%d_%s.nc'
    % (CELL[0], CELL[1], YEAR))

db = PostgresDb.from_config()
i = Index(db)
gwf = GridWorkflow(i, product=PRODUCT)
cells_list = gwf.list_cells(product=PRODUCT, x=(-106, -101), y=(19, 23))
# NOTE(review): the filename is labelled with YEAR but no time filter is
# applied here — the full time range of the cell is loaded. Confirm intent.
sr = gwf.load(cells_list[CELL], dask_chunks={'x': 1000, 'y': 1000})

# NDVI scaled by 10000 so it fits the int16 output dtype below.
sr['ndvi'] = (sr.nir - sr.red) / (sr.nir + sr.red) * 10000
ndvi = sr.drop(['pixel_qa', 'blue', 'red', 'green', 'nir', 'swir1', 'swir2'])

# Temporal reduction: mean over the whole stack, preserving attributes,
# then cast down for compact storage.
ndvi_mean = ndvi.mean('time', keep_attrs=True)
ndvi_mean = ndvi_mean.astype('int16')
ndvi_mean.attrs['crs'] = sr.attrs['crs']

write_dataset_to_netcdf(ndvi_mean, nc_filename)
print(nc_filename)
def index(db, local_config):
    """Build an :class:`Index` from a database connection and local config.

    :type db: datacube.index.postgres._api.PostgresDb
    """
    odc_index = Index(db, local_config)
    return odc_index