from affine import Affine

from datacube.model import CRS, GeoBox


def check_open_with_api(index):
    from datacube.api.core import Datacube
    datacube = Datacube(index=index)

    input_type_name = 'ls5_nbar_albers'
    input_type = datacube.index.datasets.types.get_by_name(input_type_name)

    # 200x200 pixel tile at 25m resolution in Australian Albers (EPSG:3577)
    geobox = GeoBox(200, 200, Affine(25, 0.0, 1500000, 0.0, -25, -3900000), CRS('EPSG:3577'))

    observations = datacube.product_observations(product='ls5_nbar_albers', geopolygon=geobox.extent)
    sources = datacube.product_sources(observations, lambda ds: ds.center_time,
                                       'time', 'seconds since 1970-01-01 00:00:00')
    data = datacube.product_data(sources, geobox, input_type.measurements.values())
    assert data.blue.shape == (1, 200, 200)
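# Usage sketch (not from the original source): it assumes a configured
# PostgreSQL-backed datacube index with 'ls5_nbar_albers' datasets already
# indexed, and uses index_connect() to open the connection.
def example_check_open():
    from datacube.index import index_connect
    index = index_connect(application_name='check-open-example')
    check_open_with_api(index)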
from pathlib import Path

from pandas import to_datetime


def ingest_work(tile_index, sources):
    # grid_spec, measurements, namemap, file_path_template, output_type,
    # config and variable_params are module-level globals expected to be
    # initialised by the ingest driver before this worker runs.
    geobox = GeoBox.from_grid_spec(grid_spec, tile_index)
    data = Datacube.product_data(sources, geobox, measurements)
    nudata = data.rename(namemap)

    file_path = file_path_template.format(
        tile_index=tile_index,
        start_time=to_datetime(sources.time.values[0]).strftime('%Y%m%d%H%M%S%f'),
        end_time=to_datetime(sources.time.values[-1]).strftime('%Y%m%d%H%M%S%f'))
    # TODO: algorithm params

    print("Writing product")
    nudatasets = write_product(nudata, sources, output_type,
                               config['global_attributes'], variable_params,
                               Path(file_path))
    return nudatasets
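# A sketch of the module-level state the tile-based ingest_work above relies
# on. These example values are assumptions for illustration only; the real
# ingest driver derives them from the ingest config document.
from datacube.model import GridSpec

grid_spec = GridSpec(crs=CRS('EPSG:3577'),
                     tile_size=(100000.0, 100000.0),  # tile extent in CRS units (y, x)
                     resolution=(-25, 25))            # pixel size in CRS units (y, x)
namemap = {'band_2': 'blue'}  # source measurement name -> output name
measurements = [{'name': 'blue', 'dtype': 'int16', 'nodata': -999, 'units': '1'}]
file_path_template = '{tile_index[0]}_{tile_index[1]}_{start_time}_{end_time}.nc'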
def ingest_work(config, source_type, output_type, index, sources, geobox):
    # Helper functions (get_namemap, get_measurements, union_points, ...) are
    # defined alongside this worker in the ingest driver module.
    namemap = get_namemap(config)
    measurements = get_measurements(source_type, config)
    variable_params = get_variable_params(config)
    global_attributes = config['global_attributes']

    with datacube.set_options(reproject_threads=1):
        fuse_func = {'copy': None}[config.get(FUSER_KEY, 'copy')]
        data = Datacube.product_data(sources, geobox, measurements, fuse_func=fuse_func)
    nudata = data.rename(namemap)

    file_path = get_filename(config, index, sources)

    def _make_dataset(labels, sources):
        # Valid-data footprint: intersection of the output geobox with the
        # union of the source dataset footprints, in the output CRS.
        sources_union = union_points(*[source.extent.to_crs(geobox.crs).points
                                       for source in sources])
        valid_data = intersect_points(geobox.extent.points, sources_union)
        dataset = make_dataset(dataset_type=output_type,
                               sources=sources,
                               extent=geobox.extent,
                               center_time=labels['time'],
                               uri=file_path.absolute().as_uri(),
                               app_info=get_app_metadata(config, config['filename']),
                               valid_data=GeoPolygon(valid_data, geobox.crs))
        return dataset

    # Store in a DataArray to associate time -> Dataset
    datasets = xr_apply(sources, _make_dataset, dtype='O')
    nudata['dataset'] = datasets_to_doc(datasets)

    write_dataset_to_netcdf(nudata, global_attributes, variable_params, file_path)
    return datasets
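# A hedged sketch of how a driver might fan the config-based ingest_work
# above out over tile tasks. The (tile_index, sources, geobox) task tuples
# and the use of a process pool are assumptions, not from the source.
def run_ingest(config, source_type, output_type, tasks):
    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor() as pool:
        futures = [pool.submit(ingest_work, config, source_type, output_type,
                               tile_index, sources, geobox)
                   for tile_index, sources, geobox in tasks]
        return [f.result() for f in futures]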