def __init__(self, platforms_cfg, refresh=False):
    """(Re)build the platform and product layer indexes from configuration.

    Does nothing when the object has already been initialised, unless
    *refresh* is true, in which case all indexes are rebuilt from scratch.
    """
    # Guard clause: already initialised and no refresh requested.
    if self.initialised and not refresh:
        return
    self.initialised = True
    self.platforms = []
    self.platform_index = {}
    self.product_index = {}
    # One datacube session shared across all platform definitions.
    with cube() as dc:
        for cfg in platforms_cfg:
            layer = PlatformLayerDef(cfg, self.product_index, dc=dc)
            self.platforms.append(layer)
            self.platform_index[layer.name] = layer
def __init__(self, platforms_cfg, refresh=False):
    """(Re)build the platform and product layer indexes from configuration.

    Also caches the service-wide attribution from the service config.
    Does nothing when already initialised, unless *refresh* is true.
    """
    # Guard clause: already initialised and no refresh requested.
    if self.initialised and not refresh:
        return
    self.initialised = True
    self.platforms = []
    self.platform_index = {}
    self.product_index = {}
    svc_cfg = get_service_cfg()
    self.attribution = svc_cfg.attribution
    # One datacube session shared across all platform definitions.
    with cube() as dc:
        for cfg in platforms_cfg:
            layer = PlatformLayerDef(cfg, self, dc=dc)
            self.platforms.append(layer)
            self.platform_index[layer.name] = layer
def ping():
    """Health-check endpoint.

    Attempts a trivial query against the datacube index. Renders the
    "Up" page with HTTP 200 on success, or the "Down" page with HTTP
    500 on any failure.
    """
    db_ok = False
    try:
        with cube() as dc:
            # pylint: disable=protected-access
            conn = dc.index._db._engine.connect()
            try:
                results = conn.execute(
                    """
                SELECT COUNT(*)
                FROM agdc.dataset_type""",
                )
                # Any row at all proves the database answered the query.
                for _ in results:
                    db_ok = True
            finally:
                # Fix: the raw connection was previously never closed,
                # leaking a pooled connection on every health check.
                conn.close()
    except Exception:
        # Deliberate best-effort: any failure (DB down, bad credentials,
        # missing schema, ...) reports "Down" rather than crashing.
        pass
    if db_ok:
        return (render_template("ping.html", status="Up"),
                200,
                resp_headers({"Content-Type": "text/html"}))
    return (render_template("ping.html", status="Down"),
            500,
            resp_headers({"Content-Type": "text/html"}))
def feature_info(args):
    """Handle a WMS GetFeatureInfo request.

    Looks up the pixel under the requested image coordinates, loads the
    matching datasets, and returns a GeoJSON FeatureCollection of band
    values, PQ flags and data availability as a
    (body, status, headers) tuple.
    """
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}

    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # shrink geobox to point
    # Prepare to extract feature info
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    stacker = DataStacker(params.product, geo_point_geobox, params.time)

    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    with cube() as dc:
        # All-time dataset search for availability dates; masked (PQ)
        # datasets are restricted to the request time.
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index, mask=True, all_time=False,
                                       point=geo_point)

        # Taking the data as a single point so our indexes into the data should be 0,0
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = service_cfg.s3_bucket
        s3_url = service_cfg.s3_url
        isel_kwargs = {
            h_coord: 0,
            v_coord: 0
        }
        if datasets:
            # Bucket datasets by their local (timezone-aware) date.
            dataset_date_index = {}
            tz = None
            for ds in datasets:
                if tz is None:
                    # Timezone is resolved once from the query point's
                    # lat/long; all datasets share the same point.
                    crs_geo = geometry.CRS("EPSG:4326")
                    ptg = geo_point.to_crs(crs_geo)
                    tz = tz_for_coord(ptg.coords[0][0], ptg.coords[0][1])
                ld = local_date(ds, tz=tz)
                if ld in dataset_date_index:
                    dataset_date_index[ld].append(ds)
                else:
                    dataset_date_index[ld] = [ds]
            # Group datasets by time, load only datasets that match the idx_date
            available_dates = dataset_date_index.keys()
            ds_at_time = dataset_date_index.get(params.time, [])
            _LOG.info("%d datasets, %d at target date", len(datasets),
                      len(ds_at_time))
            if len(ds_at_time) > 0:
                pixel_ds = None
                data = stacker.data(ds_at_time,
                                    skip_corrections=True,
                                    manual_merge=params.product.data_manual_merge,
                                    fuse_func=params.product.fuse_func)

                # Non-geographic coordinate systems need to be projected onto a geographic
                # coordinate system. Why not use EPSG:4326?
                # Extract coordinates in CRS
                data_x = getattr(data, h_coord)
                data_y = getattr(data, v_coord)

                x = data_x[isel_kwargs[h_coord]].item()
                y = data_y[isel_kwargs[v_coord]].item()
                pt = geometry.point(x, y, params.crs)

                if params.product.multi_product:
                    feature_json["source_product"] = "%s (%s)" % (
                        ds_at_time[0].type.name,
                        ds_at_time[0].metadata_doc["platform"]["code"])

                # Project to EPSG:4326
                crs_geo = geometry.CRS("EPSG:4326")
                ptg = pt.to_crs(crs_geo)

                # Capture lat/long coordinates
                feature_json["lon"], feature_json["lat"] = ptg.coords[0]

                # Extract data pixel
                pixel_ds = data.isel(**isel_kwargs)

                # Get accurate timestamp from dataset
                feature_json["time"] = dataset_center_time(
                    ds_at_time[0]).strftime("%Y-%m-%d %H:%M:%S UTC")

                # Collect raw band values for pixel and derived bands from styles
                feature_json["bands"] = _make_band_dict(
                    params.product, pixel_ds, stacker.needed_bands())
                derived_band_dict = _make_derived_band_dict(
                    pixel_ds, params.product.style_index)
                if derived_band_dict:
                    feature_json["band_derived"] = derived_band_dict
                # Optional per-product hook for extra feature-info entries.
                if callable(params.product.feature_info_include_custom):
                    additional_data = params.product.feature_info_include_custom(
                        feature_json["bands"])
                    feature_json.update(additional_data)

            # Accumulate PQ flag bits from all masking datasets at the
            # request time, then translate bits to human-readable values.
            my_flags = 0
            for pqd in pq_datasets:
                idx_date = dataset_center_time(pqd)
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[
                        params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    # Skip datasets whose flags fall outside the
                    # product's info mask.
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        # Multi-bit flag definitions are not supported here.
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val

            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(
                get_s3_browser_uris(ds_at_time, s3_url, s3_bucket))
            if params.product.feature_info_include_utc_dates:
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "properties": feature_json
            }
        ]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
def get_map(args):
    """Handle a WMS GetMap request.

    Renders a PNG tile for the requested product/time/bbox and returns
    a (body, status, headers) tuple. Depending on zoom level and
    dataset count this is either real data, an extent polygon, or an
    empty (transparent) tile.
    """
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)

    with cube() as dc:
        # Tiling.
        stacker = DataStacker(params.product, params.geobox, params.time,
                              params.resampling, style=params.style)
        datasets = stacker.datasets(dc.index)
        zoomed_out = params.zf < params.product.min_zoom
        too_many_datasets = (params.product.max_datasets_wms > 0
                             and len(datasets) > params.product.max_datasets_wms)
        if not datasets:
            # Nothing matches: render a transparent tile.
            body = _write_empty(params.geobox)
        elif too_many_datasets:
            # Over the per-request dataset budget: fill the whole tile.
            body = _write_polygon(params.geobox, params.geobox.extent,
                                  params.product.zoom_fill)
        elif zoomed_out:
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for ds in datasets:
                if extent:
                    new_extent = bbox_to_geom(ds.extent.boundingbox,
                                              ds.extent.crs)
                    # Reproject to the first dataset's CRS before union.
                    if new_extent.crs != extent_crs:
                        new_extent = new_extent.to_crs(extent_crs)
                    extent = extent.union(new_extent)
                else:
                    extent = bbox_to_geom(ds.extent.boundingbox, ds.extent.crs)
                    extent_crs = extent.crs
            extent = extent.to_crs(params.crs)
            body = _write_polygon(params.geobox, extent,
                                  params.product.zoom_fill)
        else:
            # Full render path: load the data and (optionally) PQ masks.
            _LOG.debug("load start %s %s", datetime.now().time(),
                       args["requestid"])
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge,
                                fuse_func=params.product.fuse_func)
            _LOG.debug("load stop %s %s", datetime.now().time(),
                       args["requestid"])
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    # PQ band lives in the product itself; repackage it
                    # as its own dataset, preserving flag definitions.
                    pq_band_data = (
                        data[params.product.pq_band].dims,
                        data[params.product.pq_band].astype("uint16"))
                    pq_data = xarray.Dataset(
                        {params.product.pq_band: pq_band_data},
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.pq_band].attrs["flags_definition"] = flag_def
                else:
                    # PQ comes from a separate masking product.
                    pq_datasets = stacker.datasets(
                        dc.index,
                        mask=True,
                        all_time=params.product.pq_ignore_time)
                    if pq_datasets:
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge,
                            fuse_func=params.product.pq_fuse_func)
                    else:
                        pq_data = None
            else:
                pq_data = None
            # AND together all extent-mask functions across needed bands.
            extent_mask = None
            if not params.product.data_manual_merge:
                for band in params.style.needed_bands:
                    for f in params.product.extent_mask_func:
                        if extent_mask is None:
                            extent_mask = f(data, band)
                        else:
                            extent_mask &= f(data, band)
            # Masked styles with no PQ data cannot render: emit empty.
            if data is None or (params.style.masks and pq_data is None):
                body = _write_empty(params.geobox)
            else:
                body = _write_png(data, pq_data, params.style, extent_mask)

    return body, 200, resp_headers({"Content-Type": "image/png"})