Example #1
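Example #1 implements a WMS GetFeatureInfo handler on top of the Open Data Cube: it converts the request's image (i, j) coordinates into a geographic point, finds the datasets covering that point, extracts the pixel's band values, derived-style bands, pixel-quality flags and available dates, and returns the result as a GeoJSON FeatureCollection.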
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}

    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # Prepare to extract feature info: shrink the geobox to the query point
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    stacker = DataStacker(params.product, geo_point_geobox, params.time)

    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    with cube() as dc:
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index,
                                       mask=True,
                                       all_time=False,
                                       point=geo_point)

        # The data is taken as a single point, so our indexes into it should be (0, 0)
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = service_cfg.s3_bucket
        s3_url = service_cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}
        if datasets:
            dataset_date_index = {}
            tz = None
            for ds in datasets:
                if tz is None:
                    crs_geo = geometry.CRS("EPSG:4326")
                    ptg = geo_point.to_crs(crs_geo)
                    tz = tz_for_coord(ptg.coords[0][0], ptg.coords[0][1])
                ld = local_date(ds, tz=tz)
                if ld in dataset_date_index:
                    dataset_date_index[ld].append(ds)
                else:
                    dataset_date_index[ld] = [ds]
            # Datasets are now grouped by local date; only those matching the query date (params.time) are loaded below
            available_dates = dataset_date_index.keys()
            ds_at_time = dataset_date_index.get(params.time, [])
            _LOG.info("%d datasets, %d at target date", len(datasets),
                      len(ds_at_time))
            if len(ds_at_time) > 0:
                pixel_ds = None
                data = stacker.data(
                    ds_at_time,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)

                # Extract the pixel's coordinates in the request CRS; non-geographic
                # coordinate systems are projected onto EPSG:4326 further below.
                data_x = getattr(data, h_coord)
                data_y = getattr(data, v_coord)

                x = data_x[isel_kwargs[h_coord]].item()
                y = data_y[isel_kwargs[v_coord]].item()
                pt = geometry.point(x, y, params.crs)

                if params.product.multi_product:
                    feature_json["source_product"] = "%s (%s)" % (
                        ds_at_time[0].type.name,
                        ds_at_time[0].metadata_doc["platform"]["code"])

                # Project to EPSG:4326
                crs_geo = geometry.CRS("EPSG:4326")
                ptg = pt.to_crs(crs_geo)

                # Capture lat/long coordinates
                feature_json["lon"], feature_json["lat"] = ptg.coords[0]

                # Extract data pixel
                pixel_ds = data.isel(**isel_kwargs)

                # Get accurate timestamp from dataset
                feature_json["time"] = dataset_center_time(
                    ds_at_time[0]).strftime("%Y-%m-%d %H:%M:%S UTC")

                # Collect raw band values for pixel and derived bands from styles
                feature_json["bands"] = _make_band_dict(
                    params.product, pixel_ds, stacker.needed_bands())
                derived_band_dict = _make_derived_band_dict(
                    pixel_ds, params.product.style_index)
                if derived_band_dict:
                    feature_json["band_derived"] = derived_band_dict
                if callable(params.product.feature_info_include_custom):
                    additional_data = params.product.feature_info_include_custom(
                        feature_json["bands"])
                    feature_json.update(additional_data)

            my_flags = 0
            for pqd in pq_datasets:
                idx_date = dataset_center_time(pqd)
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[
                        params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val

            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(
                get_s3_browser_uris(ds_at_time, s3_url, s3_bucket))
            if params.product.feature_info_include_utc_dates:
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
Example #2
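Example #2 scans every dataset indexed for a product and computes its ranges: latitude/longitude bounds, the set of local acquisition dates, and a bounding box for each published CRS, optionally broken down into sub-products via the extractor callback.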
def determine_product_ranges(dc, dc_product, extractor):
    # pylint: disable=too-many-locals, too-many-branches, too-many-statements, protected-access
    start = datetime.now()
    print("Product: ", dc_product.name)
    r = {
        "lat": {
            "min": None,
            "max": None
        },
        "lon": {
            "min": None,
            "max": None
        },
    }
    sub_r = {}
    time_set = set()
    svc = get_service_cfg()
    print("OK, Let's do it")
    crsids = get_crsids(svc)
    calculate_extent = not svc.use_default_extent
    extents = {crsid: None for crsid in crsids}
    crses = get_crses(svc)
    ds_count = 0
    for ds in dc.find_datasets(product=dc_product.name):
        print("Processing a dataset", ds.id)
        loc_date = local_date(ds)
        time_set.add(loc_date)
        if calculate_extent or extractor is not None:
            if extractor is not None:
                path = extractor(ds)
                if path not in sub_r:
                    sub_r[path] = {
                        "lat": {
                            "min": None,
                            "max": None,
                        },
                        "lon": {
                            "min": None,
                            "max": None,
                        },
                        "time_set": set(),
                        "extents": {crsid: None
                                    for crsid in crsids}
                    }
                sub_r[path]["lat"]["min"] = accum_min(
                    sub_r[path]["lat"]["min"], ds.metadata.lat.begin)
                sub_r[path]["lat"]["max"] = accum_max(
                    sub_r[path]["lat"]["max"], ds.metadata.lat.end)
                sub_r[path]["lon"]["min"] = accum_min(
                    sub_r[path]["lon"]["min"], ds.metadata.lon.begin)
                sub_r[path]["lon"]["max"] = accum_max(
                    sub_r[path]["lon"]["max"], ds.metadata.lon.end)
            else:
                path = None

            r["lat"]["min"] = accum_min(r["lat"]["min"], ds.metadata.lat.begin)
            r["lat"]["max"] = accum_max(r["lat"]["max"], ds.metadata.lat.end)
            r["lon"]["min"] = accum_min(r["lon"]["min"], ds.metadata.lon.begin)
            r["lon"]["max"] = accum_max(r["lon"]["max"], ds.metadata.lon.end)

            if path is not None:
                sub_r[path]["time_set"].add(loc_date)

            for crsid in crsids:
                print("Working with CRS", crsid)
                crs = crses[crsid]
                ext = ds.extent
                if ext.crs != crs:
                    ext = ext.to_crs(crs)
                cvx_ext = ext.convex_hull
                if cvx_ext != ext:
                    print("INFO: Dataset", ds.id, "CRS", crsid,
                          "extent is not convex.")
                if extents[crsid] is None:
                    extents[crsid] = cvx_ext
                else:
                    if not extents[crsid].is_valid:
                        print("WARNING: Extent Union for", ds.id, "CRS", crsid,
                              "is not valid")
                    if not cvx_ext.is_valid:
                        print("WARNING: Extent for CRS", crsid, "is not valid")
                    union = extents[crsid].union(cvx_ext)
                    if union._geom is not None:
                        extents[crsid] = union
                    else:
                        print("WARNING: Dataset", ds.id, "CRS", crsid,
                              "union topology exception, ignoring union")
                if path is not None:
                    if sub_r[path]["extents"][crsid] is None:
                        sub_r[path]["extents"][crsid] = cvx_ext
                    else:
                        sub_r[path]["extents"][crsid] = sub_r[path]["extents"][
                            crsid].union(cvx_ext)
        ds_count += 1

    # Default extent usage
    if not calculate_extent and ds_count > 0:
        for crsid in crsids:
            crs = crses[crsid]
            default = datacube.utils.geometry.Geometry(DEFAULT_GEOJSON,
                                                       crs=DEFAULT_GEOJSON_CRS)
            extents[crsid] = default.to_crs(crs)

    r["times"] = sorted(time_set)
    r["time_set"] = time_set
    r["bboxes"] = {
        crsid: jsonise_bbox(extents[crsid].boundingbox)
        for crsid in crsids
    }
    print("LATS: ", r["lat"], " LONS: ", r["lon"])
    if extractor is not None:
        for path in sub_r.keys():
            sub_r[path]["times"] = sorted(sub_r[path]["time_set"])
            sub_r[path]["bboxes"] = {
                crsid: jsonise_bbox(sub_r[path]["extents"][crsid].boundingbox)
                for crsid in crsids
            }
            del sub_r[path]["extents"]
        r["sub_products"] = sub_r
    end = datetime.now()
    print("Scanned %d datasets in %d seconds" % (ds_count,
                                                 (end - start).seconds))
    return r
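
A minimal sketch of invoking determine_product_ranges(), assuming the Open Data Cube library is installed and the product is indexed; the application name and product name below are purely illustrative:

import datacube

dc = datacube.Datacube(app="update_ranges")
product = dc.index.products.get_by_name("ls8_nbar_albers")  # hypothetical product name

# extractor is optional: pass None, or a callable that maps a dataset to a
# sub-product key to get per-sub-product ranges as well.
ranges = determine_product_ranges(dc, product, None)
print(ranges["lat"], ranges["lon"], len(ranges["times"]))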