Example #1
def ogc_impl():
    #pylint: disable=too-many-branches
    nocase_args = lower_get_args()
    nocase_args = capture_headers(request, nocase_args)
    service = nocase_args.get("service", "").upper()
    if service:
        return ogc_svc_impl(service.lower())

    # create dummy env if not exists
    try:
        # service argument is only required (in fact only defined) by OGC for
        # GetCapabilities requests.  As long as we are persisting with a single
        # routing end point for all services, we must derive the service from the request
        # parameter.
        # This is a quick hack to fix #64.  Service and operation routing could be
        # handled more elegantly.
        op = nocase_args.get("request", "").upper()
        if op in WMS_REQUESTS:
            return ogc_svc_impl("wms")
        elif op in WCS_REQUESTS:
            return ogc_svc_impl("wcs")
        else:
            # Should we return a WMS or WCS exception if there is no service specified?
            # Defaulting to WMS because that's what we already have.
            raise WMSException("Invalid service and/or request",
                               locator="Service and request parameters")
    except OGCException as e:
        _LOG.error("Handled Error: %s", repr(e.errors))
        return e.exception_response()
    except Exception as e:
        tb = sys.exc_info()[2]
        ogc_e = WMSException("Unexpected server error: %s" % str(e),
                             http_response=500)
        return ogc_e.exception_response(traceback=traceback.extract_tb(tb))
Example #2
    def method_specific_init(self, args):
        # Validate Format parameter
        self.format = get_arg(args,
                              "info_format",
                              "info format",
                              lower=True,
                              errcode=WMSException.INVALID_FORMAT,
                              permitted_values=["application/json"])
        # Point coords
        if self.version == "1.1.1":
            coords = ["x", "y"]
        else:
            coords = ["i", "j"]
        i = args.get(coords[0])
        j = args.get(coords[1])
        if i is None:
            raise WMSException("Horizontal coordinate not supplied",
                               WMSException.INVALID_POINT,
                               "%s parameter" % coords[0])
        if j is None:
            raise WMSException("Vertical coordinate not supplied",
                               WMSException.INVALID_POINT,
                               "%s parameter" % coords[1])
        self.i = int(i)
        self.j = int(j)
Example #3
    def method_specific_init(self, args):
        # Validate Format parameter
        self.format = get_arg(args,
                              "format",
                              "image format",
                              errcode=WMSException.INVALID_FORMAT,
                              lower=True,
                              permitted_values=["image/png"])
        # Styles
        self.styles = args.get("styles", "").split(",")
        if len(self.styles) != 1:
            raise WMSException("Multi-layer GetMap requests not supported")
        style_r = self.styles[0]
        if not style_r:
            style_r = self.product.default_style.name
        self.style = self.product.style_index.get(style_r)
        if not self.style:
            raise WMSException("Style %s is not defined" % style_r,
                               WMSException.STYLE_NOT_DEFINED,
                               locator="Style parameter")
        # Zoom factor
        self.zf = zoom_factor(args, self.crs)

        # TODO: Do we need to make resampling method configurable?
        self.resampling = Resampling.nearest
Example #4
def get_product_from_arg(args, argname="layers"):
    layers = args.get(argname, "").split(",")
    if len(layers) != 1:
        raise WMSException("Multi-layer requests not supported")
    layer = layers[0]
    layer_chunks = layer.split("__")
    layer = layer_chunks[0]
    cfg = get_config()
    product = cfg.product_index.get(layer)
    if not product:
        raise WMSException("Layer %s is not defined" % layer,
                           WMSException.LAYER_NOT_DEFINED,
                           locator="Layer parameter")
    return product
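
A minimal sketch of the layer-name handling above (the layer name is made up): the optional "__" suffix is stripped before the config lookup, so a sub-product qualifier never reaches product_index.

# Illustrative only; "ls8_nbart__terrain" is a hypothetical layer name.
layer = "ls8_nbart__terrain".split("__")[0]
assert layer == "ls8_nbart"   # this is the key passed to cfg.product_index.get()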
Example #5
def get_arg(args, argname, verbose_name, lower=False,
            errcode=None, permitted_values=None):
    fmt = args.get(argname, "")
    if lower:
        fmt = fmt.lower()
    if not fmt:
        raise WMSException("No %s specified" % verbose_name,
                           errcode,
                           locator="%s parameter" % argname)

    if permitted_values:
        if fmt not in permitted_values:
            raise WMSException("%s %s is not supported" % (verbose_name, fmt),
                               errcode,
                               locator="%s parameter" % argname)
    return fmt
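
A short usage sketch, assuming get_arg and WMSException behave as shown in Example #5 (the args dict is hypothetical): a valid value is returned, lower-cased on request, while a missing or unsupported value raises WMSException with the given error code and locator.

# Hypothetical query arguments.
args = {"format": "IMAGE/PNG"}
fmt = get_arg(args, "format", "image format",
              lower=True,
              errcode=WMSException.INVALID_FORMAT,
              permitted_values=["image/png"])
# fmt == "image/png"
# get_arg(args, "styles", "styles")           would raise: "No styles specified"
# get_arg(args, "format", "image format",
#         permitted_values=["image/jpeg"])    would raise: format not supported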
Example #6
    def __init__(self, args):
        self.product = get_product_from_arg(args, 'layer')

        # Validate Format parameter
        self.format = get_arg(args,
                              "format",
                              "image format",
                              errcode=WMSException.INVALID_FORMAT,
                              lower=True,
                              permitted_values=["image/png"])
        arg_styles = args.get("styles", None)
        if arg_styles:
            # Styles
            try:
                self.styles = [
                    self.product.style_index[style_name]
                    for style_name in arg_styles.split(",")
                ]
            except KeyError as e:
                raise WMSException(f"Style {e} not valid for layer.",
                                   WMSException.STYLE_NOT_DEFINED,
                                   locator="STYLES parameter")
        else:
            self.styles = [self.product.default_style]
        # Time parameter
        self.times = get_times(args, self.product)
Example #7
def create_legends_from_styles(styles, ndates=0):
    # Run through all the requested styles and generate legend images
    imgs = []
    for s in styles:
        url = s.legend_override_with_url()
        if url:
            img = get_image_from_url(url)
            if img:
                imgs.append(img)
        elif not s.auto_legend:
            raise WMSException(f"Style {s.name} does not have a legend.")
        else:
            if ndates in [0, 1]:
                bytesio = io.BytesIO()
                s.single_date_legend(bytesio)
                bytesio.seek(0)
                imgs.append(Image.open(bytesio))
            for mdh in s.multi_date_handlers:
                if ndates == 0 or mdh.applies_to(ndates):
                    bytesio = io.BytesIO()
                    if mdh.legend(bytesio):
                        bytesio.seek(0)
                        imgs.append(Image.open(bytesio))

    if not imgs:
        return None
    min_shape = sorted([(np.sum(i.size), i.size) for i in imgs])[0][1]
    imgs_comb = np.vstack([np.asarray(i.resize(min_shape)) for i in imgs])
    imgs_comb = Image.fromarray(imgs_comb)
    b = io.BytesIO()
    imgs_comb.save(b, 'png')
    legend = make_response(b.getvalue())
    legend.mimetype = 'image/png'
    b.close()
    return legend
Example #8
def legend_graphic(args):
    params = GetLegendGraphicParameters(args)
    img = create_legends_from_styles(params.styles, ndates=len(params.times))
    if img is None:
        raise WMSException("No legend is available for this request",
                           http_response=404)
    return img
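
The legend images above are combined by resizing everything to the smallest image and stacking the results vertically with numpy. A standalone sketch of that trick using only Pillow and numpy (image sizes and colours are made up):

import io

import numpy as np
from PIL import Image

# Two toy legend images of different sizes.
imgs = [Image.new("RGB", (120, 40), (255, 255, 255)),
        Image.new("RGB", (100, 60), (200, 200, 200))]
# Pick the size with the smallest width+height sum, as create_legends_from_styles does.
min_shape = sorted([(np.sum(i.size), i.size) for i in imgs])[0][1]
# Resize every image to that size and stack them top-to-bottom.
imgs_comb = Image.fromarray(np.vstack([np.asarray(i.resize(min_shape)) for i in imgs]))
buf = io.BytesIO()
imgs_comb.save(buf, "png")   # buf.getvalue() is the combined PNG payload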
Example #9
    def get_multi_date_handler(self, count):
        for mdh in self.multi_date_handlers:
            if mdh.applies_to(count):
                return mdh
        if count in [0, 1]:
            return None
        raise WMSException(
            f"Style {self.name} does not support requests with {count} dates")
Example #10
def ogc_impl():
    #pylint: disable=too-many-branches
    nocase_args = lower_get_args()
    nocase_args = capture_headers(request, nocase_args)
    service = nocase_args.get("service", "").upper()
    if service:
        return ogc_svc_impl(service.lower())

    # create dummy env if not exists
    try:
        # service argument is only required (in fact only defined) by OGC for
        # GetCapabilities requests.  As long as we are persisting with a single
        # routing end point for all services, we must derive the service from the request
        # parameter.
        # This is a quick hack to fix #64.  Service and operation routing could be
        # handled more elegantly.
        op = nocase_args.get("request", "").upper()
        if op in WMS_REQUESTS:
            return ogc_svc_impl("wms")
        elif op in WCS_REQUESTS:
            return ogc_svc_impl("wcs")
        elif op:
            # Should we return a WMS or WCS exception if there is no service specified?
            # Defaulting to WMS because that's what we already have.
            raise WMSException("Invalid service and/or request", locator="Service and request parameters")
        else:
            cfg = get_config()
            url = nocase_args.get('Host', nocase_args['url_root'])
            base_url = get_service_base_url(cfg.allowed_urls, url)
            return (render_template(
                            "index.html",
                            cfg=cfg,
                            supported=OWS_SUPPORTED,
                            base_url=base_url,
                            version=__version__,
                    ),
                    200,
                    resp_headers({"Content-Type": "text/html"}))
    except OGCException as e:
        _LOG.error("Handled Error: %s", repr(e.errors))
        return e.exception_response()
    except Exception as e: # pylint: disable=broad-except
        tb = sys.exc_info()[2]
        ogc_e = WMSException("Unexpected server error: %s" % str(e), http_response=500)
        return ogc_e.exception_response(traceback=traceback.extract_tb(tb))
Example #11
def handle_wms(nocase_args):
    operation = nocase_args.get("request", "").upper()
    # WMS operation Map
    if not operation:
        raise WMSException("No operation specified",
                           locator="Request parameter")
    elif operation == "GETCAPABILITIES":
        return get_capabilities(nocase_args)
    elif operation == "GETMAP":
        return get_map(nocase_args)
    elif operation == "GETFEATUREINFO":
        return feature_info(nocase_args)
    elif operation == "GETLEGENDGRAPHIC":
        return legend_graphic(nocase_args)
    else:
        raise WMSException("Unrecognised operation: %s" % operation,
                           WMSException.OPERATION_NOT_SUPPORTED,
                           "Request parameter")
Example #12
def handle_wms(nocase_args):
    operation = nocase_args.get("request", "").upper()
    # WMS operation Map
    if not operation:
        raise WMSException("No operation specified",
                           locator="Request parameter")
    elif operation == "GETCAPABILITIES":
        return get_capabilities(nocase_args)
    elif operation == "GETMAP":
        return get_map(nocase_args)
    elif operation == "GETFEATUREINFO":
        return feature_info(nocase_args)
    elif operation == "GETLEGENDGRAPHIC":
        raise WMSException(
            "Operation GetLegendGraphic no longer supported.  Please use the LegendURL entry for the style from the GetCapabilities document instead",
            WMSException.OPERATION_NOT_SUPPORTED, "Request parameter")
    else:
        raise WMSException("Unrecognised operation: %s" % operation,
                           WMSException.OPERATION_NOT_SUPPORTED,
                           "Request parameter")
Example #13
def parse_time_item(item, product, raw_product):
    times = item.split('/')
    # Time range handling follows the implementation described by GeoServer
    # https://docs.geoserver.org/stable/en/user/services/wms/time.html

    # More than one element means a start/end time range was supplied
    if len(times) > 1:
        start, end = parse_wms_time_strings(times)
        start, end = start.date(), end.date()
        matching_times = [
            t for t in product.ranges['times'] if start <= t <= end
        ]
        if matching_times:
            # default to the first matching time
            return matching_times[0]
        else:
            raise WMSException(
                "Time dimension range '%s'-'%s' not valid for this layer" %
                (start, end),
                WMSException.INVALID_DIMENSION_VALUE,
                locator="Time parameter")
    elif not times[0]:
        # default to last available time if not supplied.
        product_times = get_times_for_product(product, raw_product)
        return product_times[-1]
    try:
        time = parse(times[0]).date()
    except ValueError:
        raise WMSException(
            "Time dimension value '%s' not valid for this layer" % times[0],
            WMSException.INVALID_DIMENSION_VALUE,
            locator="Time parameter")

    # Validate time parameter for requested layer.
    if time not in product.ranges["time_set"]:
        raise WMSException(
            "Time dimension value '%s' not valid for this layer" % times[0],
            WMSException.INVALID_DIMENSION_VALUE,
            locator="Time parameter")
    return time
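
A minimal sketch of the three TIME forms this function handles, following the GeoServer convention linked above (dates are made up; only the splitting and parsing used by parse_time_item are shown):

from datetime import date
from dateutil.parser import parse

# A single instant:
assert "2020-01-15".split("/") == ["2020-01-15"]
assert parse("2020-01-15").date() == date(2020, 1, 15)
# A start/end range:
assert "2020-01-01/2020-03-31".split("/") == ["2020-01-01", "2020-03-31"]
# An empty value defaults to the latest available time for the layer:
assert "".split("/") == [""]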
Example #14
    def data(self, datasets_by_query, skip_corrections=False):
        # pylint: disable=too-many-locals, consider-using-enumerate
        # datasets is an XArray DataArray of datasets grouped by time.
        data = None
        for pbq, datasets in datasets_by_query.items():
            measurements = pbq.products[0].lookup_measurements(pbq.bands)
            fuse_func = pbq.fuse_func
            if pbq.manual_merge:
                qry_result = self.manual_data_stack(datasets,
                                                    measurements,
                                                    pbq.bands,
                                                    skip_corrections,
                                                    fuse_func=fuse_func)
            else:
                qry_result = self.read_data(datasets,
                                            measurements,
                                            self._geobox,
                                            self._resampling,
                                            fuse_func=fuse_func)
            if data is None:
                data = qry_result
                continue
            if pbq.ignore_time:
                # regularise time dimension:
                if len(qry_result.time) != 1:
                    raise WMSException(
                        "Cannot ignore time on PQ (flag) bands from a time-aware product"
                    )
                if len(qry_result.time) == len(data.time):
                    qry_result["time"] = data.time
                else:
                    data_new_bands = {}
                    for band in pbq.bands:
                        band_data = qry_result[band]
                        timeless_band_data = band_data.sel(
                            time=qry_result.time.values[0])
                        band_time_slices = []
                        for dt in data.time.values:
                            band_time_slices.append(timeless_band_data)
                        timed_band_data = xarray.concat(
                            band_time_slices, data.time)
                        data_new_bands[band] = timed_band_data

                    data = data.assign(data_new_bands)
                    continue
            data = data.assign({band: qry_result[band]
                                for band in pbq.bands})

        return data
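
The "regularise time dimension" branch above repeats a single time-less flag slice once per timestamp of the main data and concatenates the copies along a new time dimension. A standalone xarray sketch of that step (toy arrays; not the project's real bands):

import numpy as np
import xarray

# Target timestamps, standing in for data.time.
time = xarray.DataArray(
    np.array(["2020-01-01", "2020-01-02"], dtype="datetime64[ns]"),
    dims="time", name="time")
# A single, time-less 2x2 flag slice.
flag_slice = xarray.DataArray(np.array([[0, 1], [1, 0]]), dims=("y", "x"))
# One copy per target timestamp, concatenated along a new "time" dimension
# that carries the target coordinate values.
timed = xarray.concat([flag_slice for _ in time.values], dim=time)
assert timed.dims == ("time", "y", "x")
assert list(timed.time.values) == list(time.values)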
Example #15
    def method_specific_init(self, args):
        # Validate Format parameter
        self.format = get_arg(args,
                              "format",
                              "image format",
                              errcode=WMSException.INVALID_FORMAT,
                              lower=True,
                              permitted_values=["image/png"])
        # Styles
        self.styles = args.get("styles", "").split(",")
        if len(self.styles) != 1:
            raise WMSException("Multi-layer GetMap requests not supported")
        style_r = self.styles[0]
        if not style_r:
            style_r = self.product.default_style.name
        self.style = self.product.style_index.get(style_r)
        if not self.style:
            raise WMSException("Style %s is not defined" % style_r,
                               WMSException.STYLE_NOT_DEFINED,
                               locator="Style parameter",
                               valid_keys=list(self.product.style_index))
        cfg = get_config()
        if self.geobox.width > cfg.wms_max_width:
            raise WMSException(
                f"Width {self.geobox.width} exceeds supported maximum {cfg.wms_max_width}.",
                locator="Width parameter")
        if self.geobox.height > cfg.wms_max_height:
            raise WMSException(
                f"Height {self.geobox.height} exceeds supported maximum {cfg.wms_max_height}.",
                locator="Height parameter")

        # Zoom factor
        self.zf = zoom_factor(args, self.crs)

        self.ows_stats = bool(args.get("ows_stats"))

        # TODO: Do we need to make resampling method configurable?
        self.resampling = Resampling.nearest
Example #16
def parse_wms_time_strings(parts):
    start = parse_wms_time_string(parts[0])
    end = parse_wms_time_string(parts[-1], start=False)

    a_tiny_bit = relativedelta(microseconds=1)
    # Follows GeoServer https://docs.geoserver.org/stable/en/user/services/wms/time.html#reduced-accuracy-times

    if isinstance(start, relativedelta):
        if isinstance(end, relativedelta):
            raise WMSException("Could not understand time value '%s'" % parts,
                               WMSException.INVALID_DIMENSION_VALUE,
                               locator="Time parameter")
        fuzzy_end = parse_wms_time_string(parts[-1], start=True)
        return fuzzy_end - start + a_tiny_bit, end
    if isinstance(end, relativedelta):
        return start, start + end - a_tiny_bit
    return start, end
Example #17
    def __init__(self, args):
        self.product = get_product_from_arg(args, 'layer')

        # Validate Format parameter
        self.format = get_arg(args,
                              "format",
                              "image format",
                              errcode=WMSException.INVALID_FORMAT,
                              lower=True,
                              permitted_values=["image/png"])
        # Styles
        self.styles = args.get("styles", "").split(",")
        if len(self.styles) != 1:
            raise WMSException("Multi-layer GetMap requests not supported")
        self.style_name = style_r = self.styles[0]
        if not style_r:
            style_r = self.product.default_style.name
        self.style = self.product.style_index.get(style_r)
Example #18
def _get_geobox(args, src_crs, dst_crs=None):
    width = int(args['width'])
    height = int(args['height'])
    minx, miny, maxx, maxy = _get_geobox_xy(args, src_crs)

    if minx == maxx or miny == maxy:
        raise WMSException("Bounding box must enclose a non-zero area")
    if dst_crs is not None:
        minx, miny, maxx, maxy = _bounding_pts(
            minx, miny,
            maxx, maxy,
            width, height,
            src_crs, dst_crs=dst_crs
        )

    out_crs = src_crs if dst_crs is None else dst_crs
    affine = Affine.translation(minx, maxy) * Affine.scale((maxx - minx) / width, (miny - maxy) / height)
    return geometry.GeoBox(width, height, affine, out_crs)
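
A small sketch of what the composed affine above does, using the affine package directly (bounding box and raster size are made up and chosen so the scales are exact): it maps pixel (column, row) to map (x, y), anchored at the top-left corner (minx, maxy) with y decreasing downwards.

from affine import Affine

minx, miny, maxx, maxy = 0.0, -128.0, 256.0, 0.0
width, height = 256, 128
affine = Affine.translation(minx, maxy) * Affine.scale((maxx - minx) / width,
                                                       (miny - maxy) / height)
assert affine * (0, 0) == (minx, maxy)            # top-left pixel corner
assert affine * (width, height) == (maxx, miny)   # bottom-right pixel corner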
Example #19
def _get_geobox(args, src_crs, dst_crs=None):
    width = int(args['width'])
    height = int(args['height'])
    minx, miny, maxx, maxy = _get_geobox_xy(args, src_crs)

    if minx == maxx or miny == maxy:
        raise WMSException("Bounding box must enclose a non-zero area")
    if dst_crs is not None:
        minx, miny, maxx, maxy = _bounding_pts(minx,
                                               miny,
                                               maxx,
                                               maxy,
                                               width,
                                               height,
                                               src_crs,
                                               dst_crs=dst_crs)

    out_crs = src_crs if dst_crs is None else dst_crs
    return create_geobox(out_crs, minx, miny, maxx, maxy, width, height)
Example #20
def ogc_svc_impl(svc):
    svc_support = OWS_SUPPORTED.get(svc)
    nocase_args = lower_get_args()
    nocase_args = capture_headers(request, nocase_args)
    service = nocase_args.get("service", svc).upper()

    # Is service activated in config?
    try:
        if not svc_support:
            raise WMSException(f"Invalid service: {svc}",
                               valid_keys=[
                                   service.service
                                   for service in OWS_SUPPORTED.values()
                                   if service.activated()
                               ],
                               code=WMSException.OPERATION_NOT_SUPPORTED,
                               locator="service parameter")
        if not svc_support.activated():
            raise svc_support.default_exception_class(
                "Invalid service and/or request",
                locator="Service and request parameters")

        # Does service match path (if supplied)
        if service != svc_support.service_upper:
            raise svc_support.default_exception_class(
                "Invalid service", locator="Service parameter")

        version = nocase_args.get("version")
        version_support = svc_support.negotiated_version(version)
    except OGCException as e:
        return e.exception_response()

    try:
        return version_support.router(nocase_args)
    except OGCException as e:
        return e.exception_response()
    except Exception as e:  #pylint: disable=broad-except
        tb = sys.exc_info()[2]
        ogc_e = version_support.exception_class("Unexpected server error: %s" %
                                                str(e),
                                                http_response=500)
        return ogc_e.exception_response(traceback=traceback.extract_tb(tb))
Example #21
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}

    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # shrink geobox to point
    # Prepare to extract feature info
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    tz = tz_for_geometry(geo_point_geobox.geographic_extent)
    stacker = DataStacker(params.product, geo_point_geobox, params.times)
    # --- Begin code section requiring datacube.
    cfg = get_config()
    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)

        # Taking the data as a single point so our indexes into the data should be 0,0
        h_coord = cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = cfg.s3_bucket
        s3_url = cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}
        if any(datasets):
            # Group datasets by time, load only datasets that match the idx_date
            global_info_written = False
            feature_json["data"] = []
            fi_date_index = {}
            ds_at_times = collapse_datasets_to_times(datasets, params.times,
                                                     tz)
            # ds_at_times["time"].attrs["units"] = 'seconds since 1970-01-01 00:00:00'
            if ds_at_times:
                data = stacker.data(
                    ds_at_times,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)
                for dt in data.time.values:
                    td = data.sel(time=dt)
                    # Global data that should apply to all dates, but needs some data to extract
                    if not global_info_written:
                        global_info_written = True
                        # Non-geographic coordinate systems need to be projected onto a geographic
                        # coordinate system.  Why not use EPSG:4326?
                        # Extract coordinates in CRS
                        data_x = getattr(td, h_coord)
                        data_y = getattr(td, v_coord)

                        x = data_x[isel_kwargs[h_coord]].item()
                        y = data_y[isel_kwargs[v_coord]].item()
                        pt = geometry.point(x, y, params.crs)

                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)

                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[
                            0]

                    date_info = {}

                    ds = ds_at_times.sel(time=dt).values.tolist()[0]
                    if params.product.multi_product:
                        date_info["source_product"] = "%s (%s)" % (
                            ds.type.name, ds.metadata_doc["platform"]["code"])

                    # Extract data pixel
                    pixel_ds = td.isel(**isel_kwargs)

                    # Get accurate timestamp from dataset
                    if params.product.is_raw_time_res:
                        date_info["time"] = dataset_center_time(ds).strftime(
                            "%Y-%m-%d %H:%M:%S UTC")
                    else:
                        date_info["time"] = ds.time.begin.strftime("%Y-%m-%d")
                    # Collect raw band values for pixel and derived bands from styles
                    date_info["bands"] = _make_band_dict(
                        params.product, pixel_ds, stacker.needed_bands())
                    derived_band_dict = _make_derived_band_dict(
                        pixel_ds, params.product.style_index)
                    if derived_band_dict:
                        date_info["band_derived"] = derived_band_dict
                    # Add any custom-defined fields.
                    for k, f in params.product.feature_info_custom_includes.items():
                        date_info[k] = f(date_info["bands"])

                    feature_json["data"].append(date_info)
                    fi_date_index[dt] = feature_json["data"][-1]

            my_flags = 0
            if params.product.pq_names == params.product.product_names:
                pq_datasets = ds_at_times
            else:
                pq_datasets = stacker.datasets(dc.index,
                                               mask=True,
                                               all_time=False,
                                               point=geo_point)

            if pq_datasets:
                if not params.product.pq_ignore_time:
                    pq_datasets = collapse_datasets_to_times(
                        pq_datasets, params.times, tz)
                pq_data = stacker.data(pq_datasets, mask=True)
                # feature_json["flags"] = []
                for dt in pq_data.time.values:
                    pqd = pq_data.sel(time=dt)
                    date_info = fi_date_index.get(dt)
                    if date_info:
                        if "flags" not in date_info:
                            date_info["flags"] = {}
                    else:
                        date_info = {"flags": {}}
                        feature_json["data"].append(date_info)
                    pq_pixel_ds = pqd.isel(**isel_kwargs)
                    # PQ flags
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    for mk, mv in params.product.flags_def.items():
                        if mk in params.product.ignore_info_flags:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if isinstance(bits, int):
                            flag = 1 << bits
                            if my_flags & flag:
                                val = values['1']
                            else:
                                val = values['0']
                            date_info["flags"][mk] = val
                        else:
                            try:
                                for i in bits:
                                    if not isinstance(i, int):
                                        raise TypeError()
                                # bits is a list of ints, so match the combined
                                # flag value against the keys of "values"
                                for key, desc in values.items():
                                    if (isinstance(key, str) and key
                                            == str(my_flags)) or (isinstance(
                                                key, int) and key == my_flags):
                                        date_info["flags"][mk] = desc
                                        break
                            except TypeError:
                                pass
            feature_json["data_available_for_dates"] = []
            for d in datasets.coords["time"].values:
                dt_datasets = datasets.sel(time=d)
                dt = datetime.utcfromtimestamp(d.astype(int) * 1e-9)
                if params.product.is_raw_time_res:
                    dt = solar_date(dt, tz)
                pt_native = None
                for ds in dt_datasets.values.item():
                    if pt_native is None:
                        pt_native = geo_point.to_crs(ds.crs)
                    elif pt_native.crs != ds.crs:
                        pt_native = geo_point.to_crs(ds.crs)
                    if ds.extent and ds.extent.contains(pt_native):
                        feature_json["data_available_for_dates"].append(
                            dt.strftime("%Y-%m-%d"))
                        break
            if ds_at_times:
                feature_json["data_links"] = sorted(
                    get_s3_browser_uris(ds_at_times, pt, s3_url, s3_bucket))
            else:
                feature_json["data_links"] = []
            if params.product.feature_info_include_utc_dates:
                unsorted_dates = []
                for tds in datasets:
                    for ds in tds.values.item():
                        if params.product.time_resolution.is_raw_time_res:
                            unsorted_dates.append(
                                ds.center_time.strftime("%Y-%m-%d"))
                        else:
                            unsorted_dates.append(
                                ds.time.begin.strftime("%Y-%m-%d"))
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json_response(result, cfg)
Example #22
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)
    qprof = QueryProfiler(params.ows_stats)
    n_dates = len(params.times)
    if n_dates == 1:
        mdh = None
    else:
        mdh = params.style.get_multi_date_handler(n_dates)
        if mdh is None:
            raise WMSException(
                "Style %s does not support GetMap requests with %d dates" %
                (params.style.name, n_dates),
                WMSException.INVALID_DIMENSION_VALUE,
                locator="Time parameter")
    qprof["n_dates"] = n_dates
    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        # Tiling.
        stacker = DataStacker(params.product,
                              params.geobox,
                              params.times,
                              params.resampling,
                              style=params.style)
        zoomed_out = params.zf < params.product.min_zoom
        too_many_datasets = False
        if not zoomed_out:
            qprof.start_event("count-datasets")
            n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)
            qprof.end_event("count-datasets")
            qprof["n_datasets"] = n_datasets
            too_many_datasets = (params.product.max_datasets_wms > 0 and
                                 n_datasets > params.product.max_datasets_wms)
        if too_many_datasets or zoomed_out:
            qprof["too_many_datasets"] = too_many_datasets
            qprof["zoomed_out"] = zoomed_out
            qprof.start_event("extent-in-query")
            extent = stacker.datasets(dc.index, mode=MVSelectOpts.EXTENT)
            qprof.start_event("extent-in-query")
            if extent is None:
                qprof["write_action"] = "No extent: Write Empty"
                qprof.start_event("write")
                body = _write_empty(params.geobox)
                qprof.end_event("write")
            else:
                qprof["write_action"] = "Polygon"
                qprof.start_event("write")
                body = _write_polygon(params.geobox, extent,
                                      params.product.zoom_fill, params.product)
                qprof.end_event("write")
        elif n_datasets == 0:
            qprof["write_action"] = "No datsets: Write Empty"
            qprof.start_event("write")
            body = _write_empty(params.geobox)
            qprof.end_event("write")
        else:
            qprof.start_event("fetch-datasets")
            datasets = stacker.datasets(dc.index)
            qprof.end_event("fetch-datasets")
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            qprof.start_event("load-data")
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge,
                                fuse_func=params.product.fuse_func)
            qprof.end_event("load-data")
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    qprof.start_event("build-pq-xarray")
                    pq_band_data = (
                        data[params.product.pq_band].dims,
                        data[params.product.pq_band].astype("uint16"))
                    pq_data = xarray.Dataset(
                        {params.product.pq_band: pq_band_data},
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.
                            pq_band].attrs["flags_definition"] = flag_def
                    qprof.end_event("build-pq-xarray")
                else:
                    qprof.start_event("load-pq-xarray")
                    n_pq_datasets = stacker.datasets(
                        dc.index,
                        mask=True,
                        all_time=params.product.pq_ignore_time,
                        mode=MVSelectOpts.COUNT)
                    if n_pq_datasets > 0:
                        pq_datasets = stacker.datasets(
                            dc.index,
                            mask=True,
                            all_time=params.product.pq_ignore_time,
                            mode=MVSelectOpts.DATASETS)
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge,
                            fuse_func=params.product.pq_fuse_func)
                    else:
                        pq_data = None
                    qprof.end_event("load-pq-xarray")
                    qprof["n_pq_datasets"] = n_pq_datasets
            else:
                pq_data = None

            qprof.start_event("build-masks")
            td_masks = []
            for npdt in data.time.values:
                td = data.sel(time=npdt)
                td_ext_mask = None
                for band in params.style.needed_bands:
                    if params.product.pq_band != band:
                        if params.product.data_manual_merge:
                            if td_ext_mask is None:
                                td_ext_mask = ~numpy.isnan(td[band])
                            else:
                                td_ext_mask &= ~numpy.isnan(td[band])
                        else:
                            for f in params.product.extent_mask_func:
                                if td_ext_mask is None:
                                    td_ext_mask = f(td, band)
                                else:
                                    td_ext_mask &= f(td, band)
                if params.product.data_manual_merge:
                    td_ext_mask = xarray.DataArray(td_ext_mask)
                td_masks.append(td_ext_mask)
            extent_mask = xarray.concat(td_masks, dim=data.time)
            qprof.end_event("build-masks")

            if not data or (params.style.masks and not pq_data):
                qprof["write_action"] = "No Data: Write Empty"
                body = _write_empty(params.geobox)
            else:
                qprof["write_action"] = "Write Data"
                body = _write_png(data, pq_data, params.style, extent_mask,
                                  params.geobox, qprof)

    if params.ows_stats:
        return json_response(qprof.profile())
    else:
        return png_response(body)
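
qprof above records named values and timed events and returns them as JSON when ows_stats is set. The real QueryProfiler is not shown in these examples; a toy stand-in illustrating the start_event / end_event / profile pattern might look like this:

import time

class ToyQueryProfiler:
    """Toy illustration only; not the project's actual QueryProfiler."""
    def __init__(self, active):
        self.active = active
        self._starts = {}
        self._stats = {}

    def __setitem__(self, key, value):
        # qprof["n_dates"] = n_dates style assignments.
        if self.active:
            self._stats[key] = value

    def start_event(self, name):
        if self.active:
            self._starts[name] = time.monotonic()

    def end_event(self, name):
        if self.active:
            self._stats[name] = time.monotonic() - self._starts.pop(name)

    def profile(self):
        return dict(self._stats)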
Example #23
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)
    n_dates = len(params.times)
    if n_dates == 1:
        mdh = None
    else:
        mdh = params.style.get_multi_date_handler(n_dates)
        if mdh is None:
            raise WMSException(
                "Style %s does not support GetMap requests with %d dates" %
                (params.style.name, n_dates),
                WMSException.INVALID_DIMENSION_VALUE,
                locator="Time parameter")

    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        # Tiling.
        stacker = DataStacker(params.product,
                              params.geobox,
                              params.times,
                              params.resampling,
                              style=params.style)
        datasets = stacker.datasets(dc.index)
        n_datasets = datasets_in_xarray(datasets)
        zoomed_out = params.zf < params.product.min_zoom
        too_many_datasets = (params.product.max_datasets_wms > 0
                             and n_datasets > params.product.max_datasets_wms)
        if n_datasets == 0:
            body = _write_empty(params.geobox)
        elif too_many_datasets:
            body = _write_polygon(params.geobox, params.geobox.extent,
                                  params.product.zoom_fill)
        elif zoomed_out:
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for dt in datasets.time.values:
                _dt = str(dt).split('T')[0]
                tds = datasets.sel(time=_dt)
                for ds in tds.values.item():
                    if extent:
                        new_extent = bbox_to_geom(ds.extent.boundingbox,
                                                  ds.extent.crs)
                        if new_extent.crs != extent_crs:
                            new_extent = new_extent.to_crs(extent_crs)
                        extent = extent.union(new_extent)
                    else:
                        extent = bbox_to_geom(ds.extent.boundingbox,
                                              ds.extent.crs)
                        extent_crs = extent.crs
            extent = extent.to_crs(params.crs)
            body = _write_polygon(params.geobox, extent,
                                  params.product.zoom_fill)
        else:
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge,
                                fuse_func=params.product.fuse_func)
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    pq_band_data = (
                        data[params.product.pq_band].dims,
                        data[params.product.pq_band].astype("uint16"))
                    pq_data = xarray.Dataset(
                        {params.product.pq_band: pq_band_data},
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.
                            pq_band].attrs["flags_definition"] = flag_def
                else:
                    pq_datasets = stacker.datasets(
                        dc.index,
                        mask=True,
                        all_time=params.product.pq_ignore_time)
                    n_pq_datasets = datasets_in_xarray(pq_datasets)
                    if n_pq_datasets > 0:
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge,
                            fuse_func=params.product.pq_fuse_func)
                    else:
                        pq_data = None
            else:
                pq_data = None

            extent_mask = None
            if not params.product.data_manual_merge:
                td_masks = []
                for npdt in data.time.values:
                    td = data.sel(time=npdt)
                    td_ext_mask = None
                    for band in params.style.needed_bands:
                        for f in params.product.extent_mask_func:
                            if td_ext_mask is None:
                                td_ext_mask = f(td, band)
                            else:
                                td_ext_mask &= f(td, band)
                    td_masks.append(td_ext_mask)
                extent_mask = xarray.concat(td_masks, dim=data.time)
                #    extent_mask.add_time(td.time, ext_mask)

            if not data or (params.style.masks and not pq_data):
                body = _write_empty(params.geobox)
            else:
                body = _write_png(data, pq_data, params.style, extent_mask,
                                  params.geobox)

    cfg = get_config()
    return body, 200, cfg.response_headers({"Content-Type": "image/png"})
Example #24
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)
    qprof = QueryProfiler(params.ows_stats)
    n_dates = len(params.times)
    if n_dates == 1:
        mdh = None
    else:
        mdh = params.style.get_multi_date_handler(n_dates)
        if mdh is None:
            raise WMSException(
                "Style %s does not support GetMap requests with %d dates" %
                (params.style.name, n_dates),
                WMSException.INVALID_DIMENSION_VALUE,
                locator="Time parameter")
    qprof["n_dates"] = n_dates
    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        # Tiling.
        stacker = DataStacker(params.product,
                              params.geobox,
                              params.times,
                              params.resampling,
                              style=params.style)
        zoomed_out = params.zf < params.product.min_zoom
        qprof["zoom_factor"] = params.zf
        qprof.start_event("count-datasets")
        n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)
        qprof.end_event("count-datasets")
        qprof["n_datasets"] = n_datasets
        too_many_datasets = (params.product.max_datasets_wms > 0
                             and n_datasets > params.product.max_datasets_wms)
        if qprof.active:
            qprof["datasets"] = stacker.datasets(dc.index,
                                                 mode=MVSelectOpts.IDS)
        if too_many_datasets or zoomed_out:
            stacker.resource_limited = True
            qprof["too_many_datasets"] = too_many_datasets
            qprof["zoomed_out"] = zoomed_out

        if stacker.resource_limited and not params.product.low_res_product_names:
            qprof.start_event("extent-in-query")
            extent = stacker.datasets(dc.index, mode=MVSelectOpts.EXTENT)
            qprof.end_event("extent-in-query")
            if extent is None:
                qprof["write_action"] = "No extent: Write Empty"
                qprof.start_event("write")
                body = _write_empty(params.geobox)
                qprof.end_event("write")
            else:
                qprof["write_action"] = "Polygon"
                qprof.start_event("write")
                body = _write_polygon(params.geobox, extent,
                                      params.product.zoom_fill, params.product)
                qprof.end_event("write")
        elif n_datasets == 0:
            qprof["write_action"] = "No datsets: Write Empty"
            qprof.start_event("write")
            body = _write_empty(params.geobox)
            qprof.end_event("write")
        else:
            if stacker.resource_limited:
                qprof.start_event("count-summary-datasets")
                qprof["n_summary_datasets"] = stacker.datasets(
                    dc.index, mode=MVSelectOpts.COUNT)
                qprof.end_event("count-summary-datasets")
            qprof.start_event("fetch-datasets")
            datasets = stacker.datasets(dc.index)
            for flagband, dss in datasets.items():
                if not dss.any():
                    _LOG.warning("Flag band %s returned no data",
                                 str(flagband))
            qprof.end_event("fetch-datasets")
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            qprof.start_event("load-data")
            data = stacker.data(datasets)
            qprof.end_event("load-data")
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            qprof.start_event("build-masks")
            td_masks = []
            for npdt in data.time.values:
                td = data.sel(time=npdt)
                td_ext_mask = None
                for band in params.style.needed_bands:
                    if band not in params.style.flag_bands:
                        if params.product.data_manual_merge:
                            if td_ext_mask is None:
                                td_ext_mask = ~numpy.isnan(td[band])
                            else:
                                td_ext_mask &= ~numpy.isnan(td[band])
                        else:
                            for f in params.product.extent_mask_func:
                                if td_ext_mask is None:
                                    td_ext_mask = f(td, band)
                                else:
                                    td_ext_mask &= f(td, band)
                if params.product.data_manual_merge:
                    td_ext_mask = xarray.DataArray(td_ext_mask)
                td_masks.append(td_ext_mask)
            extent_mask = xarray.concat(td_masks, dim=data.time)
            qprof.end_event("build-masks")

            if not data:
                qprof["write_action"] = "No Data: Write Empty"
                body = _write_empty(params.geobox)
            else:
                qprof["write_action"] = "Write Data"
                body = _write_png(data, params.style, extent_mask,
                                  params.geobox, qprof)

    if params.ows_stats:
        return json_response(qprof.profile())
    else:
        return png_response(
            body,
            extra_headers=params.product.wms_cache_rules.cache_headers(
                n_datasets))