def find_places(ctx: ServiceContext,
                place_group_id: str,
                base_url: str,
                box_coords: str = None,
                geom_wkt: str = None,
                query_expr: Any = None,
                geojson_obj: Dict = None,
                comb_op: str = "and") -> GeoJsonFeatureCollection:
    """
    Find places in the given place group.

    The query geometry may be given as a bounding box string, a WKT
    string, or a GeoJSON object; the first non-empty one (in that order)
    is used. Parse failures raise a ServiceBadRequestError.
    """
    query_geometry = None
    if box_coords:
        try:
            bounds = [float(s) for s in box_coords.split(",")]
            query_geometry = get_box_split_bounds_geometry(*bounds)
        except (TypeError, ValueError) as error:
            raise ServiceBadRequestError(
                "Received invalid bounding box geometry") from error
    elif geom_wkt:
        try:
            query_geometry = shapely.wkt.loads(geom_wkt)
        except (TypeError, WKTReadingError) as error:
            raise ServiceBadRequestError(
                "Received invalid geometry WKT") from error
    elif geojson_obj:
        try:
            # Reduce features / feature collections to a plain geometry.
            if geojson_obj["type"] == "FeatureCollection":
                geometry_obj = geojson_obj["features"][0]["geometry"]
            elif geojson_obj["type"] == "Feature":
                geometry_obj = geojson_obj["geometry"]
            else:
                geometry_obj = geojson_obj
            query_geometry = shapely.geometry.shape(geometry_obj)
        except (IndexError, ValueError, KeyError) as error:
            raise ServiceBadRequestError(
                "Received invalid GeoJSON object") from error
    return _find_places(ctx, place_group_id, base_url,
                        query_geometry, query_expr, comb_op)
def _get_key_and_local_path(self, ds_id: str, path: str):
    """
    Resolve a bucket *path* within dataset *ds_id* to an S3 key and a
    local filesystem path, validating file-system type and key safety.
    """
    ds_descriptor = self.service_context.get_dataset_descriptor(ds_id)
    file_system = ds_descriptor.get('FileSystem', 'local')
    required_file_system = 'local'
    if file_system != required_file_system:
        raise ServiceBadRequestError(
            f'AWS S3 data access: currently, only datasets in'
            f' file system {required_file_system!r} are supported,'
            f' but dataset {ds_id!r} uses file system {file_system!r}')
    key = f'{ds_id}/{path}'
    # Reject any '..' segment to block path traversal outside the dataset.
    if path and '..' in path.split('/'):
        raise ServiceBadRequestError(
            f'AWS S3 data access: received illegal key {key!r}')
    ds_path = ds_descriptor.get('Path')
    # Relative dataset paths are resolved against the service base dir.
    if os.path.isabs(ds_path):
        resolved = os.path.join(ds_path, path)
    else:
        resolved = os.path.join(self.service_context.base_dir,
                                ds_path, path)
    return key, pathlib.Path(os.path.normpath(resolved))
def get_time_series_for_feature_collection(ctx: ServiceContext,
                                           ds_name: str,
                                           var_name: str,
                                           feature_collection: Dict,
                                           start_date: np.datetime64 = None,
                                           end_date: np.datetime64 = None,
                                           include_count: bool = False,
                                           include_stdev: bool = False,
                                           max_valids: int = None) -> Dict:
    """
    Compute the time-series over all geometries of *feature_collection*.

    :param ctx: Service context object
    :param ds_name: The dataset identifier.
    :param var_name: The variable name.
    :param feature_collection: The feature collection.
    :param start_date: An optional start date.
    :param end_date: An optional end date.
    :param include_count: Whether to include the valid number of
        observations in the result.
    :param include_stdev: Whether to include the standard deviation
        in the result.
    :param max_valids: Optional number of valid points. None (default)
        keeps missing values as NaN; -1 returns only valid values; a
        positive integer returns the most recent valid values.
    :return: Time-series data structure.
    """
    dataset = ctx.get_time_series_dataset(ds_name, var_name=var_name)
    features = GeoJSON.get_feature_collection_features(feature_collection)
    if features is None:
        raise ServiceBadRequestError("Invalid GeoJSON feature collection")
    shapes = []
    for feature in features:
        raw_geometry = GeoJSON.get_feature_geometry(feature)
        try:
            shapes.append(shapely.geometry.shape(raw_geometry))
        except (TypeError, ValueError) as error:
            raise ServiceBadRequestError(
                "Invalid GeoJSON feature collection") from error
    with measure_time() as time_result:
        result = _get_time_series_for_geometries(
            dataset, var_name, shapes,
            start_date=start_date,
            end_date=end_date,
            include_count=include_count,
            include_stdev=include_stdev,
            max_valids=max_valids)
    if ctx.trace_perf:
        LOG.info(
            f'get_time_series_for_feature_collection: dataset id {ds_name}, variable {var_name},'
            f'size={len(result["results"])}, took {time_result.duration} seconds'
        )
    return result
def test_same_base_type(self):
    """All service errors derive from ServiceError (an HTTPError) and
    carry the expected HTTP status codes."""
    self.assertIsInstance(ServiceError(''), HTTPError)
    self.assertEqual(500, ServiceError('').status_code)
    self.assertEqual(503, ServiceError('', status_code=503).status_code)
    # Each subclass shares the base type and has its own default status.
    for error_cls, status in ((ServiceConfigError, 500),
                              (ServiceBadRequestError, 400),
                              (ServiceResourceNotFoundError, 404)):
        self.assertIsInstance(error_cls(''), ServiceError)
        self.assertEqual(status, error_cls('').status_code)
def get_body_as_json_object(self, name="JSON object"):
    """
    Parse the request body as a JSON object.

    :param name: Human-readable name used in the error message.
    :raise ServiceBadRequestError: If the body is missing or not valid JSON.
    """
    body = self.request.body
    try:
        return tornado.escape.json_decode(body)
    except (JSONDecodeError, TypeError, ValueError) as error:
        raise ServiceBadRequestError(
            f"Invalid or missing {name} in request body") from error
async def get(self):
    """Handle an S3 'list bucket' GET request, dispatching between the
    v1 (marker-based) and v2 (continuation-token) listing protocols."""
    params = dict(
        prefix=self.get_query_argument('prefix', default=None),
        delimiter=self.get_query_argument('delimiter', default=None),
        max_keys=int(self.get_query_argument('max-keys', default='1000')),
    )
    list_type = self.get_query_argument('list-type', default=None)
    if list_type is None:
        # v1 listing: pagination via 'marker'.
        params.update(marker=self.get_query_argument('marker', default=None))
        lister = list_s3_bucket_v1
    elif list_type == '2':
        # v2 listing: pagination via 'start-after'/'continuation-token'.
        params.update(
            start_after=self.get_query_argument('start-after', default=None),
            continuation_token=self.get_query_argument('continuation-token',
                                                       default=None))
        lister = list_s3_bucket_v2
    else:
        raise ServiceBadRequestError(f'Unknown bucket list type {list_type!r}')
    if _LOG_S3BUCKET_HANDLER:
        LOG.info(f'GET: list_s3_bucket_params={params}')
    bucket_mapping = self.service_context.get_s3_bucket_mapping()
    list_bucket_result = lister(bucket_mapping, **params)
    if _LOG_S3BUCKET_HANDLER:
        import json
        LOG.info(f'-->\n{json.dumps(list_bucket_result, indent=2)}')
    xml = list_bucket_result_to_xml(list_bucket_result)
    self.set_header('Content-Type', 'application/xml')
    self.write(xml)
    await self.flush()
def get_color_bars(ctx: ServiceContext, mime_type: str) -> str:
    """
    Return the server's color maps rendered as JSON or as an HTML page.

    :param ctx: Service context object.
    :param mime_type: Either 'application/json' or 'text/html'.
    :return: The serialized color-bar listing.
    :raise ServiceBadRequestError: If *mime_type* is not supported.
    """
    cmaps = get_cmaps()
    if mime_type == 'application/json':
        return json.dumps(cmaps, indent=2)
    elif mime_type == 'text/html':
        html_head = '<!DOCTYPE html>\n' + \
                    '<html lang="en">\n' + \
                    '<head>' + \
                    '<meta charset="UTF-8">' + \
                    '<title>xcube server color maps</title>' + \
                    '</head>\n' + \
                    '<body style="padding: 0.2em">\n'
        html_body = ''
        html_foot = '</body>\n' \
                    '</html>\n'
        for cmap_cat, cmap_desc, cmap_bars in cmaps:
            html_body += ' <h2>%s</h2>\n' % cmap_cat
            html_body += ' <p><i>%s</i></p>\n' % cmap_desc
            # Fixed: the style attribute was missing its opening quote.
            html_body += ' <table style="border: 0">\n'
            for cmap_bar in cmap_bars:
                cmap_name, cmap_data = cmap_bar
                # Fixed: f-strings do not apply %-formatting, so '%%' was
                # emitted literally, producing an invalid width attribute.
                cmap_image = f'<img src="data:image/png;base64,{cmap_data}" width="100%" height="32"/>'
                html_body += f' <tr><td style="width: 5em">{cmap_name}:' \
                             f'</td><td style="width: 40em">{cmap_image}</td></tr>\n'
            html_body += ' </table>\n'
        return html_head + html_body + html_foot
    raise ServiceBadRequestError(
        f'Format {mime_type!r} not supported for color bars')
def get_ne2_tile_grid(ctx: ServiceContext, tile_client: str, base_url: str):
    """Return tile-source options for the Natural Earth 2 background layer.

    Only the 'ol4' tile client is supported.
    """
    if tile_client != 'ol4':
        raise ServiceBadRequestError(f'Unknown tile client {tile_client!r}')
    pyramid = NaturalEarth2Image.get_pyramid()
    tile_url = get_ne2_tile_url(ctx, base_url)
    return get_tile_source_options(pyramid.tile_grid,
                                   tile_url,
                                   client=tile_client)
def get_time_series_for_geometry_collection(ctx: ServiceContext,
                                            ds_name: str,
                                            var_name: str,
                                            geometry_collection: Dict,
                                            start_date: np.datetime64 = None,
                                            end_date: np.datetime64 = None,
                                            include_count: bool = False,
                                            include_stdev: bool = False,
                                            max_valids: int = None) -> Dict:
    """
    Compute the time-series over all geometries of *geometry_collection*.

    :param ctx: Service context object
    :param ds_name: The dataset identifier.
    :param var_name: The variable name.
    :param geometry_collection: The geometry collection.
    :param start_date: An optional start date.
    :param end_date: An optional end date.
    :param include_count: Whether to include the valid number of
        observations in the result.
    :param include_stdev: Whether to include the standard deviation
        in the result.
    :param max_valids: Optional number of valid points. None (default)
        keeps missing values as NaN; -1 returns only valid values; a
        positive integer returns the most recent valid values.
    :return: Time-series data structure.
    """
    dataset = _get_time_series_dataset(ctx, ds_name, var_name)
    geometries = GeoJSON.get_geometry_collection_geometries(
        geometry_collection)
    if geometries is None:
        raise ServiceBadRequestError("Invalid GeoJSON geometry collection")
    shapes = []
    for raw_geometry in geometries:
        try:
            shapes.append(shapely.geometry.shape(raw_geometry))
        except (TypeError, ValueError) as error:
            raise ServiceBadRequestError(
                "Invalid GeoJSON geometry collection") from error
    return _get_time_series_for_geometries(dataset, var_name, shapes,
                                           start_date=start_date,
                                           end_date=end_date,
                                           include_count=include_count,
                                           include_stdev=include_stdev,
                                           max_valids=max_valids)
def to_datetime(cls, name: str, value: str) -> np.datetime64:
    """
    Convert a string into a numpy date/time value.

    :param name: Name of the value (used in error messages)
    :param value: The string value
    :return: The date/time value
    :raise: ServiceBadRequestError
    """
    if value is None:
        raise ServiceBadRequestError(
            f'Parameter "{name}" must be a date/time, but none was given')
    try:
        return np.datetime64(value)
    except ValueError as error:
        raise ServiceBadRequestError(
            f'Parameter "{name}" must be a date/time, but was {value!r}'
        ) from error
def to_float(cls, name: str, value: str) -> float:
    """
    Convert a string into a float.

    :param name: Name of the value (used in error messages)
    :param value: The string value
    :return: The float value
    :raise: ServiceBadRequestError
    """
    if value is None:
        raise ServiceBadRequestError(
            f'Parameter "{name}" must be a number, but none was given')
    try:
        return float(value)
    except ValueError as error:
        raise ServiceBadRequestError(
            f'Parameter "{name}" must be a number, but was {value!r}'
        ) from error
def get_query_argument(
        self, name: str,
        default: Optional[str] = UNDEFINED) -> Optional[str]:
    """Return query parameter *name*, falling back to *default*;
    raise ServiceBadRequestError if the parameter is required but absent."""
    value = self.kvp.get(name, default)
    if value is not UNDEFINED:
        return value
    raise ServiceBadRequestError(f'Missing query parameter "{name}"')
def get_dataset_tile_grid(ctx: ServiceContext, ds_id: str, var_name: str,
                          tile_client: str, base_url: str) -> Dict[str, Any]:
    """Return tile-source options for a dataset variable.

    Supports the 'ol4' and 'cesium' tile clients.
    """
    tile_grid = ctx.get_tile_grid(ds_id)
    if tile_client in ('ol4', 'cesium'):
        tile_url = get_dataset_tile_url(ctx, ds_id, var_name, base_url)
        return get_tile_source_options(tile_grid, tile_url,
                                       client=tile_client)
    raise ServiceBadRequestError(f'Unknown tile client "{tile_client}"')
async def get(self):
    """Dispatch WMTS 1.0 KVP requests: GetCapabilities and GetTile."""
    # According to WMTS 1.0 spec, query parameters must be case-insensitive.
    self.set_caseless_query_arguments()

    service = self.params.get_query_argument('service')
    if service != "WMTS":
        raise ServiceBadRequestError(
            'Value for "service" parameter must be "WMTS"')

    request = self.params.get_query_argument('request')
    if request == "GetCapabilities":
        version = self.params.get_query_argument("version", _WMTS_VERSION)
        if version != _WMTS_VERSION:
            raise ServiceBadRequestError(
                f'Value for "version" parameter must be "{_WMTS_VERSION}"')
        # Capabilities generation can be slow, so run it off the event loop.
        capabilities = await IOLoop.current().run_in_executor(
            None, get_wmts_capabilities_xml, self.service_context,
            self.base_url)
        self.set_header("Content-Type", "application/xml")
        # TODO: await self.finish(capabilities)
        self.finish(capabilities)
    elif request == "GetTile":
        version = self.params.get_query_argument("version", _WMTS_VERSION)
        if version != _WMTS_VERSION:
            raise ServiceBadRequestError(
                f'Value for "version" parameter must be "{_WMTS_VERSION}"')
        layer = self.params.get_query_argument("layer")
        try:
            ds_id, var_name = layer.split(".")
        except ValueError as error:
            raise ServiceBadRequestError(
                'Value for "layer" parameter must be "<dataset>.<variable>"'
            ) from error
        # The following parameters are mandatory as prescribed by the WMTS
        # spec, but we don't need them:
        # tileMatrixSet = self.params.get_query_argument_int('tilematrixset')
        # style = self.params.get_query_argument("style"
        mime_type = self.params.get_query_argument(
            "format", _WMTS_TILE_FORMAT).lower()
        if mime_type not in (_WMTS_TILE_FORMAT, "png"):
            raise ServiceBadRequestError(
                f'Value for "format" parameter must be "{_WMTS_TILE_FORMAT}"'
            )
        x = self.params.get_query_argument_int("tilecol")
        y = self.params.get_query_argument_int("tilerow")
        z = self.params.get_query_argument_int("tilematrix")
        # Tile rendering can be slow, so run it off the event loop.
        tile = await IOLoop.current().run_in_executor(
            None, get_dataset_tile, self.service_context,
            ds_id, var_name, x, y, z, self.params)
        self.set_header("Content-Type", "image/png")
        # TODO: await self.finish(tile)
        self.finish(tile)
    elif request == "GetFeatureInfo":
        raise ServiceBadRequestError(
            'Request type "GetFeatureInfo" not yet implemented')
    else:
        raise ServiceBadRequestError(f'Invalid request type "{request}"')
def _to_shapely_geometries(
        geo_json_geometries: List[GeoJsonGeometry]
) -> List[shapely.geometry.base.BaseGeometry]:
    """Convert a list of GeoJSON geometry objects into shapely geometries,
    raising ServiceBadRequestError on the first invalid entry."""

    def _convert(geo_json_geometry):
        try:
            return shapely.geometry.shape(geo_json_geometry)
        except (TypeError, ValueError) as error:
            raise ServiceBadRequestError(
                "Invalid GeoJSON geometry encountered") from error

    return [_convert(g) for g in geo_json_geometries]
def get_query_argument(self,
                       name: str,
                       default: Optional[str] = UNDEFINED) -> Optional[str]:
    """
    Get query argument.

    :param name: Query argument name
    :param default: Default value.
    :return: the value or none
    :raise: ServiceBadRequestError
    """
    value = self.handler.get_query_argument(name, default=default)
    # UNDEFINED is a sentinel, so compare by identity ("is"), not equality —
    # consistent with the other get_query_argument implementation and immune
    # to values that merely compare equal to the sentinel.
    if value is UNDEFINED:
        raise ServiceBadRequestError(f'Missing query parameter "{name}"')
    return value
def get_var_indexers(cls,
                     ds_name: str,
                     var_name: str,
                     var: xr.DataArray,
                     dim_names: List[str],
                     params: RequestParams) -> Dict[str, Any]:
    """
    Build a dict of indexer values for the non-spatial dimensions in
    *dim_names*, parsing each value from the request parameters
    according to the coordinate dtype.

    Special values: absent -> first coordinate; 'current' -> last
    coordinate; 'a/b' on datetime axes -> midpoint between a and b.

    :raise ServiceBadRequestError: On missing coordinates or
        unconvertible values.
    """
    var_indexers = dict()
    for dim_name in dim_names:
        if dim_name not in var.coords:
            raise ServiceBadRequestError(
                f'dimension {dim_name!r} of variable {var_name!r} of dataset {ds_name!r} has no coordinates'
            )
        coord_var = var.coords[dim_name]
        dim_value_str = params.get_query_argument(dim_name, None)
        try:
            if dim_value_str is None:
                var_indexers[dim_name] = coord_var.values[0]
            elif dim_value_str == 'current':
                var_indexers[dim_name] = coord_var.values[-1]
            elif np.issubdtype(coord_var.dtype, np.floating):
                var_indexers[dim_name] = float(dim_value_str)
            elif np.issubdtype(coord_var.dtype, np.integer):
                var_indexers[dim_name] = int(dim_value_str)
            elif np.issubdtype(coord_var.dtype, np.datetime64):
                if '/' in dim_value_str:
                    # A date range selects the midpoint of the interval.
                    date_str_1, date_str_2 = dim_value_str.split('/',
                                                                 maxsplit=1)
                    var_indexer_1 = pd.to_datetime(date_str_1)
                    var_indexer_2 = pd.to_datetime(date_str_2)
                    var_indexers[dim_name] = \
                        var_indexer_1 + (var_indexer_2 - var_indexer_1) / 2
                else:
                    var_indexers[dim_name] = pd.to_datetime(dim_value_str)
            else:
                raise ValueError(
                    f'unable to convert value {dim_value_str!r} to {coord_var.dtype!r}'
                )
        except ValueError as error:
            raise ServiceBadRequestError(
                f'{dim_value_str!r} is not a valid value for dimension {dim_name!r} '
                f'of variable {var_name!r} of dataset {ds_name!r}') from error
    return var_indexers
def get(self, place_group_id: str):
    """Handle GET: find places in *place_group_id* matching the optional
    query/geometry parameters and return them as JSON."""
    params = self.params
    query_expr = params.get_query_argument("query", None)
    geom_wkt = params.get_query_argument("geom", None)
    box_coords = params.get_query_argument("bbox", None)
    comb_op = params.get_query_argument("comb", "and")
    # The two geometry filters are mutually exclusive.
    if geom_wkt and box_coords:
        raise ServiceBadRequestError(
            'Only one of "geom" and "bbox" may be given')
    response = find_places(self.service_context,
                           place_group_id,
                           self.base_url,
                           geom_wkt=geom_wkt,
                           box_coords=box_coords,
                           query_expr=query_expr,
                           comb_op=comb_op)
    self.set_header('Content-Type', "application/json")
    self.write(json.dumps(response, indent=2))
def _get_key_and_local_path(self, ds_id: str, path: str):
    """
    Resolve a bucket *path* within dataset *ds_id* to an S3 key and a
    local filesystem path, validating file-system type and key safety.
    """
    dataset_config = self.service_context.get_dataset_config(ds_id)
    file_system = dataset_config.get('FileSystem', 'file')
    required_file_systems = ['file', 'local']
    if file_system not in required_file_systems:
        required_file_system_string = " or ".join(required_file_systems)
        raise ServiceBadRequestError(
            f'AWS S3 data access: currently, only datasets in file systems '
            f'{required_file_system_string!r} are supported, but dataset '
            f'{ds_id!r} uses file system {file_system!r}')
    key = f'{ds_id}/{path}'
    # Reject any '..' segment to block path traversal outside the dataset.
    if path and '..' in path.split('/'):
        raise ServiceBadRequestError(
            f'AWS S3 data access: received illegal key {key!r}')
    bucket_mapping = self.service_context.get_s3_bucket_mapping()
    local_path = os.path.normpath(
        os.path.join(bucket_mapping.get(ds_id), path))
    return key, pathlib.Path(local_path)
def get_time_series_for_geometry(ctx: ServiceContext,
                                 ds_name: str,
                                 var_name: str,
                                 geometry: Dict,
                                 start_date: np.datetime64 = None,
                                 end_date: np.datetime64 = None,
                                 include_count: bool = False,
                                 include_stdev: bool = False,
                                 max_valids: int = None) -> Dict:
    """
    Compute the time-series for a single *geometry*.

    :param ctx: Service context object
    :param ds_name: The dataset identifier.
    :param var_name: The variable name.
    :param geometry: The geometry, usually a point or polygon.
    :param start_date: An optional start date.
    :param end_date: An optional end date.
    :param include_count: Whether to include the valid number of
        observations in the result.
    :param include_stdev: Whether to include the standard deviation
        in the result.
    :param max_valids: Optional number of valid points. None (default)
        keeps missing values as NaN; -1 returns only valid values; a
        positive integer returns the most recent valid values.
    :return: Time-series data structure.
    """
    dataset = ctx.get_time_series_dataset(ds_name, var_name=var_name)
    if not GeoJSON.is_geometry(geometry):
        raise ServiceBadRequestError("Invalid GeoJSON geometry")
    if isinstance(geometry, dict):
        geometry = shapely.geometry.shape(geometry)
    with measure_time() as time_result:
        result = _get_time_series_for_geometry(
            dataset, var_name, geometry,
            start_date=start_date,
            end_date=end_date,
            include_count=include_count,
            include_stdev=include_stdev,
            max_valids=max_valids)
    if ctx.trace_perf:
        LOG.info(
            f'get_time_series_for_geometry: dataset id {ds_name}, variable {var_name}, '
            f'geometry type {geometry},'
            f'size={len(result["results"])}, took {time_result.duration} seconds'
        )
    return result
def get_query_argument_point(
        self, name: str,
        default: Optional[Tuple[float, float]] = UNDEFINED
) -> Optional[Tuple[float, float]]:
    """
    Get a query argument parsed as an (lon, lat) point.

    :param name: Query argument name.
    :param default: Default used when the argument is absent.
    :return: The parsed (lon, lat) tuple, the default, or a non-string
        value passed through unchanged.
    :raise ServiceBadRequestError: If the value cannot be parsed.
    """
    value = self.get_query_argument(name, default=default)
    if value is None:
        return default
    if not isinstance(value, str):
        # Already a point value (e.g. the default) — pass through.
        return value
    try:
        x, y = map(float, value.split(','))
        return x, y
    except ValueError as e:
        # Fixed: the message previously read 'Parameter ... parameter'
        # (duplicated word); also chain the original error, consistent
        # with the rest of the module.
        raise ServiceBadRequestError(
            f"Parameter {name!r} must be a point using format '<lon>,<lat>',"
            f" but was {value!r}") from e
def _to_geo_json_geometries(
        geo_json: GeoJsonObj) -> Tuple[List[GeoJsonGeometry], bool]:
    """
    Extract the list of geometries from any GeoJSON object.

    :return: A pair (geometries, is_collection) where *is_collection*
        is True for feature or geometry collections.
    :raise ServiceBadRequestError: If *geo_json* is not a GeoJSON object.
    """
    if GeoJSON.is_feature(geo_json):
        return [_get_feature_geometry(geo_json)], False
    if GeoJSON.is_feature_collection(geo_json):
        features = GeoJSON.get_feature_collection_features(geo_json)
        geometries = ([_get_feature_geometry(feature)
                       for feature in features] if features else [])
        return geometries, True
    if GeoJSON.is_geometry_collection(geo_json):
        return GeoJSON.get_geometry_collection_geometries(geo_json), True
    if GeoJSON.is_geometry(geo_json):
        return [geo_json], False
    raise ServiceBadRequestError("GeoJSON object expected")
def get_dataset_tile(ctx: ServiceContext, ds_id: str, var_name: str,
                     x: str, y: str, z: str, params: RequestParams):
    """
    Compute (or fetch from the image cache) a PNG tile for a dataset
    variable.

    :param ctx: Service context object.
    :param ds_id: Dataset identifier.
    :param var_name: Variable name.
    :param x: Tile column (string, converted to int).
    :param y: Tile row (string, converted to int).
    :param z: Tile level (string, converted to int).
    :param params: Request parameters ('cbar', 'vmin', 'vmax', 'mode',
        'debug' override defaults).
    :return: The encoded tile.
    :raise ServiceBadRequestError: If the variable is not geo-spatial or
        has fewer than two dimensions.
    """
    x = RequestParams.to_int('x', x)
    y = RequestParams.to_int('y', y)
    z = RequestParams.to_int('z', z)

    tile_comp_mode = params.get_query_argument_int('mode', ctx.tile_comp_mode)
    trace_perf = params.get_query_argument_int('debug', ctx.trace_perf) != 0
    measure_time = measure_time_cm(logger=_LOG, disabled=not trace_perf)

    var = ctx.get_variable_for_z(ds_id, var_name, z)
    dim_names = list(var.dims)
    if 'lon' not in dim_names or 'lat' not in dim_names:
        raise ServiceBadRequestError(
            f'Variable "{var_name}" of dataset "{ds_id}" is not geo-spatial')
    dim_names.remove('lon')
    dim_names.remove('lat')

    var_indexers = ctx.get_var_indexers(ds_id, var_name, var, dim_names,
                                        params)

    cmap_cbar = params.get_query_argument('cbar', default=None)
    cmap_vmin = params.get_query_argument_float('vmin', default=None)
    cmap_vmax = params.get_query_argument_float('vmax', default=None)
    if cmap_cbar is None or cmap_vmin is None or cmap_vmax is None:
        default_cmap_cbar, default_cmap_vmin, default_cmap_vmax = \
            ctx.get_color_mapping(ds_id, var_name)
        cmap_cbar = cmap_cbar or default_cmap_cbar
        cmap_vmin = cmap_vmin or default_cmap_vmin
        cmap_vmax = cmap_vmax or default_cmap_vmax

    image_id = '-'.join(
        map(str, [ds_id, z, var_name, cmap_cbar, cmap_vmin, cmap_vmax] + [
            f'{dim_name}={dim_value}'
            for dim_name, dim_value in var_indexers.items()
        ]))

    if image_id in ctx.image_cache:
        image = ctx.image_cache[image_id]
    else:
        no_data_value = var.attrs.get('_FillValue')
        valid_range = var.attrs.get('valid_range')
        if valid_range is None:
            valid_min = var.attrs.get('valid_min')
            valid_max = var.attrs.get('valid_max')
            if valid_min is not None and valid_max is not None:
                valid_range = [valid_min, valid_max]

        # Make sure we work with 2D image arrays only
        if var.ndim == 2:
            assert len(var_indexers) == 0
            array = var
        elif var.ndim > 2:
            assert len(var_indexers) == var.ndim - 2
            array = var.sel(method='nearest', **var_indexers)
        else:
            # Fixed: the error message previously interpolated var_name
            # where the dataset identifier (ds_id) was meant.
            raise ServiceBadRequestError(
                f'Variable "{var_name}" of dataset "{ds_id}" '
                'must be an N-D Dataset with N >= 2, '
                f'but "{var_name}" is only {var.ndim}-D')

        cmap_vmin = np.nanmin(
            array.values) if np.isnan(cmap_vmin) else cmap_vmin
        cmap_vmax = np.nanmax(
            array.values) if np.isnan(cmap_vmax) else cmap_vmax

        tile_grid = ctx.get_tile_grid(ds_id)

        if not tile_comp_mode:
            image = NdarrayImage(
                array,
                image_id=f'ndai-{image_id}',
                tile_size=tile_grid.tile_size,
                # tile_cache=ctx.tile_cache,
                trace_perf=trace_perf)
            image = TransformArrayImage(
                image,
                image_id=f'tai-{image_id}',
                flip_y=tile_grid.inv_y,
                force_masked=True,
                no_data_value=no_data_value,
                valid_range=valid_range,
                # tile_cache=ctx.tile_cache,
                trace_perf=trace_perf)
            image = ColorMappedRgbaImage(
                image,
                image_id=f'rgb-{image_id}',
                value_range=(cmap_vmin, cmap_vmax),
                cmap_name=cmap_cbar,
                encode=True,
                format='PNG',
                tile_cache=ctx.tile_cache,
                trace_perf=trace_perf)
        else:
            image = ColorMappedRgbaImage2(
                array,
                image_id=f'rgb-{image_id}',
                tile_size=tile_grid.tile_size,
                cmap_range=(cmap_vmin, cmap_vmax),
                cmap_name=cmap_cbar,
                encode=True,
                format='PNG',
                flip_y=tile_grid.inv_y,
                no_data_value=no_data_value,
                valid_range=valid_range,
                tile_cache=ctx.tile_cache,
                trace_perf=trace_perf)
        ctx.image_cache[image_id] = image
        if trace_perf:
            _LOG.info(
                f'Created tiled image {image_id!r} of size {image.size} with tile grid:'
            )
            _LOG.info(f' num_levels: {tile_grid.num_levels}')
            _LOG.info(f' num_level_zero_tiles: {tile_grid.num_tiles(0)}')
            _LOG.info(f' tile_size: {tile_grid.tile_size}')
            _LOG.info(f' geo_extent: {tile_grid.geo_extent}')
            _LOG.info(f' inv_y: {tile_grid.inv_y}')

    if trace_perf:
        _LOG.info(f'>>> tile {image_id}/{z}/{y}/{x}')
    with measure_time() as measured_time:
        tile = image.get_tile(x, y)
    if trace_perf:
        _LOG.info(f'<<< tile {image_id}/{z}/{y}/{x}: took ' +
                  '%.2f seconds' % measured_time.duration)

    return tile
def _get_feature_geometry(feature: GeoJsonFeature) -> GeoJsonGeometry:
    """Return the geometry of *feature*, raising if it is absent or not a
    valid GeoJSON geometry."""
    geometry = GeoJSON.get_feature_geometry(feature)
    if geometry is not None and GeoJSON.is_geometry(geometry):
        return geometry
    raise ServiceBadRequestError("GeoJSON feature without geometry")
def _check_max_valids(max_valids): if not (max_valids is None or max_valids == -1 or max_valids > 0): raise ServiceBadRequestError( 'If given, query parameter "maxValids" must be -1 or positive')
def get_dataset_tile(ctx: ServiceContext, ds_id: str, var_name: str,
                     x: str, y: str, z: str, params: RequestParams):
    """
    Compute a tile for a dataset variable, or an RGB composite when
    *var_name* is the pseudo-variable 'rgb'.

    :param ctx: Service context object.
    :param ds_id: Dataset identifier.
    :param var_name: Variable name, or 'rgb' for a 3-band composite.
    :param x: Tile column (string, converted to int).
    :param y: Tile row (string, converted to int).
    :param z: Tile level (string, converted to int).
    :param params: Request parameters (cbar/vmin/vmax or per-component
        r/g/b overrides, plus 'mode' and 'debug').
    :return: The tile as produced by get_ml_dataset_tile().
    :raise ServiceBadRequestError: If referenced variables do not exist.
    """
    x = RequestParams.to_int('x', x)
    y = RequestParams.to_int('y', y)
    z = RequestParams.to_int('z', z)

    tile_comp_mode = params.get_query_argument_int('mode', ctx.tile_comp_mode)
    trace_perf = params.get_query_argument_int('debug', ctx.trace_perf) != 0

    ml_dataset = ctx.get_ml_dataset(ds_id)
    if var_name == 'rgb':
        # RGB composite: resolve three component variables and their
        # normalisation ranges, each overridable per request.
        norm_vmin = params.get_query_argument_float('vmin', default=0.0)
        norm_vmax = params.get_query_argument_float('vmax', default=1.0)
        var_names, norm_ranges = ctx.get_rgb_color_mapping(
            ds_id, norm_range=(norm_vmin, norm_vmax))
        for i, component in enumerate(('r', 'g', 'b')):
            var_names[i] = params.get_query_argument(
                component, default=var_names[i])
            norm_ranges[i] = (
                params.get_query_argument_float(
                    f'{component}vmin', default=norm_ranges[i][0]),
                params.get_query_argument_float(
                    f'{component}vmax', default=norm_ranges[i][1]))
        cmap_name = tuple(var_names)
        cmap_range = tuple(norm_ranges)
        for name in var_names:
            if name and name not in ml_dataset.base_dataset:
                raise ServiceBadRequestError(
                    f'Variable {name!r} not found in dataset {ds_id!r}')
        # Use the first existing component variable as the reference var.
        var = None
        for name in var_names:
            if name and name in ml_dataset.base_dataset:
                var = ml_dataset.base_dataset[name]
                break
        if var is None:
            raise ServiceBadRequestError(
                f'No variable in dataset {ds_id!r} specified for RGB')
    else:
        cmap_name = params.get_query_argument('cbar', default=None)
        cmap_vmin = params.get_query_argument_float('vmin', default=None)
        cmap_vmax = params.get_query_argument_float('vmax', default=None)
        if cmap_name is None or cmap_vmin is None or cmap_vmax is None:
            default_cmap_name, (default_cmap_vmin, default_cmap_vmax) = \
                ctx.get_color_mapping(ds_id, var_name)
            cmap_name = cmap_name or default_cmap_name
            cmap_vmin = cmap_vmin or default_cmap_vmin
            cmap_vmax = cmap_vmax or default_cmap_vmax
        cmap_range = cmap_vmin, cmap_vmax
        if var_name not in ml_dataset.base_dataset:
            raise ServiceBadRequestError(
                f'Variable {var_name!r} not found in dataset {ds_id!r}')
        var = ml_dataset.base_dataset[var_name]

    labels = parse_non_spatial_labels(params.get_query_arguments(),
                                      var.dims,
                                      var.coords,
                                      allow_slices=False,
                                      exception_type=ServiceBadRequestError)

    return get_ml_dataset_tile(ml_dataset,
                               var_name, x, y, z,
                               labels=labels,
                               cmap_name=cmap_name,
                               cmap_range=cmap_range,
                               image_cache=ctx.image_cache,
                               tile_cache=ctx.tile_cache,
                               tile_comp_mode=tile_comp_mode,
                               trace_perf=trace_perf,
                               exception_type=ServiceBadRequestError)