def _fill_domain_axes(self):
    """
    Populates self.subsets with one AxisSubset per CRS axis in self.axes.

    Easting/northing axes become data-bound RegularAxis subsets whose bounds
    come from the gdal dataset extents; any other axis becomes a non-data-bound
    placeholder Axis with a zero extent.
    """
    for axis in self.axes:
        if axis.is_easting():
            # Hoist the extents lookup: the original re-queried get_extents_x()
            # four times for the same axis.
            extents_x = self.gdal_dataset.get_extents_x()
            east_axis = RegularAxis(axis.label, axis.uom,
                                    extents_x[0], extents_x[1],
                                    self.gdal_dataset.get_origin_x(), axis)
            self.subsets.append(
                AxisSubset(CoverageAxis(east_axis, None, True),
                           Interval(extents_x[0], extents_x[1])))
        elif axis.is_northing():
            extents_y = self.gdal_dataset.get_extents_y()
            north_axis = RegularAxis(axis.label, axis.uom,
                                     extents_y[0], extents_y[1],
                                     self.gdal_dataset.get_origin_y(), axis)
            self.subsets.append(
                AxisSubset(CoverageAxis(north_axis, None, True),
                           Interval(extents_y[0], extents_y[1])))
        else:
            # Axis not present in the 2D GDAL dataset: not data-bound, zero extent.
            unknown_axis = Axis(axis.label, axis.uom, 0, 0, 0, axis)
            self.subsets.append(
                AxisSubset(CoverageAxis(unknown_axis, None, False), Interval(0)))
def __init__(self, name, resolution, order, min, max=None, type=UserAxisType.NUMBER, dataBound=True, statements=None):
    """
    A user axis is a an axis containing information collected from an user. The connection to the crs axis
    can be done by checking the axis name
    :param str name: The name of the axis
    :param str | float resolution: the resolution of the axis
    :param int order: the order of this geo axis in the grid. For example EPSG:4326 has geo axes Lat Long which
    correspond to grid axes y, x; However, inside the data container (file) the grid axes are actually x, y. In
    this case the order of the Lat axis is 1 and the order of the Long axis is 0
    :param str | float min: the minimum on this axis
    :param str | float | None max: the maximum on this axis
    :param str type: the type of the values on this axis
    :param bool dataBound: whether this axis is bound to the data in the file
    :param array statements: an array of statements to be executed before expression evaluation, e.g. import
    """
    self.name = name
    self.resolution = resolution
    self.order = order
    self.interval = Interval(min, max)
    self.type = type
    self.dataBound = dataBound
    # NOTE: a literal [] default would be a single list object shared by every
    # instance (mutable-default pitfall); use a None sentinel and build a fresh
    # list per instance instead.
    self.statements = statements if statements is not None else []
def _axis_subset(self, crs_axis, nc_file):
    """
    Returns an axis subset using the given crs axis in the context of the nc file
    :param CRSAxis crs_axis: the crs definition of the axis
    :param File nc_file: the netcdf file
    :rtype AxisSubset
    """
    # Evaluate the user's ingredient expressions for this axis against the netcdf slice.
    user_axis = self._user_axis(self._get_user_axis_by_crs_axis_name(crs_axis.label),
                                NetcdfEvaluatorSlice(nc_file))
    # Normally, without pixelIsPoint:true, in the ingredient needs to +/- 0.5 * resolution for each regular axis
    # e.g: resolution for axis E is 10000, then
    # "min": "${netcdf:variable:E:min} - 10000 / 2",
    # "max": "${netcdf:variable:E:max} + 10000 / 2",
    # with pixelIsPoint: true, no need to add these values as the service will do it automatically
    if self.pixel_is_point:
        PointPixelAdjuster.adjust_axis_bounds_to_continuous_space(user_axis, crs_axis)
    else:
        # No adjustment for all regular axes but still need to translate time in datetime to decimal to calculate
        if user_axis.type == UserAxisType.DATE:
            # Decimal via str keeps full precision of the float timestamp.
            user_axis.interval.low = decimal.Decimal(str(arrow.get(user_axis.interval.low).float_timestamp))
            if user_axis.interval.high:
                user_axis.interval.high = decimal.Decimal(str(arrow.get(user_axis.interval.high).float_timestamp))
        # if low < high, adjust it (swap so that low <= high afterwards)
        if user_axis.interval.high is not None and user_axis.interval.low > user_axis.interval.high:
            user_axis.interval.low, user_axis.interval.high = user_axis.interval.high, user_axis.interval.low
    # A slice (no high bound) collapses to a single point: high == low.
    high = user_axis.interval.high if user_axis.interval.high else user_axis.interval.low
    origin = PointPixelAdjuster.get_origin(user_axis, crs_axis)
    if isinstance(user_axis, RegularUserAxis):
        geo_axis = RegularAxis(crs_axis.label, crs_axis.uom,
                               user_axis.interval.low, high, origin, crs_axis)
    else:
        # Irregular axis: derive coefficients from the direct positions;
        # datetime positions are translated to numbers first.
        if user_axis.type == UserAxisType.DATE:
            if crs_axis.is_uom_day():
                coefficients = self._translate_day_date_direct_position_to_coefficients(user_axis.interval.low,
                                                                                        user_axis.directPositions)
            else:
                coefficients = self._translate_seconds_date_direct_position_to_coefficients(user_axis.interval.low,
                                                                                            user_axis.directPositions)
        else:
            coefficients = self._translate_number_direct_position_to_coefficients(user_axis.interval.low,
                                                                                  user_axis.directPositions)
        geo_axis = IrregularAxis(crs_axis.label, crs_axis.uom,
                                 user_axis.interval.low, high, origin, coefficients, crs_axis)
    grid_low = 0
    grid_high = PointPixelAdjuster.get_grid_points(user_axis, crs_axis)
    # NOTE: Grid Coverage uses the direct intervals as in Rasdaman
    if self.grid_coverage is False and grid_high > grid_low:
        grid_high -= 1
    grid_axis = GridAxis(user_axis.order, crs_axis.label, user_axis.resolution, grid_low, grid_high)
    if user_axis.type == UserAxisType.DATE:
        # Convert the numeric bounds back to datetime strings for the returned geo axis.
        self._translate_decimal_to_datetime(user_axis, geo_axis)
    return AxisSubset(CoverageAxis(geo_axis, grid_axis, user_axis.dataBound),
                      Interval(user_axis.interval.low, user_axis.interval.high))
def _get_geo_coords(self, root):
    """
    Returns the geo coordinates as a list in the order of the geo axes of the crs
    :param root: the xml root
    :rtype: list[Interval]
    """
    lower_corner = root.xpath("//gml:Envelope/gml:lowerCorner",
                              namespaces=self._get_ns())[0].text.strip().split(" ")
    upper_corner = root.xpath("//gml:Envelope/gml:upperCorner",
                              namespaces=self._get_ns())[0].text.strip().split(" ")
    # Pair the lower/upper corner values positionally into Interval objects.
    return [Interval(low_value, upper_corner[position])
            for position, low_value in enumerate(lower_corner)]
def _get_raster_coords(self, root):
    """
    Returns the raster coordinates as a list in the order of the grid axes
    (not necesarily the order of the geo axes)
    :param root: the xml root
    :rtype: list[Interval]
    """
    low_values = root.xpath("//gml:GridEnvelope/gml:low",
                            namespaces=self._get_ns())[0].text.strip().split(" ")
    high_values = root.xpath("//gml:GridEnvelope/gml:high",
                             namespaces=self._get_ns())[0].text.strip().split(" ")
    # Pair the low/high grid bounds positionally into Interval objects.
    return [Interval(low_value, high_values[position])
            for position, low_value in enumerate(low_values)]
def _get_coverage_url(self, axis_subsets):
    """
    Returns a get coverage request for the given coverage with the given subsets
    :param list[AxisSubset] axis_subsets: a list of axis subsets
    :rtype: str
    """
    subsets = []
    # Request tiff encoding only for multi-dimensional output; renamed from
    # `format`, which shadowed the builtin.
    format_part = "&format=image/tiff&" if len(axis_subsets) > 1 else "&"
    for axis_subset in axis_subsets:
        # When subset correction is enabled, shrink the interval by half a grid
        # pixel on each end so adjacent requests do not overlap on the border.
        error_correction = (
            float(axis_subset.coverage_axis.grid_axis.resolution) / 2) if ConfigManager.subset_correction else 0
        high = axis_subset.interval.high - error_correction if axis_subset.interval.high is not None else None
        new_interval = Interval(axis_subset.interval.low + error_correction, high)
        subsets.append("subset=" + axis_subset.coverage_axis.axis.label + "(" + str(new_interval) + ")")
    return self.wcs_url + "?service=WCS&version=2.0.1&request=GetCoverage&coverageId=" + \
        self.coverage_id + format_part + "&".join(subsets)
def _axis_subset(self, crs_axis, evaluator_slice, resolution=None):
    """
    Returns an axis subset using the given crs axis in the context of the gdal file
    :param CRSAxis crs_axis: the crs definition of the axis
    :param GDALEvaluatorSlice evaluator_slice: the evaluator for GDAL file
    :param resolution: Known axis resolution, no need to evaluate sentence expression from ingredient file
    (e.g: Sentinel2 recipe)
    :rtype AxisSubset
    """
    user_axis = self._user_axis(self._get_user_axis_by_crs_axis_name(crs_axis.label),
                                evaluator_slice)
    if resolution is not None:
        # Caller already knows the resolution (e.g. Sentinel2 recipe); override it.
        user_axis.resolution = resolution
    # A slice (no high bound) collapses to a single point: high == low.
    high = user_axis.interval.high if user_axis.interval.high is not None else user_axis.interval.low
    if user_axis.type == UserAxisType.DATE:
        # it must translate datetime string to float by arrow for calculating later
        user_axis.interval.low = arrow.get(user_axis.interval.low).float_timestamp
        if user_axis.interval.high is not None:
            user_axis.interval.high = arrow.get(user_axis.interval.high).float_timestamp
    if isinstance(user_axis, RegularUserAxis):
        geo_axis = RegularAxis(crs_axis.label, crs_axis.uom,
                               user_axis.interval.low, high, user_axis.interval.low, crs_axis)
    else:
        # Irregular axis (coefficients must be number, not datetime string)
        if user_axis.type == UserAxisType.DATE:
            if crs_axis.is_time_day_axis():
                coefficients = self._translate_day_date_direct_position_to_coefficients(
                    user_axis.interval.low, user_axis.directPositions)
            else:
                coefficients = self._translate_seconds_date_direct_position_to_coefficients(
                    user_axis.interval.low, user_axis.directPositions)
        else:
            coefficients = self._translate_number_direct_position_to_coefficients(
                user_axis.interval.low, user_axis.directPositions)
        self._update_for_slice_group_size(self.coverage_id, user_axis, crs_axis, coefficients)
        geo_axis = IrregularAxis(crs_axis.label, crs_axis.uom,
                                 user_axis.interval.low, high, user_axis.interval.low, coefficients, crs_axis)
    if not crs_axis.is_x_axis() and not crs_axis.is_y_axis():
        # GDAL model is 2D so on any axis except x/y we expect to have only one value
        grid_low = 0
        # grid_high stays None for a pure slice (no high bound) so the
        # grid-coverage adjustment below is skipped for it.
        grid_high = None
        if user_axis.interval.high is not None:
            grid_high = 0
    else:
        grid_low = 0
        number_of_grid_points = decimal.Decimal(str(user_axis.interval.high)) \
            - decimal.Decimal(str(user_axis.interval.low))
        # number_of_grid_points = (geo_max - geo_min) / resolution
        grid_high = grid_low + number_of_grid_points / decimal.Decimal(user_axis.resolution)
        grid_high = HighPixelAjuster.adjust_high(grid_high)
        # Negative axis, e.g: Latitude (min <--- max)
        if user_axis.resolution < 0:
            grid_high = int(abs(math.floor(grid_high)))
        else:
            # Positive axis, e.g: Longitude (min --> max)
            grid_high = int(abs(math.ceil(grid_high)))
    # NOTE: Grid Coverage uses the direct intervals as in Rasdaman
    if self.grid_coverage is False and grid_high is not None:
        if grid_high > grid_low:
            grid_high -= 1
    grid_axis = GridAxis(user_axis.order, crs_axis.label, user_axis.resolution, grid_low, grid_high)
    geo_axis.origin = PointPixelAdjuster.get_origin(user_axis, crs_axis)
    if user_axis.type == UserAxisType.DATE:
        # Convert the numeric bounds back to datetime strings for the returned geo axis.
        self._translate_decimal_to_datetime(user_axis, geo_axis)
    # NOTE: current, gdal recipe supports only has 2 axes which are "bounded" (i.e: they exist as 2D axes in file)
    # and 1 or more another axes gotten (i.e: from fileName) which are not "bounded" to create 3D+ coverage.
    data_bound = crs_axis.is_y_axis() or crs_axis.is_x_axis()
    return AxisSubset(CoverageAxis(geo_axis, grid_axis, data_bound),
                      Interval(user_axis.interval.low, user_axis.interval.high))
def _axis_subset(self, grib_file, evaluated_messages, crs_axis):
    """
    Returns an axis subset using the given crs axis in the context of the grib file
    :param File grib_file: the current grib file (slice) is evaluated
    :param List[GirbMessages] evaluated_messages: all Grib messages was evaluated
    :param CRSAxis crs_axis: the crs definition of the axis
    :rtype AxisSubset
    """
    # first grib message from grib file, used to extract grib variables only
    first_grib_message = self.dataset.message(1)
    # As all the messages contain same axes (but different intervals), so first message is ok to get user_axis
    first_user_axis = self._get_user_axis_in_evaluated_message(evaluated_messages[0], crs_axis.label)
    # NOTE: we don't want to change this user_axis belongs to messages, so clone it
    user_axis = copy.deepcopy(first_user_axis)
    # Then, we calculate the geo, grid bounds, origin, resolution of this axis for the slice
    self._set_low_high(evaluated_messages, user_axis)
    # A slice (no high bound) collapses to a single point: high == low.
    high = user_axis.interval.high if user_axis.interval.high is not None else user_axis.interval.low
    origin = PointPixelAdjuster.get_origin(user_axis, crs_axis)
    if isinstance(user_axis, RegularUserAxis):
        geo_axis = RegularAxis(crs_axis.label, crs_axis.uom,
                               user_axis.interval.low, high, origin, crs_axis)
    else:
        # after all messages was evaluated, we could get the direct_positions of the axis as in netcdf
        # then, it can evaluate the grib sentence normally, e.g: ${grib:axis:level} + 5
        evaluating_sentence = user_axis.directPositions
        direct_positions = self._get_axis_values(evaluated_messages, user_axis)
        # convert all of values in the list to string then it can be evaluated
        direct_positions = list_util.to_list_string(direct_positions)
        evaluator_slice = GribMessageEvaluatorSlice(first_grib_message, grib_file, direct_positions)
        user_axis.directPositions = self.sentence_evaluator.evaluate(evaluating_sentence, evaluator_slice,
                                                                     user_axis.statements)
        # axis is datetime
        if user_axis.type == UserAxisType.DATE:
            if crs_axis.is_time_day_axis():
                coefficients = self._translate_day_date_direct_position_to_coefficients(
                    user_axis.interval.low, user_axis.directPositions)
            else:
                coefficients = self._translate_seconds_date_direct_position_to_coefficients(
                    user_axis.interval.low, user_axis.directPositions)
        else:
            # number axis like Index1D
            coefficients = self._translate_number_direct_position_to_coefficients(
                user_axis.interval.low, user_axis.directPositions)
        self._update_for_slice_group_size(self.coverage_id, user_axis, crs_axis, coefficients)
        geo_axis = IrregularAxis(crs_axis.label, crs_axis.uom,
                                 user_axis.interval.low, high, origin, coefficients, crs_axis)
    grid_low = 0
    grid_high = PointPixelAdjuster.get_grid_points(user_axis, crs_axis)
    # NOTE: Grid Coverage uses the direct intervals as in Rasdaman
    if self.grid_coverage is False and grid_high > grid_low:
        grid_high -= 1
    grid_axis = GridAxis(user_axis.order, crs_axis.label, user_axis.resolution, grid_low, grid_high)
    if user_axis.type == UserAxisType.DATE:
        # Convert the numeric bounds back to datetime strings for the returned geo axis.
        self._translate_decimal_to_datetime(user_axis, geo_axis)
    return AxisSubset(CoverageAxis(geo_axis, grid_axis, user_axis.dataBound),
                      Interval(user_axis.interval.low, user_axis.interval.high))
def _axis_subset(self, crs_axis, gdal_file):
    """
    Returns an axis subset using the given crs axis in the context of the gdal file
    :param CRSAxis crs_axis: the crs definition of the axis
    :param File gdal_file: the gdal file
    :rtype AxisSubset
    """
    user_axis = self._user_axis(self._get_user_axis_by_crs_axis_name(crs_axis.label),
                                GDALEvaluatorSlice(GDALGmlUtil(gdal_file.get_filepath())))
    # A slice (no high bound) collapses to a single point: high == low.
    high = user_axis.interval.high if user_axis.interval.high else user_axis.interval.low
    if isinstance(user_axis, RegularUserAxis):
        geo_axis = RegularAxis(crs_axis.label, crs_axis.uom,
                               user_axis.interval.low, high, user_axis.interval.low, crs_axis)
    else:
        # if irregular axis value is fetched from fileName so the coefficient is [0] as slicing
        if user_axis.directPositions == AbstractToCoverageConverter.DIRECT_POSITIONS_SLICING:
            user_axis.directPositions = AbstractToCoverageConverter.COEFFICIENT_SLICING
        geo_axis = IrregularAxis(crs_axis.label, crs_axis.uom,
                                 user_axis.interval.low, high, user_axis.interval.low,
                                 user_axis.directPositions, crs_axis)
    if not crs_axis.is_easting() and not crs_axis.is_northing():
        # GDAL model is 2D so on any axis except x/y we expect to have only one value
        grid_low = 0
        grid_high = 0
    else:
        grid_low = 0
        number_of_grid_points = decimal.Decimal(str(user_axis.interval.high)) \
            - decimal.Decimal(str(user_axis.interval.low))
        # number_of_grid_points = (geo_max - geo_min) / resolution
        grid_high = grid_low + number_of_grid_points / decimal.Decimal(user_axis.resolution)
        grid_high = HighPixelAjuster.adjust_high(grid_high)
        # Negative axis, e.g: Latitude (min <--- max)
        if user_axis.resolution < 0:
            grid_high = int(abs(math.floor(grid_high)))
        else:
            # Positive axis, e.g: Longitude (min --> max)
            grid_high = int(abs(math.ceil(grid_high)))
    # NOTE: Grid Coverage uses the direct intervals as in Rasdaman
    if self.grid_coverage is False:
        if grid_high > grid_low:
            grid_high -= 1
    grid_axis = GridAxis(user_axis.order, crs_axis.label, user_axis.resolution, grid_low, grid_high)
    geo_axis.origin = PointPixelAdjuster.get_origin(user_axis, crs_axis)
    if user_axis.type == UserAxisType.DATE:
        # Convert the numeric bounds back to datetime strings for the returned geo axis.
        self._translate_decimal_to_datetime(user_axis, geo_axis)
    # NOTE: current, gdal recipe supports only has 2 axes which are "bounded" (i.e: they exist as 2D axes in file)
    # and 1 or more another axes gotten (i.e: from fileName) which are not "bounded" to create 3D+ coverage.
    data_bound = crs_axis.is_northing() or crs_axis.is_easting()
    return AxisSubset(CoverageAxis(geo_axis, grid_axis, data_bound),
                      Interval(user_axis.interval.low, user_axis.interval.high))
def _get_intervals(self, coverage_axes, partition_scheme):
    """
    Returns the slices
    :param list[CoverageAxis] coverage_axes: a list of coverage axes
    :param list[int] partition_scheme: a list of the number of pixels to be included on each dimension
    :rtype: iterator over tuples of Interval (an itertools.product, not a materialized list)
    """
    intervals = []
    for index in range(0, len(coverage_axes)):
        axis_intervals = []
        if coverage_axes[index].axis.coefficient is not None:
            # Axis is irregular compute it using its coefficient list
            # (the partition entry for this axis is consumed but not used:
            # irregular axes produce one Interval per coefficient).
            pop(partition_scheme)
            origin = coverage_axes[index].axis.origin
            # if axis is time axis then need to convert coeffcient from datetime to float
            if ("\"" in origin):
                origin_date = self.time_util.get_time_crs_origin(coverage_axes[index].axis.crs_axis.uri)
                # uom here is a URI (e.g: http://www.opengis.net/def/uom/UCUM/0/d ) so need to extract the unit only (d)
                time_uom = coverage_axes[index].axis.crs_axis.uom.rsplit('/', 1)[-1]
                origin = self.time_util.count_offset_dates(origin_date, origin, time_uom)
            origin = float(origin)
            resolution = float(coverage_axes[index].grid_axis.resolution)
            for coefficient in coverage_axes[index].axis.coefficient:
                # if axis is time axis then need to convert coeffcient from datetime to float
                if ("\"" in coefficient):
                    coefficient = float(self.time_util.count_offset_dates(origin_date, coefficient, time_uom))
                value = origin + resolution * float(coefficient - origin)
                axis_intervals.append(Interval(value))
        else:
            # Regular axis, compute it by stepping through the spatial domain
            if coverage_axes[index].axis.crs_axis.is_easting(
            ) or coverage_axes[index].axis.crs_axis.is_northing():
                # For x and y axes we can split them according to the user's partitioning
                resolution = float(coverage_axes[index].grid_axis.resolution)
                error_correction = (resolution / 2) if ConfigManager.subset_correction else 0
                geo_pixels_per_slice = float(pop(partition_scheme)) * resolution
                # A negative resolution means the axis runs max -> min (e.g. Latitude),
                # so start/stop are swapped accordingly.
                stop = float(coverage_axes[index].axis.high) if resolution > 0 else float(
                    coverage_axes[index].axis.low)
                low = float(coverage_axes[index].axis.low) if resolution > 0 else float(
                    coverage_axes[index].axis.high)
                high = low + geo_pixels_per_slice
                if (resolution > 0 and high >= stop) or (resolution < 0 and stop >= high):
                    high = stop
                while (resolution > 0 and high <= stop) or (resolution < 0 and stop <= high):
                    if low < high:
                        axis_intervals.append(Interval(low, high))
                    else:
                        axis_intervals.append(Interval(high, low))
                    # To make sure there is no grid pixel slipping through the cracks due to the decimal computations
                    # start the next slice with one geo pixel before the last one ended.
                    # Error correction is disabled by default, the user can enable it
                    low = high - error_correction
                    high = low + geo_pixels_per_slice
                # if the interval is not exactly divided by the number of geo pixels per slice, compute the last slice
                if ((resolution > 0) and ((low + error_correction) < stop)) or \
                        ((resolution < 0) and (stop < (low + error_correction))):
                    axis_intervals.append(Interval(low, stop))
                # NOTE(review): incrementing the for-loop variable has no effect on the
                # iteration (range() reassigns it); looks like leftover code — confirm.
                index += 1
            else:
                # Not an x, y axis and we are exporting as geotiff, so we cannot honor the user's choice of
                # partitioning, we have to step exactly one geo pixel each time
                pop(partition_scheme)
                resolution = float(coverage_axes[index].grid_axis.resolution)
                low = coverage_axes[index].axis.low
                high = coverage_axes[index].axis.high
                # if low and high are DateTime then need to calculate it to numeric values from origin of time crs
                if ("\"" in low):
                    origin_date = self.time_util.get_time_crs_origin(coverage_axes[index].axis.crs_axis.uri)
                    # uom here is a URI (e.g: http://www.opengis.net/def/uom/UCUM/0/d ) so need to extract the unit only (d)
                    time_uom = coverage_axes[index].axis.crs_axis.uom.rsplit('/', 1)[-1]
                    low = self.time_util.count_offset_dates(origin_date, low, time_uom)
                    high = self.time_util.count_offset_dates(origin_date, high, time_uom)
                low = float(low) if resolution > 0 else float(high)
                stop = float(high) if resolution > 0 else float(low)
                while (resolution > 0 and low <= stop) or (resolution < 0 and stop <= low):
                    axis_intervals.append(Interval(low))
                    low += resolution
        intervals.append(axis_intervals)
    # Cartesian product over all axes yields every multi-dimensional slice.
    return itertools.product(*intervals)