def process(cube: cli.inputcube):
    """Zero out UV index values at night-time grid points.

    Args:
        cube (iris.cube.Cube):
            Cube that will have night values set to zero. This should
            contain either diagnostic values or probabilities of UV index
            above threshold.

    Returns:
        iris.cube.Cube:
            Input cube with all night values set to zero.

    Raises:
        ValueError: If input cube is suspicious, within reason. Note that
            this is a general check: the CLI expects a cube of UV index or
            probability of UV index above threshold, and will raise an
            error if given a probability below threshold, but will not
            recognise a completely inappropriate cube (eg temperature in
            Kelvin). Therefore this CLI should be used with care.
    """
    import numpy as np

    from improver.metadata.probabilistic import is_probability
    from improver.utilities.solar import DayNightMask

    # Probability cubes are only acceptable if they are "above threshold";
    # a "below threshold" probability would be inverted by night zeroing.
    if is_probability(cube) and "above_threshold" not in cube.name():
        raise ValueError(f"{cube.name()} unsuitable for night masking")

    day_night = DayNightMask()
    # Broadcast the 2D day/night field across any extra leading dimensions
    # (e.g. threshold, realization) carried by the input cube.
    night_mask = np.broadcast_to(day_night(cube).data, cube.shape)
    # Zero the data wherever the mask marks night; day points are untouched.
    cube.data = np.where(night_mask == day_night.night, 0, cube.data)
    return cube
def process(cube: cli.inputcube):
    """Set night-time values of the input cube to zero.

    Args:
        cube (iris.cube.Cube):
            Cube that will have night values set to zero.

    Returns:
        iris.cube.Cube:
            Input cube with all night values set to zero.
    """
    import numpy as np

    from improver.utilities.solar import DayNightMask

    masker = DayNightMask()
    # The day/night mask is computed on the cube's spatial grid; broadcast it
    # to the full cube shape to cover any additional dimensions.
    full_mask = np.broadcast_to(masker(cube).data, cube.shape)
    is_night = full_mask == masker.night
    cube.data = np.where(is_night, 0, cube.data)
    return cube
def process(
    cube: cli.inputcube,
    *,
    coordinates: cli.comma_separated_list = None,
    percentiles: cli.comma_separated_list = None,
    ignore_ecc_bounds=False,
):
    r"""Collapses cube coordinates and calculate percentiled data.

    Calculate percentiled data over a given coordinate by collapsing that
    coordinate. Typically used to convert realization data into percentiled
    data, but may calculate over any dimension coordinate. Alternatively
    calling this with a dataset containing probabilities will convert those
    to percentiles using the ensemble copula coupling plugin. If no
    particular percentiles are given at which to calculate values and no
    'number of percentiles' to calculate are specified, the following
    defaults will be used.
    '[0, 5, 10, 20, 25, 30, 40, 50, 60, 70, 75, 80, 90, 95, 100]'

    Args:
        cube (iris.cube.Cube):
            A Cube for processing.
        coordinates (str or list):
            Coordinate or coordinates over which to collapse data and
            calculate percentiles; e.g. 'realization' or
            'latitude,longitude'. This argument must be provided when
            collapsing a coordinate or coordinates to create percentiles,
            but is redundant when converting probabilities to percentiles
            and may be omitted. This coordinate(s) will be removed
            and replaced by a percentile coordinate.
        percentiles (list):
            Optional definition of percentiles at which to calculate data.
        ignore_ecc_bounds (bool):
            If True, where calculated percentiles are outside the ECC
            bounds range, raises a warning rather than an exception.

    Returns:
        iris.cube.Cube:
            The processed Cube.

    Raises:
        ValueError:
            If the cube name does not contain 'probability_of\_' and
            coordinates isn't used.

    Warns:
        Warning:
            If 'probability_of\_' is in the cube name and coordinates is
            used.

    """
    import warnings

    import numpy as np

    from improver.ensemble_copula_coupling.ensemble_copula_coupling import (
        ConvertProbabilitiesToPercentiles,
    )
    from improver.metadata.probabilistic import is_probability
    from improver.percentile import PercentileConverter

    if percentiles is not None:
        # CLI comma-separated lists arrive as strings; convert to floats.
        percentiles = [float(p) for p in percentiles]

    if is_probability(cube):
        result = ConvertProbabilitiesToPercentiles(
            ecc_bounds_warning=ignore_ecc_bounds
        )(cube, percentiles=percentiles)
        if coordinates:
            # Fixed message: refer to the actual 'coordinates' argument
            # rather than the stale 'COORDINATES_TO_COLLAPSE' name.
            warnings.warn(
                "Converting probabilities to percentiles. The provided "
                "coordinates will not be used."
            )
    else:
        if not coordinates:
            raise ValueError(
                "To collapse a coordinate to calculate percentiles, a "
                "coordinate or list of coordinates must be provided."
            )

        # Switch back to use the slow scipy method if the cube contains masked
        # data which the numpy method cannot handle.
        fast_percentile_method = True

        if np.ma.is_masked(cube.data):
            # Check for masked points:
            fast_percentile_method = False
        elif np.ma.isMaskedArray(cube.data):
            # Check if we have a masked array with an empty mask. If so,
            # replace it with a non-masked array:
            cube.data = cube.data.data

        result = PercentileConverter(
            coordinates,
            percentiles=percentiles,
            fast_percentile_method=fast_percentile_method,
        )(cube)
    return result
def process(
    cube: cli.inputcube,
    mask: cli.inputcube = None,
    *,
    neighbourhood_output,
    neighbourhood_shape="square",
    radii: cli.comma_separated_list,
    lead_times: cli.comma_separated_list = None,
    degrees_as_complex=False,
    weighted_mode=False,
    area_sum=False,
    percentiles: cli.comma_separated_list = DEFAULT_PERCENTILES,
    halo_radius: float = None,
):
    """Runs neighbourhood processing.

    Apply the requested neighbourhood method via the
    NeighbourhoodProcessing plugin to a Cube.

    Args:
        cube (iris.cube.Cube):
            The Cube to be processed.
        mask (iris.cube.Cube):
            A cube to mask the input cube. The data should contain 1 for
            usable points and 0 for discarded points.
            Can't be used with "percentiles" as neighbourhood_output
            (Optional)
        neighbourhood_output (str):
            The form of the results generated using neighbourhood
            processing. If "probabilities" is selected, the mean
            probability within a neighbourhood is calculated. If
            "percentiles" is selected, then the percentiles are calculated
            within a neighbourhood. Calculating percentiles from a
            neighbourhood is only supported for a circular neighbourhood.
            Options: "probabilities", "percentiles".
        neighbourhood_shape (str):
            Name of the neighbourhood method to use. Only a "circular"
            neighbourhood shape is applicable for calculating "percentiles"
            output.
            Options: "circular", "square".
            Default: "square".
        radii (list of float):
            The radius or a list of radii in metres of the neighbourhood to
            apply.
            If it is a list, it must be the same length as lead_times, which
            defines at which lead time to use which nbhood radius. The radius
            will be interpolated for intermediate lead times.
        lead_times (list of int):
            The lead times in hours that correspond to the radii to be used.
            If lead_times are set, radii must be a list the same length as
            lead_times.
        degrees_as_complex (bool):
            Include this option to process angles as complex numbers.
            Not compatible with circular kernel or percentiles.
        weighted_mode (bool):
            Include this option to set the weighting to decrease with radius.
            Otherwise a constant weighting is assumed.
            weighted_mode is only applicable for calculating "probability"
            neighbourhood output using the circular kernel.
        area_sum (bool):
            Return sum rather than fraction over the neighbourhood area.
        percentiles (float):
            Calculates value at the specified percentiles from the
            neighbourhood surrounding each grid point. This argument has no
            effect if the output is probabilities.
        halo_radius (float):
            Set this radius in metres to define the excess halo to clip. Used
            where a larger grid was defined than the standard grid and we want
            to clip the grid back to the standard grid. Otherwise no clipping
            is applied.

    Returns:
        iris.cube.Cube:
            A processed Cube.

    Raises:
        RuntimeError:
            If weighted_mode is used with the wrong neighbourhood_output.
        RuntimeError:
            If degrees_as_complex is used with
            neighbourhood_output='percentiles'.
        RuntimeError:
            If degrees_as_complex is used with
            neighbourhood_shape='circular'.
    """
    from improver.nbhood import radius_by_lead_time
    from improver.nbhood.nbhood import (
        GeneratePercentilesFromANeighbourhood,
        NeighbourhoodProcessing,
    )
    from improver.utilities.pad_spatial import remove_cube_halo
    from improver.wind_calculations.wind_direction import WindDirection

    if neighbourhood_output == "percentiles":
        if weighted_mode:
            # Note: the original implicit string concatenation was missing a
            # space, producing "...used withneighbourhood_output...".
            raise RuntimeError(
                "weighted_mode cannot be used with "
                'neighbourhood_output="percentiles"'
            )
        if degrees_as_complex:
            raise RuntimeError(
                "Cannot generate percentiles from complex numbers"
            )

    if neighbourhood_shape == "circular" and degrees_as_complex:
        raise RuntimeError(
            "Cannot process complex numbers with circular neighbourhoods"
        )

    if degrees_as_complex:
        # Convert angles in degrees to complex numbers so neighbourhood
        # averaging respects the circular nature of wind direction.
        cube.data = WindDirection.deg_to_complex(cube.data)

    # Resolve a single radius or a lead-time-dependent list of radii.
    radius_or_radii, lead_times = radius_by_lead_time(radii, lead_times)

    if neighbourhood_output == "probabilities":
        result = NeighbourhoodProcessing(
            neighbourhood_shape,
            radius_or_radii,
            lead_times=lead_times,
            weighted_mode=weighted_mode,
            sum_only=area_sum,
            re_mask=True,
        )(cube, mask_cube=mask)
    elif neighbourhood_output == "percentiles":
        result = GeneratePercentilesFromANeighbourhood(
            radius_or_radii,
            lead_times=lead_times,
            percentiles=percentiles,
        )(cube)

    if degrees_as_complex:
        # Convert the neighbourhood-processed cube back to degrees.
        result.data = WindDirection.complex_to_deg(result.data)

    if halo_radius is not None:
        result = remove_cube_halo(result, halo_radius)
    return result