def apply_ice(self, prob_lightning_cube, ice_cube):
    """
    Modify Nowcast of lightning probability with ice data from a radar
    composite (VII; Vertically Integrated Ice).

    Args:
        prob_lightning_cube (iris.cube.Cube):
            First-guess lightning probability.
            The forecast_period coord is modified in-place to "minutes".
        ice_cube (iris.cube.Cube):
            Analysis of vertically integrated ice (VII) from radar
            thresholded at self.ice_thresholds.
            Units of threshold coord modified in-place to kg m^-2.

    Returns:
        iris.cube.Cube:
            Output cube containing updated nowcast lightning probability.
            This cube will have the same dimensions and meta-data as
            prob_lightning_cube.
            The influence of the data in ice_cube reduces linearly to zero
            as forecast_period increases to 2H30M.

    Raises:
        iris.exceptions.ConstraintMismatchError:
            If ice_cube does not contain the expected thresholds.
    """
    prob_lightning_cube.coord("forecast_period").convert_units("minutes")
    # Normalise the prob(ice) threshold coordinate units before matching
    # against self.ice_thresholds.
    vii_threshold_coord = find_threshold_coordinate(ice_cube)
    vii_threshold_coord.convert_units("kg m^-2")
    missing_msg = "No matching prob(Ice) cube for threshold {}"
    updated_slices = iris.cube.CubeList([])
    for lightning_slice in prob_lightning_cube.slices_over("time"):
        lead_minutes = lightning_slice.coord("forecast_period").points[0]
        for vii_threshold, max_prob in zip(
                self.ice_thresholds, self.ice_scaling):
            threshold_constraint = iris.Constraint(coord_values={
                vii_threshold_coord:
                    lambda t: isclose(t.point, vii_threshold)})
            vii_slice = ice_cube.extract(threshold_constraint)
            if not isinstance(vii_slice, iris.cube.Cube):
                raise ConstraintMismatchError(
                    missing_msg.format(vii_threshold))
            # Linearly reduce impact of ice as lead time increases to
            # 2H30M (150 minutes), where it vanishes entirely.
            upper_bound = max_prob * (1.0 - (lead_minutes / 150.0))
            if upper_bound > 0:
                rescaled = rescale(
                    vii_slice.data,
                    data_range=(0.0, 1.0),
                    scale_range=[0.0, upper_bound],
                    clip=True,
                )
                # Ice can only raise the lightning probability, never
                # lower it.
                lightning_slice.data = np.maximum(
                    rescaled, lightning_slice.data)
        updated_slices.append(lightning_slice)
    merged = updated_slices.merge_cube()
    return check_cube_coordinates(prob_lightning_cube, merged)
def process(self, cubelist):
    """
    Produce Nowcast of lightning probability.

    Args:
        cubelist (iris.cube.CubeList):
            Where thresholds are listed, only these threshold values will
            be used.
            Contains cubes of
                * First-guess lightning probability
                * Nowcast precipitation probability
                    (required thresholds: > 0.5, 7., 35. mm hr-1)
                * Nowcast lightning rate
                * (optional) Analysis of vertically integrated ice (VII)
                  from radar thresholded into probability slices at
                  self.ice_thresholds.

    Returns:
        iris.cube.Cube:
            Output cube containing Nowcast lightning probability.
            This cube will have the same dimensions as the input Nowcast
            precipitation probability after the threshold coord has been
            removed.

    Raises:
        iris.exceptions.ConstraintMismatchError:
            If cubelist does not contain the expected cubes.
    """
    first_guess_lightning_cube = cubelist.extract(
        "probability_of_rate_of_lightning_above_threshold", strict=True)
    lightning_rate_cube = cubelist.extract(
        "rate_of_lightning", strict=True)
    lightning_rate_cube.convert_units("min^-1")  # Ensure units are correct
    prob_precip_cube = cubelist.extract(
        "probability_of_lwe_precipitation_rate_above_threshold",
        strict=True)
    # Now find prob_vii_cube. Can't use strict=True here as cube may not be
    # present, so will use a normal extract and then merge_cube if needed.
    prob_vii_cube = cubelist.extract(
        "probability_of_vertical_integral_of_ice_above_threshold")
    if prob_vii_cube:
        prob_vii_cube = prob_vii_cube.merge_cube()
    precip_threshold_coord = find_threshold_coordinate(prob_precip_cube)
    precip_threshold_coord.convert_units("mm hr-1")
    precip_slice = prob_precip_cube.extract(
        iris.Constraint(coord_values={
            precip_threshold_coord: lambda t: isclose(t.point, 0.5)}))
    if not isinstance(precip_slice, iris.cube.Cube):
        raise ConstraintMismatchError(
            "Cannot find prob(precip > 0.5 mm hr-1) cube in cubelist.")
    template_cube = self._update_metadata(precip_slice)
    new_cube = self._modify_first_guess(
        template_cube, first_guess_lightning_cube, lightning_rate_cube,
        prob_precip_cube, prob_vii_cube)
    # Adjust data so that lightning probability does not decrease too
    # rapidly with distance.
    # FIX(review): the return value of the neighbourhood plugin was
    # previously discarded, making the smoothing a silent no-op; capture
    # it so the adjustment takes effect. (Presumes the plugin returns the
    # processed cube rather than mutating in place — confirm against the
    # neighbourhood plugin's API.)
    new_cube = self.neighbourhood.process(new_cube)
    return new_cube
def apply_precip(self, prob_lightning_cube: Cube,
                 prob_precip_cube: Cube) -> Cube:
    """
    Modify Nowcast of lightning probability with precipitation rate
    probabilities at thresholds of 0.5, 7 and 35 mm/h.

    Args:
        prob_lightning_cube:
            First-guess lightning probability.
        prob_precip_cube:
            Nowcast precipitation probability
            (threshold > 0.5, 7., 35. mm hr-1)
            Units of threshold coord modified in-place to mm hr-1.

    Returns:
        Output cube containing updated nowcast lightning probability.
        This cube will have the same dimensions and meta-data as
        prob_lightning_cube.

    Raises:
        iris.exceptions.ConstraintMismatchError:
            If prob_precip_cube does not contain the expected thresholds.
    """
    # check prob-precip threshold units are as expected
    precip_thr_coord = find_threshold_coordinate(prob_precip_cube)
    precip_thr_coord.convert_units("mm hr-1")
    missing_msg = "No matching {} cube for {}"
    updated_slices = iris.cube.CubeList([])
    for lightning_slice in prob_lightning_cube.slices_over("time"):
        valid_time = iris_time_to_datetime(
            lightning_slice.coord("time").copy())[0]

        def _extract_precip(threshold_value, label):
            # Pull the precip probability slice for this validity time at
            # the given threshold, raising if it is absent.
            extracted = prob_precip_cube.extract(
                iris.Constraint(time=valid_time)
                & iris.Constraint(coord_values={
                    precip_thr_coord:
                        lambda t: isclose(t.point, threshold_value)}))
            if not isinstance(extracted, iris.cube.Cube):
                raise ConstraintMismatchError(
                    missing_msg.format(label, valid_time))
            return extracted

        this_precip = _extract_precip(0.5, "any precip")
        high_precip = _extract_precip(7.0, "high precip")
        torr_precip = _extract_precip(35.0, "intense precip")
        # Increase prob(lightning) to Risk 2 (pl_dict[2]) when
        # prob(precip > 7mm/hr) > phighthresh
        risk_two = self.pl_dict[2]
        lightning_slice.data = np.where(
            (high_precip.data >= self.phighthresh)
            & (lightning_slice.data < risk_two),
            risk_two,
            lightning_slice.data,
        )
        # Increase prob(lightning) to Risk 1 (pl_dict[1]) when
        # prob(precip > 35mm/hr) > ptorrthresh
        risk_one = self.pl_dict[1]
        lightning_slice.data = np.where(
            (torr_precip.data >= self.ptorrthresh)
            & (lightning_slice.data < risk_one),
            risk_one,
            lightning_slice.data,
        )
        # Decrease prob(lightning) where prob(precip > 0.5 mm hr-1) is low.
        lightning_slice.data = apply_double_scaling(
            this_precip, lightning_slice, self.precipthr, self.ltngthr)
        updated_slices.append(lightning_slice)
    merged = updated_slices.merge_cube()
    return check_cube_coordinates(prob_lightning_cube, merged)
def _modify_first_guess(
    self,
    cube: Cube,
    first_guess_lightning_cube: Cube,
    lightning_rate_cube: Cube,
    prob_precip_cube: Cube,
    prob_vii_cube: Optional[Cube] = None,
) -> Cube:
    """
    Modify first-guess lightning probability with nowcast data.

    Args:
        cube:
            Provides the meta-data for the Nowcast lightning probability
            output cube.
        first_guess_lightning_cube:
            First-guess lightning probability. Must have same x & y
            dimensions as cube. Time dimension should overlap that of cube
            (closest slice in time is used with a maximum time mismatch of
            2 hours). This is included to allow this cube to come from a
            different modelling system, such as the UM.
        lightning_rate_cube:
            Nowcast lightning rate. Must have same dimensions as cube.
        prob_precip_cube:
            Nowcast precipitation probability (threshold > 0.5, 7, 35).
            Must have same other dimensions as cube.
        prob_vii_cube:
            Radar-derived vertically integrated ice content (VII).
            Must have same x and y dimensions as cube. Time should be a
            scalar coordinate. Must have a threshold coordinate with
            points matching self.vii_thresholds.
            Can be <No cube> or None or anything that evaluates to False.

    Returns:
        Output cube containing Nowcast lightning probability.

    Raises:
        iris.exceptions.ConstraintMismatchError:
            If lightning_rate_cube or first_guess_lightning_cube do not
            contain the expected times.
    """
    new_cube_list = iris.cube.CubeList([])
    # Loop over required forecast validity times
    for cube_slice in cube.slices_over("time"):
        this_time = iris_time_to_datetime(
            cube_slice.coord("time").copy())[0]
        lightning_rate_slice = lightning_rate_cube.extract(
            iris.Constraint(time=this_time))
        err_string = "No matching {} cube for {}"
        if not isinstance(lightning_rate_slice, iris.cube.Cube):
            raise ConstraintMismatchError(
                err_string.format("lightning", this_time))
        # 7201 s permits a mismatch of up to two hours (the extra second
        # allows an exact two-hour offset to be accepted).
        first_guess_slice = extract_nearest_time_point(
            first_guess_lightning_cube, this_time,
            allowed_dt_difference=7201)
        # Re-home the first-guess data onto this slice's coordinates and
        # metadata so the output matches `cube`.
        first_guess_slice = cube_slice.copy(data=first_guess_slice.data)
        first_guess_slice.coord("forecast_period").convert_units("minutes")
        fcmins = first_guess_slice.coord("forecast_period").points[0]
        # Increase prob(lightning) to Risk 2 (pl_dict[2]) when
        # lightning nearby (lrt_lev2)
        # (and leave unchanged when condition is not met):
        first_guess_slice.data = np.where(
            (lightning_rate_slice.data >= self.lrt_lev2)
            & (first_guess_slice.data < self.pl_dict[2]),
            self.pl_dict[2],
            first_guess_slice.data,
        )
        # Increase prob(lightning) to Risk 1 (pl_dict[1]) when within
        # lightning storm (lrt_lev1):
        # (and leave unchanged when condition is not met):
        # lrt_lev1 is callable — the rate threshold varies with lead time.
        lratethresh = self.lrt_lev1(fcmins)
        first_guess_slice.data = np.where(
            (lightning_rate_slice.data >= lratethresh)
            & (first_guess_slice.data < self.pl_dict[1]),
            self.pl_dict[1],
            first_guess_slice.data,
        )
        new_cube_list.append(first_guess_slice)
    new_prob_lightning_cube = new_cube_list.merge_cube()
    new_prob_lightning_cube = check_cube_coordinates(
        cube, new_prob_lightning_cube)
    # Apply precipitation adjustments.
    new_prob_lightning_cube = self.apply_precip(new_prob_lightning_cube,
                                                prob_precip_cube)
    # If we have VII data, increase prob(lightning) accordingly.
    if prob_vii_cube:
        new_prob_lightning_cube = self.apply_ice(new_prob_lightning_cube,
                                                 prob_vii_cube)
    return new_prob_lightning_cube