def _prepare_features_dataframe(self, feature_cubes: CubeList) -> DataFrame:
    """Convert gridded feature cubes into a dataframe, with feature variables
    sorted alphabetically.

    Note:
        It is expected that feature_cubes has been aligned using
        _align_feature_variables prior to calling this function.

    Args:
        feature_cubes:
            Cubelist containing the independent feature variables for
            prediction.

    Returns:
        Dataframe containing flattened feature variables.

    Raises:
        ValueError: If flattened cubes have differing length.
    """
    # Get the names of features and sort alphabetically
    feature_variables = [cube.name() for cube in feature_cubes]
    feature_variables.sort()

    # Unpack the cube-data into dataframe to feed into the tree-models.
    features_df = pd.DataFrame()
    for feature in feature_variables:
        cube = feature_cubes.extract_cube(feature)
        features_df[feature] = cube.data.ravel()

    return features_df
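
# A minimal, stand-alone sketch (hypothetical, not part of this module) of the
# pattern used by _prepare_features_dataframe above: feature names are sorted
# so the column order is deterministic, and each gridded field is flattened
# with ravel(). Plain numpy arrays stand in for iris cubes here; note that
# pandas itself raises ValueError when a flattened column's length differs
# from the existing index, which is presumably the documented ValueError.
#
#   >>> import numpy as np
#   >>> import pandas as pd
#   >>> grids = {"surface_temperature": np.zeros((3, 4)), "cape": np.ones((3, 4))}
#   >>> features_df = pd.DataFrame({name: grids[name].ravel() for name in sorted(grids)})
#   >>> list(features_df.columns)
#   ['cape', 'surface_temperature']
#   >>> len(features_df)
#   12
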
def process(self, cubelist: CubeList) -> Cube:
    """
    Produce Nowcast of lightning probability.

    Args:
        cubelist:
            Where thresholds are listed, only these threshold values will
            be used. Contains cubes of
                * First-guess lightning probability
                * Nowcast precipitation probability
                  (required thresholds: > 0.5, 7., 35. mm hr-1)
                * Nowcast lightning rate
                * (optional) Analysis of vertically integrated ice (VII)
                  from radar thresholded into probability slices at
                  self.ice_thresholds.

    Returns:
        Output cube containing Nowcast lightning probability.
        This cube will have the same dimensions as the input Nowcast
        precipitation probability after the threshold coord has been
        removed.

    Raises:
        iris.exceptions.ConstraintMismatchError:
            If cubelist does not contain the expected cubes.
    """
    first_guess_lightning_cube = cubelist.extract_cube(
        "probability_of_rate_of_lightning_above_threshold"
    )
    lightning_rate_cube = cubelist.extract_cube("rate_of_lightning")
    lightning_rate_cube.convert_units("min^-1")  # Ensure units are correct
    prob_precip_cube = cubelist.extract_cube(
        "probability_of_lwe_precipitation_rate_above_threshold"
    )
    # Now find prob_vii_cube. Can't use extract_cube() here as the cube may
    # not be present, so use a plain extract and then merge_cube if needed.
    prob_vii_cube = cubelist.extract(
        "probability_of_vertical_integral_of_ice_above_threshold"
    )
    if prob_vii_cube:
        prob_vii_cube = prob_vii_cube.merge_cube()

    precip_threshold_coord = find_threshold_coordinate(prob_precip_cube)
    precip_threshold_coord.convert_units("mm hr-1")
    precip_slice = prob_precip_cube.extract(
        iris.Constraint(
            coord_values={
                precip_threshold_coord: lambda t: isclose(t.point, 0.5)
            }
        )
    )
    if not isinstance(precip_slice, iris.cube.Cube):
        raise ConstraintMismatchError(
            "Cannot find prob(precip > 0.5 mm hr-1) cube in cubelist."
        )
    template_cube = self._update_metadata(precip_slice)

    new_cube = self._modify_first_guess(
        template_cube,
        first_guess_lightning_cube,
        lightning_rate_cube,
        prob_precip_cube,
        prob_vii_cube,
    )
    # Adjust data so that lightning probability does not decrease too
    # rapidly with distance.
    new_cube = self.neighbourhood(new_cube)
    return new_cube
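
# A minimal, stand-alone sketch (hypothetical, not part of this module) of the
# threshold-slice selection used above: the 0.5 mm hr-1 slice is picked with a
# tolerant float comparison (math.isclose) rather than strict equality, since
# the threshold points can carry rounding error after the unit conversion.
# The names below (thresh, prob_cube, slice_05) are illustrative only:
#
#   >>> from math import isclose
#   >>> import numpy as np
#   >>> import iris
#   >>> from iris.coords import DimCoord
#   >>> thresh = DimCoord([0.5, 7.0, 35.0], long_name="lwe_precipitation_rate", units="mm hr-1")
#   >>> prob_cube = iris.cube.Cube(np.zeros((3, 2, 2)), dim_coords_and_dims=[(thresh, 0)])
#   >>> slice_05 = prob_cube.extract(iris.Constraint(
#   ...     coord_values={"lwe_precipitation_rate": lambda cell: isclose(cell.point, 0.5)}))
#   >>> slice_05.shape  # threshold dimension removed, as the docstring describes
#   (2, 2)
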