Example #1
    def _extract_inputs(self, cubes: CubeList) -> Tuple[Cube, Cube]:
        """
        Extract the required input cubes from the input cubelist and check
        they are as required.

        Args:
            cubes:
                A cubelist containing a cube of cloud fraction and one of
                convective ratio.

        Returns:
            The cloud and convection cubes extracted from the cubelist.

        Raises:
            ValueError: If the expected cubes are not within the cubelist.
            ValueError: If the input cubes have different shapes, perhaps due
                        to a missing realization in one and not the other.
        """

        try:
            (cloud, ) = cubes.extract(self.cloud_constraint)
            (convection, ) = cubes.extract(self.convection_constraint)
        except ValueError:
            input_cubes = ", ".join([cube.name() for cube in cubes])
            msg = ("A cloud area fraction and convective ratio are required, "
                   f"but the inputs were: {input_cubes}")
            raise ValueError(msg)

        if cloud.shape != convection.shape:
            msg = (
                "The cloud area fraction and convective ratio cubes are not "
                "the same shape and cannot be combined to generate a shower"
                " probability")
            raise ValueError(msg)
        return cloud, convection
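A minimal standalone sketch of the unpacking idiom used above: CubeList.extract returns a CubeList, so tuple unpacking raises ValueError for zero or multiple matches, which the method converts into a more informative message. The cube names below are invented for illustration.

    from iris.cube import Cube, CubeList

    cubes = CubeList([
        Cube(0.5, long_name="cloud_area_fraction"),
        Cube(0.2, long_name="convective_ratio"),
    ])
    (cloud,) = cubes.extract("cloud_area_fraction")  # exactly one match
    try:
        (missing,) = cubes.extract("no_such_cube")   # no match -> ValueError
    except ValueError:
        print("extraction did not return exactly one cube")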
Example #2
 def _get_inputs(cubes: CubeList) -> Tuple[Cube, Cube]:
     """
     Separates the CAPE and precipitation rate cubes, then checks that the
     forecast_reference_time and spatial coords match, that the CAPE time is
     at the lower bound of the precipitation rate time, and that the
     precipitation rate data represent a period of 1 or 3 hours.
     """
     cape = cubes.extract(
         iris.Constraint(
             cube_func=lambda cube: "atmosphere_convective_available_potential_energy"
             in cube.name()
         )
     )
     if cape:
         cape = cape.merge_cube()
     else:
         raise ValueError(
             f"No cube named atmosphere_convective_available_potential_energy found "
             f"in {cubes}"
         )
     precip = cubes.extract(
         iris.Constraint(
             cube_func=lambda cube: "precipitation_rate_max" in cube.name()
         )
     )
     if precip:
         precip = precip.merge_cube()
     else:
         raise ValueError(f"No cube named precipitation_rate_max found in {cubes}")
     (cape_time,) = list(cape.coord("time").cells())
     (precip_time,) = list(precip.coord("time").cells())
     if cape_time.point != precip_time.bound[0]:
         raise ValueError(
             f"CAPE cube time ({cape_time.point}) should be valid at the "
             f"precipitation_rate_max cube lower bound ({precip_time.bound[0]})."
         )
     if np.diff(precip_time.bound) not in [timedelta(hours=1), timedelta(hours=3)]:
         raise ValueError(
             f"Precipitation_rate_max cube time window must be one or three hours, "
             f"not {np.diff(precip_time.bound)}."
         )
     if cape.coord("forecast_reference_time") != precip.coord(
         "forecast_reference_time"
     ):
         raise ValueError(
             "Supplied cubes must have the same forecast reference times"
         )
     if not spatial_coords_match([cape, precip]):
         raise ValueError("Supplied cubes do not have the same spatial coordinates")
     return cape, precip
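The one- or three-hour window test above can be sketched outside iris; the bound values here are invented:

    from datetime import datetime, timedelta
    import numpy as np

    # Invented bounds of a 3-hour precipitation accumulation period.
    bound = [datetime(2024, 1, 1, 12), datetime(2024, 1, 1, 15)]
    window = np.diff(bound)[0]  # timedelta(hours=3)
    assert window in [timedelta(hours=1), timedelta(hours=3)]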
Example #3
    def _get_cube(cubes: CubeList, name: str) -> Cube:
        """
        Get one cube named "name" from the list of cubes and set its units to m s-1.

        Args:
            cubes:
                Cubelist from which to extract the named cube.
            name:
                Name of the cube to extract.

        Returns:
            Cube with units set
        """
        try:
            (cube, ) = cubes.extract(name)
        except ValueError:
            raise ValueError(
                f"Cannot find a cube named '{name}' in {[c.name() for c in cubes]}"
            )
        if cube.units != "m s-1":
            cube = cube.copy()
            try:
                cube.convert_units("m s-1")
            except ValueError:
                raise ValueError(
                    f"Input {name} cube cannot be converted to 'm s-1' from {cube.units}"
                )
        return cube
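A short sketch of the convert_units behaviour this helper relies on: conversion succeeds between dimensionally compatible units and raises ValueError otherwise (cube names and values invented).

    from iris.cube import Cube

    speed = Cube(10.0, long_name="wind_speed", units="knots")
    speed.convert_units("m s-1")  # compatible units: data are rescaled

    temperature = Cube(273.15, long_name="air_temperature", units="K")
    try:
        temperature.convert_units("m s-1")  # incompatible -> ValueError
    except ValueError:
        print("cannot convert K to m s-1")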
Example #4
def filter_realizations(cubes: CubeList) -> Cube:
    """For a given list of cubes, identifies the set of times, filters out any realizations
    that are not present at all times and returns a merged cube of the result.

    Args:
        cubes:
            List of cubes to be filtered

    Returns:
        Cube:
            Filtered and merged cube

    """
    times = set()
    realizations = set()
    for cube in cubes:
        times.update([c.point for c in cube.coord("time").cells()])
        realizations.update(cube.coord("realization").points)
    filtered_cubes = CubeList()
    for realization in realizations:
        realization_cube = MergeCubes()(cubes.extract(
            iris.Constraint(realization=realization)))
        if set([c.point
                for c in realization_cube.coord("time").cells()]) == times:
            filtered_cubes.append(realization_cube)
    return MergeCubes()(filtered_cubes)
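The filtering criterion above reduces to a set comparison; a sketch with invented values, using plain integers in place of time cells:

    times = {1, 2, 3}                  # union of times over all input cubes
    realization_times = {1, 2}         # this realization lacks time 3
    keep = realization_times == times  # False -> realization is dropped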
Example #5
File: temporal.py  Project: tjtg/improver
def extract_cube_at_time(
    cubes: CubeList, time: datetime, time_extract: Constraint
) -> Optional[Cube]:
    """
    Extract a single cube at a given time from a cubelist.

    Args:
        cubes:
            CubeList of a given diagnostic over several times.
        time:
            Time at which forecast data is needed.
        time_extract:
            Iris constraint for the desired time.

    Returns:
        Cube of data at the desired time, or None (with a UserWarning)
        if the desired time is not available within the cubelist.
    """
    try:
        (cube_in,) = cubes.extract(time_extract)
        return cube_in
    except ValueError:
        msg = "Forecast time {} not found within data cubes.".format(
            time.strftime("%Y-%m-%d:%H:%M")
        )
        warnings.warn(msg)
        return None
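A hedged sketch of building the time_extract argument; the lambda form is one common way to construct it and is an assumption about the caller, not something this function mandates:

    from datetime import datetime
    import iris

    time = datetime(2017, 1, 1, 12)
    time_extract = iris.Constraint(time=lambda cell: cell.point == time)
    # cube = extract_cube_at_time(cubes, time, time_extract)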
Example #6
    def _get_cube_subsets(self, cubes: CubeList,
                          forecast_period: Union[int, ndarray]) -> CubeList:
        """Finding the subset of cubes from the input cubelist that are
        within the accumulation period, based on the required forecast period
        that defines the upper bound of the accumulation period and the length
        of the accumulation period.

        Args:
            cubes:
                Cubelist containing all the rates cubes that are available
                to be used to calculate accumulations.
            forecast_period:
                Forecast period in seconds matching the upper bound of the
                accumulation period.

        Returns:
            Cubelist that defines the cubes used to calculate
            the accumulations.
        """
        # If the input is a numpy array, get the integer value from the array
        # for use in the constraint.
        if isinstance(forecast_period, np.ndarray):
            (forecast_period, ) = forecast_period
        start_point = forecast_period - self.accumulation_period

        constr = iris.Constraint(
            forecast_period=lambda fp: start_point <= fp <= forecast_period)

        return cubes.extract(constr)
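A sketch of how the closed-interval constraint selects cubes; the one-hour accumulation period (in seconds) is assumed for illustration:

    import iris

    accumulation_period = 3600  # length of the accumulation window (s)
    forecast_period = 7200      # upper bound of the window (s)
    start_point = forecast_period - accumulation_period

    # Both bounds are inclusive: rates cubes with forecast_period points of
    # 3600 s and 7200 s are kept; a cube at 0 s is excluded.
    constr = iris.Constraint(
        forecast_period=lambda fp: start_point <= fp <= forecast_period)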
Example #7
class Test_extract(tests.IrisTest):
    def setUp(self):
        self.scalar_cubes = CubeList()
        for i in range(5):
            for letter in 'abcd':
                self.scalar_cubes.append(Cube(i, long_name=letter))

    def test_scalar_cube_name_constraint(self):
        # Test the name based extraction of a CubeList containing scalar cubes.
        res = self.scalar_cubes.extract('a')
        expected = CubeList([Cube(i, long_name='a') for i in range(5)])
        self.assertEqual(res, expected)

    def test_scalar_cube_data_constraint(self):
        # Test the extraction of a CubeList containing scalar cubes
        # when using a cube_func.
        val = 2
        constraint = iris.Constraint(cube_func=lambda c: c.data == val)
        res = self.scalar_cubes.extract(constraint)
        expected = CubeList([Cube(val, long_name=letter) for letter in 'abcd'])
        self.assertEqual(res, expected)
Example #8
 def setUp(self):
     file_path = tests.get_data_path(
         ("NetCDF", "unstructured_grid", "theta_nodal_xios.nc"))
     loaded_cubes = CubeList(load_cubes(file_path))
     (cube, ) = loaded_cubes.extract(Constraint("theta"))
     # Prune the attributes, just because there are a lot.
     keep_attrs = ["timeStamp", "Conventions"]
     cube.attributes = {
         key: value
         for key, value in cube.attributes.items() if key in keep_attrs
     }
     self.ucube = cube
Example #9
 def _extract_cubes(self, conditions: List[str], cubes: CubeList) -> bool:
     """For a given set of conditions, put all matching cubes onto self.cubes and
     put conditions onto self.tree. If ALL conditions are not satisfied, the function
     exits without updating self.cubes or self.tree."""
     matched_cubes = []
     for name in conditions:
         found_cubes = cubes.extract(f"probability_of_{name}_above_threshold")
         if not found_cubes:
             return False
         (found_cube,) = found_cubes  # We expect exactly one cube here
         matched_cubes.append(found_cube)
     self.cubes = matched_cubes
     self.tree = conditions
     return True
Example #10
def correct_analyses(cubes):
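    # Note: lat, lon, name_pairs and units are not local to this function;
    # they come from the enclosing module scope in the original source file.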
    newcubes = CubeList()

    for cube in cubes:
        # Squeeze cubes dimensions
        newcube = squeeze(cube)

        # Give time coordinate proper name
        newcube.coord('t').rename('time')

        # Correct dimensional coordinates
        z, y, x, t = newcube.coords()

        z.rename('level_height')
        z.units = 'm'
        z.attributes = {'positive': 'up'}

        y.rename('latitude')
        y.coord_system = lat.coord_system
        y.units = lat.units

        x.rename('longitude')
        x.coord_system = lon.coord_system
        x.units = lon.units

        newcubes.append(newcube)

    # Correct cube names
    for before, after in name_pairs:
        newcubes.extract(before)[0].rename(after)

    # Correct units
    for name, unit in units:
        newcubes.extract(name)[0].units = unit

    return newcubes
Example #11
 def _fill_months(cube):
     if cube.coord('time').shape[0] == 12:
         return cube
     cubes = CubeList(cube.slices_over('time'))
     model_cube = cubes[0].copy()
     for month in range(1, 13):
         month_constraint = iris.Constraint(
             # pylint: disable=cell-var-from-loop
             time=lambda cell: cell.point.month == month)
         if cubes.extract(month_constraint):
             continue
         cubes.append(
             OSICmorizer._create_nan_cube(model_cube, month, month=True))
     cube = cubes.merge_cube()
     return cube
Example #12
 def _add_nan_timesteps(cube, total_days):
     add_day_of_year(cube, 'time')
     cubes = CubeList(cube.slices_over('time'))
     model_cube = cubes[0].copy()
     model_cube.remove_coord('day_of_year')
     for day_of_year in range(total_days):
         day_constraint = iris.Constraint(day_of_year=day_of_year + 1)
         if cubes.extract(day_constraint):
             continue
         nan_cube = OSICmorizer._create_nan_cube(model_cube,
                                                 day_of_year,
                                                 month=False)
         add_day_of_year(nan_cube, 'time')
         cubes.append(nan_cube)
     del model_cube
     return cubes
Example #13
    def _parse_inputs(self, inputs: List[Cube]) -> None:
        """
        Separates the input CubeList into CAPE and precipitation rate cubes
        with standard units, raising exceptions if this cannot be done or if
        excess data are found.

        Args:
            inputs:
                List of Cubes containing exactly one of CAPE and Precipitation rate.
        Raises:
            ValueError:
                If the expected cubes are missing, additional cubes are found,
                or the spatial coordinates, times or model attributes of the
                inputs do not match.
        """
        cubes = CubeList(inputs)
        try:
            (self.cape, self.precip) = cubes.extract(self.cube_names)
        except ValueError as e:
            raise ValueError(
                f"Expected to find cubes of {self.cube_names}, not {[c.name() for c in cubes]}"
            ) from e
        if len(cubes) > 2:
            extras = [
                c.name() for c in cubes if c.name() not in self.cube_names
            ]
            raise ValueError(f"Unexpected Cube(s) found in inputs: {extras}")
        if not spatial_coords_match(inputs):
            raise ValueError(
                f"Spatial coords of input Cubes do not match: {cubes}")
        time_error_msg = self._input_times_error()
        if time_error_msg:
            raise ValueError(time_error_msg)
        self.cape.convert_units("J kg-1")
        self.precip.convert_units("mm h-1")
        if self.model_id_attr:
            if (self.cape.attributes[self.model_id_attr] !=
                    self.precip.attributes[self.model_id_attr]):
                raise ValueError(
                    f"Attribute {self.model_id_attr} does not match on input cubes. "
                    f"{self.cape.attributes[self.model_id_attr]} != "
                    f"{self.precip.attributes[self.model_id_attr]}")
Example #14
    def test_basic_load(self):
        file_path = tests.get_data_path(
            ("NetCDF", "unstructured_grid", "theta_nodal_xios.nc"))

        # cube = iris.load_cube(file_path, "theta")
        # Note: cannot use iris.load, as merge does not yet preserve
        # the cube 'ugrid' properties.

        # Here's a thing that at least works.
        loaded_cubes = CubeList(load_cubes(file_path))

        # Just check some expected details.
        self.assertEqual(len(loaded_cubes), 2)

        (cube_0, ) = loaded_cubes.extract(Constraint("theta"))

        # Check the primary cube.
        self.assertIsInstance(cube_0, UCube)
        self.assertEqual(cube_0.var_name, "theta")
        self.assertEqual(cube_0.long_name, "Potential Temperature")
        self.assertEqual(cube_0.shape, (1, 6, 866))
        self.assertEqual(
            cube_0.coord_dims(cube_0.coord("time", dim_coords=True)), (0, ))
        self.assertEqual(cube_0.coord_dims("levels"), (1, ))
        self.assertEqual(cube_0.coords(dimensions=2), [])

        # Check the cube.ugrid object.
        cubegrid = cube_0.ugrid
        self.assertIsInstance(cubegrid, CubeUgrid)
        self.assertEqual(cubegrid.cube_dim, 2)
        self.assertEqual(cubegrid.mesh_location, "node")
        self.assertEqual(cubegrid.topology_dimension, 2)
        self.assertEqual(cubegrid.node_coordinates, ["latitude", "longitude"])

        # Check cube.ugrid.grid : a gridded Grid type.
        ugrid = cubegrid.grid
        self.assertIsInstance(ugrid, UGrid)
        self.assertEqual(ugrid.mesh_name, "Mesh0")
Example #15
    def _get_input_cubes(self, input_cubes: CubeList) -> None:
        """
        Separates out the rain and snow cubes from the input list and checks that
            * No other cubes are present
            * Cubes represent the same time quantity (instantaneous or accumulation length)
            * Cubes have compatible units
            * Cubes have same dimensions
            * Cubes are not masked (or are masked with an all-False mask)

        Args:
            input_cubes:
                Contains exactly two cubes, one of rain and one of snow. Both must be
                either rates or accumulations of the same length and of compatible units.

        Raises:
            ValueError:
                If any of the criteria above are not met.
        """
        if len(input_cubes) != 2:
            raise ValueError(
                f"Expected exactly 2 input cubes, found {len(input_cubes)}")
        rain_name, snow_name = self._get_input_cube_names(input_cubes)
        self.rain = input_cubes.extract(rain_name).merge_cube()
        self.snow = input_cubes.extract(snow_name).merge_cube()
        self.snow.convert_units(self.rain.units)
        if not spatial_coords_match([self.rain, self.snow]):
            raise ValueError("Rain and snow cubes are not on the same grid")
        if not self.rain.coord("time") == self.snow.coord("time"):
            raise ValueError(
                "Rain and snow cubes do not have the same time coord")
        if np.ma.is_masked(self.rain.data) or np.ma.is_masked(self.snow.data):
            raise ValueError("Unexpected masked data in input cube(s)")
        if isinstance(self.rain.data, np.ma.masked_array):
            self.rain.data = self.rain.data.data
        if isinstance(self.snow.data, np.ma.masked_array):
            self.snow.data = self.snow.data.data
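The mask handling above relies on np.ma.is_masked returning False when the mask is all False, so such cubes pass the check and are then stripped back to plain arrays; a sketch:

    import numpy as np

    data = np.ma.masked_array([1.0, 2.0], mask=[False, False])
    np.ma.is_masked(data)  # False: no element is actually masked
    data.data              # the underlying plain ndarray that is kept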
Example #16
    def evaluate_extract_expression(
        self, cubes: CubeList, expression: Union[Constraint, List]
    ) -> ndarray:
        """Evaluate a single condition.

        Args:
            cubes:
                A cubelist containing the diagnostics required for the
                weather symbols decision tree, all at coincident times.
            expression:
                Defined recursively:
                A list consisting of an iris.Constraint or a list of
                iris.Constraint, strings (representing operators) and floats
                is a valid expression.
                A list consisting of valid expressions, strings (representing
                operators) and floats is a valid expression.

        Returns:
            An array or masked array of booleans
        """
        operator_map = {
            "+": operator.add,
            "-": operator.sub,
            "*": operator.mul,
            "/": operator.truediv,
        }
        if isinstance(expression, iris.Constraint):
            return cubes.extract(expression)[0].data
        else:
            curr_expression = copy.deepcopy(expression)
            # evaluate sub-expressions first
            for idx, item in enumerate(expression):
                if isinstance(item, list):
                    curr_expression = (
                        curr_expression[:idx]
                        + [self.evaluate_extract_expression(cubes, item)]
                        + curr_expression[idx + 1 :]
                    )
            # evaluate operators in order of precedence
            for op_str in ["/", "*", "+", "-"]:
                while len(curr_expression) > 1:
                    for idx, item in enumerate(curr_expression):
                        if isinstance(item, str) and (item == op_str):
                            left_arg = curr_expression[idx - 1]
                            right_arg = curr_expression[idx + 1]
                            if isinstance(left_arg, iris.Constraint):
                                left_eval = cubes.extract(left_arg)[0].data
                            else:
                                left_eval = left_arg
                            if isinstance(right_arg, iris.Constraint):
                                right_eval = cubes.extract(right_arg)[0].data
                            else:
                                right_eval = right_arg
                            op = operator_map[op_str]
                            res = op(left_eval, right_eval)
                            curr_expression = (
                                curr_expression[: idx - 1]
                                + [res]
                                + curr_expression[idx + 2 :]
                            )
                            break
                    else:
                        break
            if isinstance(curr_expression[0], iris.Constraint):
                res = cubes.extract(curr_expression[0])[0].data
            return res
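A sketch of the recursive expression format accepted above; the constraints are hypothetical and stand for diagnostics present in the cubelist:

    import iris

    c1 = iris.Constraint("cloud_area_fraction")
    c2 = iris.Constraint("convective_ratio")
    # Means (0.5 * data(c1)) + data(c2): sub-lists are evaluated first, then
    # operators are applied in the precedence order "/", "*", "+", "-".
    expression = [[0.5, "*", c1], "+", c2]
    # result = plugin.evaluate_extract_expression(cubes, expression)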
Example #17
    def check_input_cubes(self, cubes: CubeList) -> Optional[Dict[str, Any]]:
        """
        Check that the input cubes contain all the diagnostics and thresholds
        required by the decision tree.  Sets self.coord_named_threshold to
        "True" if threshold-type coordinates have the name "threshold" (as
        opposed to the standard name of the diagnostic), for backward
        compatibility.

        Args:
            cubes:
                A CubeList containing the input diagnostic cubes.

        Returns:
            A dictionary of (keyword) nodes names where the diagnostic
            data is missing and (values) node associated with
            diagnostic_missing_action.

        Raises:
            IOError:
                Raises an IOError if any of the required input data is missing.
                The error includes details of which fields are missing.
        """
        optional_node_data_missing = {}
        missing_data = []
        for key, query in self.queries.items():
            diagnostics = get_parameter_names(
                expand_nested_lists(query, "diagnostic_fields")
            )
            thresholds = expand_nested_lists(query, "diagnostic_thresholds")
            conditions = expand_nested_lists(query, "diagnostic_conditions")
            for diagnostic, threshold, condition in zip(
                diagnostics, thresholds, conditions
            ):

                # First we check the diagnostic name and units, performing
                # a conversion if required and possible.
                test_condition = iris.Constraint(name=diagnostic)
                matched_cube = cubes.extract(test_condition)
                if not matched_cube:
                    if "diagnostic_missing_action" in query:
                        optional_node_data_missing.update(
                            {key: query[query["diagnostic_missing_action"]]}
                        )
                    else:
                        missing_data.append([diagnostic, threshold, condition])
                    continue

                cube_threshold_units = find_threshold_coordinate(matched_cube[0]).units
                threshold.convert_units(cube_threshold_units)

                # Then we check if the required threshold is present in the
                # cube, and that the thresholding is relative to it correctly.
                threshold = threshold.points.item()
                threshold_name = find_threshold_coordinate(matched_cube[0]).name()

                # Set flag to check for old threshold coordinate names
                if threshold_name == "threshold" and not self.coord_named_threshold:
                    self.coord_named_threshold = True

                # Check threshold == 0.0
                if abs(threshold) < self.float_abs_tolerance:
                    coord_constraint = {
                        threshold_name: lambda cell: np.isclose(
                            cell.point, 0, rtol=0, atol=self.float_abs_tolerance
                        )
                    }
                else:
                    coord_constraint = {
                        threshold_name: lambda cell: np.isclose(
                            cell.point, threshold, rtol=self.float_tolerance, atol=0
                        )
                    }

                # Check whether the spp__relative_to_threshold attribute is
                # above or below threshold, and compare this to the
                # diagnostic_condition.
                test_condition = iris.Constraint(
                    coord_values=coord_constraint,
                    cube_func=lambda cube: (
                        probability_is_above_or_below(cube) == condition
                    ),
                )
                matched_threshold = matched_cube.extract(test_condition)
                if not matched_threshold:
                    missing_data.append([diagnostic, threshold, condition])

        if missing_data:
            msg = (
                "Weather Symbols input cubes are missing"
                " the following required"
                " input fields:\n"
            )
            dyn_msg = "name: {}, threshold: {}, spp__relative_to_threshold: {}\n"
            for item in missing_data:
                msg = msg + dyn_msg.format(*item)
            raise IOError(msg)

        if not optional_node_data_missing:
            optional_node_data_missing = None
        return optional_node_data_missing
Example #18
    def _get_input_cubes(self, input_cubes: CubeList) -> None:
        """
        Separates out the rain, sleet, and temperature cubes, checking that:
            * No other cubes are present
            * Cubes have same dimensions
            * Cubes represent the same time quantity (instantaneous or accumulation length)
            * Precipitation cube threshold units are compatible
            * Precipitation cubes have the same set of thresholds
            * A 273.15K (0 Celsius) temperature threshold is available

        The temperature cube is also modified if necessary to return probabilties
        below threshold values. This data is then thinned to return only the
        probabilities of temperature being below the freezing point of water,
        0 Celsius.

        Args:
            input_cubes:
                Contains exactly three cubes, a rain rate or accumulation, a
                sleet rate or accumulation, and an instantaneous or period
                temperature. Accumulations and periods must all represent the
                same length of time.

        Raises:
            ValueError:
                If any of the criteria above are not met.
        """
        if len(input_cubes) != 3:
            raise ValueError(
                f"Expected exactly 3 input cubes, found {len(input_cubes)}")
        rain_name, sleet_name, temperature_name = self._get_input_cube_names(
            input_cubes)
        (self.rain, ) = input_cubes.extract(rain_name)
        (self.sleet, ) = input_cubes.extract(sleet_name)
        (self.temperature, ) = input_cubes.extract(temperature_name)

        if not spatial_coords_match([self.rain, self.sleet, self.temperature]):
            raise ValueError("Input cubes are not on the same grid")
        if (not self.rain.coord("time") == self.sleet.coord("time") ==
                self.temperature.coord("time")):
            raise ValueError("Input cubes do not have the same time coord")

        # Ensure rain and sleet cubes are compatible
        rain_threshold = self.rain.coord(var_name="threshold")
        sleet_threshold = self.sleet.coord(var_name="threshold")
        try:
            sleet_threshold.convert_units(rain_threshold.units)
        except ValueError:
            raise ValueError("Rain and sleet cubes have incompatible units")

        if not all(rain_threshold.points == sleet_threshold.points):
            raise ValueError(
                "Rain and sleet cubes have different threshold values")

        # Ensure probabilities relate to temperatures below a threshold
        temperature_threshold = self.temperature.coord(var_name="threshold")
        self.temperature = to_threshold_inequality(self.temperature,
                                                   above=False)

        # Simplify the temperature cube to the critical threshold of 273.15K,
        # the freezing point of water under typical pressures.
        self.temperature = extract_subcube(
            self.temperature, [f"{temperature_threshold.name()}=273.15"],
            units=["K"])
        if self.temperature is None:
            raise ValueError(
                "No 0 Celsius or equivalent threshold is available "
                "in the temperature data")
Example #19
    def process(self, cubelist: CubeList) -> Cube:
        """
        Produce Nowcast of lightning probability.

        Args:
            cubelist:
                Where thresholds are listed, only these threshold values will
                    be used.
                Contains cubes of
                    * First-guess lightning probability
                    * Nowcast precipitation probability
                        (required thresholds: > 0.5, 7., 35. mm hr-1)
                    * Nowcast lightning rate
                    * (optional) Analysis of vertically integrated ice (VII)
                      from radar thresholded into probability slices
                      at self.ice_thresholds.

        Returns:
            Output cube containing Nowcast lightning probability.
            This cube will have the same dimensions as the input
            Nowcast precipitation probability after the threshold coord
            has been removed.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If cubelist does not contain the expected cubes.
        """
        first_guess_lightning_cube = cubelist.extract(
            "probability_of_rate_of_lightning_above_threshold", strict=True)
        lightning_rate_cube = cubelist.extract("rate_of_lightning",
                                               strict=True)
        lightning_rate_cube.convert_units("min^-1")  # Ensure units are correct
        prob_precip_cube = cubelist.extract(
            "probability_of_lwe_precipitation_rate_above_threshold",
            strict=True)
        # Now find prob_vii_cube. Can't use strict=True here as cube may not be
        # present, so will use a normal extract and then merge_cube if needed.
        prob_vii_cube = cubelist.extract(
            "probability_of_vertical_integral_of_ice_above_threshold")
        if prob_vii_cube:
            prob_vii_cube = prob_vii_cube.merge_cube()
        precip_threshold_coord = find_threshold_coordinate(prob_precip_cube)
        precip_threshold_coord.convert_units("mm hr-1")
        precip_slice = prob_precip_cube.extract(
            iris.Constraint(
                coord_values={
                    precip_threshold_coord: lambda t: isclose(t.point, 0.5)
                }))
        if not isinstance(precip_slice, iris.cube.Cube):
            raise ConstraintMismatchError(
                "Cannot find prob(precip > 0.5 mm hr-1) cube in cubelist.")
        template_cube = self._update_metadata(precip_slice)
        new_cube = self._modify_first_guess(
            template_cube,
            first_guess_lightning_cube,
            lightning_rate_cube,
            prob_precip_cube,
            prob_vii_cube,
        )
        # Adjust data so that lightning probability does not decrease too
        # rapidly with distance.
        self.neighbourhood(new_cube)
        return new_cube
Example #20
File: analysis.py  Project: fsenf/tobac
def cell_statistics(input_cubes, track, mask, aggregators, cell,
                    output_path='./', output_name='Profiles', width=10000,
                    z_coord='model_level_number', dimensions=['x', 'y'],
                    **kwargs):
    from iris.cube import Cube, CubeList
    from iris.coords import AuxCoord
    from iris import Constraint, save
    # Module-level imports in the original analysis.py, added here so the
    # snippet is self-contained; mask_cell, mask_cell_surface,
    # get_bounding_box and mask_cube_cell are helpers defined elsewhere in
    # the tobac package.
    import logging
    import os
    import numpy as np
    import pandas as pd
    
    # If input is single cube, turn into cubelist
    if type(input_cubes) is Cube:
        input_cubes=CubeList([input_cubes])
    
    logging.debug('Start calculating profiles for cell '+str(cell))
    track_i=track[track['cell']==cell]
    
    cubes_profile={}
    for aggregator in aggregators:
        cubes_profile[aggregator.name()]=CubeList()
        
    for time_i in track_i['time'].values:
        constraint_time = Constraint(time=time_i)
        
        mask_i=mask.extract(constraint_time)
        mask_cell_i=mask_cell(mask_i,cell,track_i,masked=False)
        mask_cell_surface_i=mask_cell_surface(mask_i,cell,track_i,masked=False,z_coord=z_coord)

        x_dim=mask_cell_surface_i.coord_dims('projection_x_coordinate')[0]
        y_dim=mask_cell_surface_i.coord_dims('projection_y_coordinate')[0]
        x_coord=mask_cell_surface_i.coord('projection_x_coordinate')
        y_coord=mask_cell_surface_i.coord('projection_y_coordinate')
    
        if (mask_cell_surface_i.core_data()>0).any():
            box_mask_i=get_bounding_box(mask_cell_surface_i.core_data(),buffer=1)
    
            box_mask=[[x_coord.points[box_mask_i[x_dim][0]],x_coord.points[box_mask_i[x_dim][1]]],
                     [y_coord.points[box_mask_i[y_dim][0]],y_coord.points[box_mask_i[y_dim][1]]]]
        else:
            box_mask=[[np.nan,np.nan],[np.nan,np.nan]]
    
        x=track_i[track_i['time'].values==time_i]['projection_x_coordinate'].values[0]
        y=track_i[track_i['time'].values==time_i]['projection_y_coordinate'].values[0]

        box_slice=[[x-width,x+width],[y-width,y+width]]
               
        x_min=np.nanmin([box_mask[0][0],box_slice[0][0]])
        x_max=np.nanmax([box_mask[0][1],box_slice[0][1]])
        y_min=np.nanmin([box_mask[1][0],box_slice[1][0]])
        y_max=np.nanmax([box_mask[1][1],box_slice[1][1]])

        constraint_x=Constraint(projection_x_coordinate=lambda cell: int(x_min) < cell < int(x_max))
        constraint_y=Constraint(projection_y_coordinate=lambda cell: int(y_min) < cell < int(y_max))

        constraint=constraint_time & constraint_x & constraint_y
#       Mask_cell_surface_i=mask_cell_surface(Mask_w_i,cell,masked=False,z_coord='model_level_number')
        mask_cell_i=mask_cell_i.extract(constraint)
        mask_cell_surface_i=mask_cell_surface_i.extract(constraint)

        input_cubes_i=input_cubes.extract(constraint)
        for cube in input_cubes_i:
            cube_masked=mask_cube_cell(cube,mask_cell_i,cell,track_i)
            coords_remove=[]
            for coordinate in cube_masked.coords(dim_coords=False):

                if coordinate.name() not in dimensions:
                    for dim in dimensions:
                        if set(cube_masked.coord_dims(coordinate)).intersection(set(cube_masked.coord_dims(dim))):
                            coords_remove.append(coordinate.name())
            for coordinate in set(coords_remove):
                cube_masked.remove_coord(coordinate)            
            
            for aggregator in aggregators:
                cube_collapsed=cube_masked.collapsed(dimensions,aggregator,**kwargs)
                # Remove all collapsed (now scalar) coordinates, such as the
                # x and y dims, keeping only time.
                for coordinate in cube_collapsed.coords():
                    if not cube_collapsed.coord_dims(coordinate):
                        if coordinate.name() != 'time':
                            cube_collapsed.remove_coord(coordinate)
                logging.debug(str(cube_collapsed))
                cubes_profile[aggregator.name()].append(cube_collapsed)


    minutes=(track_i['time_cell']/pd.Timedelta(minutes=1)).values
    latitude=track_i['latitude'].values
    longitude=track_i['longitude'].values
    minutes_coord=AuxCoord(minutes,long_name='cell_time',units='min')
    latitude_coord=AuxCoord(latitude,long_name='latitude',units='degrees')
    longitude_coord=AuxCoord(longitude,long_name='longitude',units='degrees')
    
    for aggregator in aggregators:
        cubes_profile[aggregator.name()]=cubes_profile[aggregator.name()].merge()
        for cube in cubes_profile[aggregator.name()]:
            cube.add_aux_coord(minutes_coord,data_dims=cube.coord_dims('time'))
            cube.add_aux_coord(latitude_coord,data_dims=cube.coord_dims('time'))
            cube.add_aux_coord(longitude_coord,data_dims=cube.coord_dims('time'))
        os.makedirs(os.path.join(output_path,output_name,aggregator.name()),exist_ok=True)
        savefile=os.path.join(output_path,output_name,aggregator.name(),output_name+'_'+ aggregator.name()+'_'+str(int(cell))+'.nc')
        save(cubes_profile[aggregator.name()],savefile)