Example #1
def generate_mandatory_attributes(diagnostic_cubes, model_id_attr=None):
    """
    Function to generate mandatory attributes for new diagnostics that are
    generated using several different model diagnostics as input to the
    calculation. If all input diagnostics have the same value for an
    attribute, that value is used; otherwise the default value is set.

    Args:
        diagnostic_cubes (list):
            List of diagnostic cubes used in calculating the new diagnostic
        model_id_attr (str or None):
            Name of attribute used to identify source model for blending,
            if required

    Returns:
        dict: Dictionary of mandatory attribute "key": "value" pairs.
    """
    missing_value = object()
    attr_dicts = [cube.attributes for cube in diagnostic_cubes]
    required_attributes = [model_id_attr] if model_id_attr else []
    attributes = MANDATORY_ATTRIBUTE_DEFAULTS.copy()
    for attr in MANDATORY_ATTRIBUTES + required_attributes:
        unique_values = set(d.get(attr, missing_value) for d in attr_dicts)
        if len(unique_values) == 1 and missing_value not in unique_values:
            attributes[attr], = unique_values
        elif attr in required_attributes:
            msg = ('Required attribute "{}" is missing or '
                   'not the same on all input cubes')
            raise ValueError(msg.format(attr))
    return attributes
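
A minimal usage sketch of the function above. The constants and attribute names below are illustrative stand-ins (the real MANDATORY_ATTRIBUTES and MANDATORY_ATTRIBUTE_DEFAULTS live in the improver package, and "model_id" is a hypothetical attribute name); SimpleNamespace objects stand in for iris cubes, since only an attributes dictionary is needed here.

from types import SimpleNamespace

# Illustrative stand-ins for the improver constants.
MANDATORY_ATTRIBUTES = ["title", "source", "institution"]
MANDATORY_ATTRIBUTE_DEFAULTS = {
    "title": "unknown", "source": "unknown", "institution": "unknown"}

# Stand-in "cubes": anything with an attributes dict will do here.
cube_a = SimpleNamespace(attributes={"institution": "Site X", "model_id": "model_a"})
cube_b = SimpleNamespace(attributes={"institution": "Site X", "model_id": "model_b"})

# "institution" is identical on all inputs, so it replaces the default;
# "title" and "source" are absent, so their defaults are retained.
print(generate_mandatory_attributes([cube_a, cube_b]))

# With model_id_attr set, the attribute must be present and identical on all
# cubes; the values differ here, so a ValueError is raised.
try:
    generate_mandatory_attributes([cube_a, cube_b], model_id_attr="model_id")
except ValueError as err:
    print(err)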
Example #2
def generate_mandatory_attributes(diagnostic_cubes):
    """
    Function to generate mandatory attributes for new diagnostics that are
    generated using several different model diagnostics as input to the
    calculation. If all input diagnostics have the same value for an
    attribute, that value is used; otherwise the default value is set.

    Args:
        diagnostic_cubes (list):
            List of diagnostic cubes used in calculating the new diagnostic

    Returns:
        dict: Dictionary of mandatory attribute "key": "value" pairs.
    """
    attributes = MANDATORY_ATTRIBUTE_DEFAULTS.copy()
    for attr in MANDATORY_ATTRIBUTES:
        try:
            values = [cube.attributes[attr] for cube in diagnostic_cubes]
        except KeyError:
            # if not all input cubes have this attribute, retain default
            pass
        else:
            unique_values = np.unique(values)
            if len(unique_values) == 1:
                attributes[attr] = unique_values[0]
    return attributes
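
This variant handles a missing attribute differently from Example #1: a KeyError from any input cube silently retains the default instead of raising. A minimal sketch of that behaviour, again with illustrative stand-ins for the improver constants and for iris cubes:

import numpy as np
from types import SimpleNamespace

# Illustrative stand-ins for the improver constants.
MANDATORY_ATTRIBUTES = ["title", "source", "institution"]
MANDATORY_ATTRIBUTE_DEFAULTS = {
    "title": "unknown", "source": "unknown", "institution": "unknown"}

cube_a = SimpleNamespace(attributes={"source": "Model A", "institution": "Site X"})
cube_b = SimpleNamespace(attributes={"source": "Model A"})

# "source" is present and identical on both inputs, so it is carried over;
# "institution" is missing from one cube, so the KeyError branch keeps the
# default; "title" is missing from both, so its default is kept too.
attributes = generate_mandatory_attributes([cube_a, cube_b])
print(attributes)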
Example #3
    def setUp(self):
        """Set up template with data, coordinates, attributes and cell
        methods"""
        self.template_cube = set_up_variable_cube(
            280 * np.ones((3, 5, 5), dtype=np.float32),
            standard_grid_metadata="uk_det")
        self.template_cube.add_cell_method("time (max): 1 hour")
        self.name = "lwe_precipitation_rate"
        self.units = "mm h-1"
        self.mandatory_attributes = MANDATORY_ATTRIBUTE_DEFAULTS.copy()
Example #4
def test_set_attributes():
    """ Tests cube generated with specified attributes and the rest of the values set
    as default values """
    attributes = {"test_attribute": "kittens"}
    cube = generate_metadata(MANDATORY_ATTRIBUTE_DEFAULTS,
                             attributes=attributes)
    expected_attributes = MANDATORY_ATTRIBUTE_DEFAULTS.copy()
    expected_attributes["test_attribute"] = "kittens"
    assert cube.attributes == expected_attributes

    # Check that no other values have unexpectedly changed by resetting the
    # changed attributes to defaults and comparing against a default cube
    default_cube = generate_metadata(MANDATORY_ATTRIBUTE_DEFAULTS)
    cube.attributes = default_cube.attributes

    assert cube == default_cube
Example #5
    def create_coefficient_cube(
        self, data: ndarray, template: Cube, cube_name: str, attributes: Dict
    ) -> Cube:
        """
        Update metadata in smoothing_coefficients cube. Remove any time
        coordinates and rename.

        Args:
            data:
                The smoothing coefficient data to store in the cube.
            template:
                A gradient cube, the dimensions of which are used as a template
                for the coefficient cube.
            cube_name:
                A name for the resultant cube
            attributes:
                A dictionary of attributes for the new cube.

        Returns:
            A new cube of smoothing_coefficients
        """
        for coord in template.coords(dim_coords=False):
            for coord_name in ["time", "period", "realization"]:
                if coord_name in coord.name():
                    template.remove_coord(coord)

        attributes["title"] = "Recursive filter smoothing coefficients"
        attributes.pop("history", None)
        attributes["power"] = self.power

        return create_new_diagnostic_cube(
            cube_name,
            "1",
            template,
            MANDATORY_ATTRIBUTE_DEFAULTS.copy(),
            optional_attributes=attributes,
            data=data,
        )
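
The attribute handling in create_coefficient_cube can be sketched in isolation. All values below are placeholders, and the final merge assumes that optional attributes are layered over the mandatory defaults by create_new_diagnostic_cube:

# Illustrative stand-in for the improver defaults.
MANDATORY_ATTRIBUTE_DEFAULTS = {
    "title": "unknown", "source": "unknown", "institution": "unknown"}

attributes = {"source": "Model A", "history": "placeholder history entry"}
power = 0.5  # placeholder for self.power

attributes["title"] = "Recursive filter smoothing coefficients"
attributes.pop("history", None)
attributes["power"] = power

# Assumed merge order: optional attributes take precedence over the defaults.
final_attributes = {**MANDATORY_ATTRIBUTE_DEFAULTS, **attributes}
print(final_attributes)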
Example #6
    def setUp(self):
        """Create data cubes and weights for testing"""
        frt_points = [
            datetime(2015, 11, 19, 0),
            datetime(2015, 11, 19, 1),
            datetime(2015, 11, 19, 2),
        ]

        cube = set_up_variable_cube(
            np.zeros((2, 2), dtype=np.float32),
            name="precipitation_amount",
            units="kg m^-2 s^-1",
            time=datetime(2015, 11, 19, 2),
            frt=datetime(2015, 11, 19, 0),
            standard_grid_metadata="gl_det",
            attributes={"title": "Operational ENGL Model Forecast"},
        )
        self.cube = add_coordinate(
            cube, frt_points, "forecast_reference_time", is_datetime=True
        )
        self.cube.data[0, :, :] = 1.0
        self.cube.data[1, :, :] = 2.0
        self.cube.data[2, :, :] = 3.0
        self.expected_attributes = MANDATORY_ATTRIBUTE_DEFAULTS.copy()
        self.expected_attributes.update(self.cube.attributes)
        self.expected_attributes[
            "title"
        ] = "Post-Processed Operational ENGL Model Forecast"

        cube_threshold = set_up_probability_cube(
            np.zeros((2, 2, 2), dtype=np.float32),
            np.array([0.4, 1], dtype=np.float32),
            variable_name="precipitation_amount",
            threshold_units="kg m^-2 s^-1",
            time=datetime(2015, 11, 19, 2),
            frt=datetime(2015, 11, 19, 0),
            standard_grid_metadata="gl_det",
            attributes={"title": "Operational ENGL Model Forecast"},
        )

        self.cube_threshold = add_coordinate(
            cube_threshold,
            frt_points,
            "forecast_reference_time",
            is_datetime=True,
            order=(1, 0, 2, 3),
        )
        self.cube_threshold.data[0, 0, :, :] = 0.2
        self.cube_threshold.data[0, 1, :, :] = 0.4
        self.cube_threshold.data[0, 2, :, :] = 0.6
        self.cube_threshold.data[1, 0, :, :] = 0.4
        self.cube_threshold.data[1, 1, :, :] = 0.6
        self.cube_threshold.data[1, 2, :, :] = 0.8

        # Weights cubes
        # 3D varying in space and forecast reference time.
        weights3d = np.array(
            [
                [[0.1, 0.3], [0.2, 0.4]],
                [[0.1, 0.3], [0.2, 0.4]],
                [[0.8, 0.4], [0.6, 0.2]],
            ],
            dtype=np.float32,
        )
        self.weights3d = self.cube.copy(data=weights3d)
        self.weights3d.rename("weights")
        self.weights3d.units = "no_unit"
        self.weights3d.attributes = {}

        # 1D varying with forecast reference time.
        weights1d = np.array([0.6, 0.3, 0.1], dtype=np.float32)
        self.weights1d = self.weights3d[:, 0, 0].copy(data=weights1d)
        self.weights1d.remove_coord("latitude")
        self.weights1d.remove_coord("longitude")