Code example #1
 def test_basic(self):
     """Test that conform_metadata returns a cube with a suitable title
     attribute."""
     result = conform_metadata(self.cube, self.cube_orig, self.coord)
     expected_attributes = {'title': 'IMPROVER Model Forecast'}
     self.assertIsInstance(result, iris.cube.Cube)
     self.assertDictEqual(result.attributes, expected_attributes)
Code example #2
 def test_with_model_model_id_and_model_realization(self):
     """Test that a cube is dealt with correctly, if the cube contains a
     model, model_id and model_realization coordinate."""
     coord = "model_id"
     result = conform_metadata(self.cube_model, self.cube_orig_model, coord)
     self.assertFalse(result.coords("model_id"))
     self.assertFalse(result.coords("model_realization"))
Code example #3
 def test_scalar_coordinate_bound_removal(self):
     """Test that if a cube contains scalar coordinates, these coordinates
     do not have bounds."""
     cube = self.cube
     cube.add_aux_coord(
         AuxCoord([10.], standard_name="height", units="m",
                  bounds=np.array([5., 15.])))
     result = conform_metadata(
         cube, self.cube_orig, self.coord,
         coords_for_bounds_removal=["height"])
     self.assertFalse(result.coord("height").bounds)
Code example #4
 def test_with_forecast_period(self):
     """Test that a cube is dealt with correctly, if the cube contains
     a forecast_reference_time and forecast_period coordinate."""
     result = conform_metadata(self.cube, self.cube_orig, self.coord)
     self.assertEqual(
         result.coord("forecast_reference_time").points,
         np.max(self.cube_orig.coord("forecast_reference_time").points))
     self.assertFalse(result.coord("forecast_reference_time").bounds)
     self.assertEqual(
         result.coord("forecast_period").points,
         np.min(self.cube_orig.coord("forecast_period").points))
     self.assertFalse(result.coord("forecast_period").bounds)
Code example #5
 def test_without_forecast_period(self):
     """Test that a cube is dealt with correctly, if the cube contains a
     forecast_reference_time coordinate but not a forecast_period."""
     result = conform_metadata(self.cube_without_fp,
                               self.cube_orig_without_fp, self.coord)
     fp_coord = self.cube_orig.coord("forecast_period").copy()
     fp_coord.convert_units("seconds")
     self.assertEqual(
         result.coord("forecast_reference_time").points,
         np.max(self.cube_orig.coord("forecast_reference_time").points))
     self.assertFalse(result.coord("forecast_reference_time").bounds)
     self.assertEqual(
         result.coord("forecast_period").points, np.min(fp_coord.points))
     self.assertFalse(result.coord("forecast_period").bounds)
Code example #6
 def test_with_forecast_period_and_cycletime(self):
     """Test that a cube is dealt with correctly, if the cube contains
     a forecast_reference_time and forecast_period coordinate and a
     cycletime is specified."""
     expected_forecast_reference_time = np.array([402294.])
     expected_forecast_period = np.array([1.])  # 1 hour.
     result = conform_metadata(
         self.cube, self.cube_orig, self.coord, cycletime="20151123T0600Z")
     self.assertArrayAlmostEqual(
         result.coord("forecast_reference_time").points,
         expected_forecast_reference_time)
     self.assertFalse(result.coord("forecast_reference_time").bounds)
     self.assertEqual(
         result.coord("forecast_period").points, expected_forecast_period)
     self.assertFalse(result.coord("forecast_period").bounds)
Code example #7
 def test_without_forecast_period_and_cycletime(self):
     """Test that a cube is dealt with correctly, if the cube contains a
     forecast_reference_time coordinate but not a forecast_period when a
     cycletime is specified. The same forecast_period value should be
     created as when the input cube has a forecast_period coordinate."""
     expected_forecast_reference_time = np.array([402294.])
     expected_forecast_period = np.array([3600.])
     result = conform_metadata(
         self.cube_without_fp, self.cube_orig_without_fp, self.coord,
         cycletime="20151123T0600Z")
     self.assertEqual(
         result.coord("forecast_reference_time").points,
         expected_forecast_reference_time)
     self.assertFalse(result.coord("forecast_reference_time").bounds)
     self.assertEqual(
         result.coord("forecast_period").points, expected_forecast_period)
     self.assertFalse(result.coord("forecast_period").bounds)
Code example #8
 def test_forecast_coordinate_bounds_removal(self):
     """Test that if a cube has bounds on the forecast period and reference
     time, that these are removed"""
     self.cube_orig.coord("forecast_period").bounds = np.array(
         [[x - 0.5, x + 0.5]
          for x in self.cube_orig.coord("forecast_period").points])
     self.cube_orig.coord("forecast_reference_time").bounds = np.array(
         [[x - 0.5, x + 0.5]
          for x in self.cube_orig.coord("forecast_reference_time").points])
     self.cube.coord("forecast_period").bounds = np.array(
         [[x - 0.5, x + 0.5]
          for x in self.cube.coord("forecast_period").points])
     self.cube.coord("forecast_reference_time").bounds = np.array(
         [[x - 0.5, x + 0.5]
          for x in self.cube.coord("forecast_reference_time").points])
     result = conform_metadata(self.cube, self.cube_orig,
                               "forecast_reference_time")
     self.assertIsNone(result.coord("forecast_reference_time").bounds)
     self.assertIsNone(result.coord("forecast_period").bounds)
Code example #9
 def test_basic(self):
     """Test that conform_metadata returns a cube."""
     result = conform_metadata(self.cube, self.cube_orig, self.coord)
     self.assertIsInstance(result, iris.cube.Cube)
Code example #10
File: weighted_blending.py  Project: kinow/improver
def main(argv=None):
    """Load in arguments and ensure they are set correctly.
       Then load in the data to blend and calculate default weights
       using the method chosen before carrying out the blending."""
    parser = ArgParser(
        description='Calculate the default weights to apply in weighted '
        'blending plugins using the ChooseDefaultWeightsLinear or '
        'ChooseDefaultWeightsNonLinear plugins. Then apply these '
        'weights to the dataset using the BasicWeightedAverage plugin.'
        ' Required for ChooseDefaultWeightsLinear: y0val and ynval.'
        ' Required for ChooseDefaultWeightsNonLinear: cval.'
        ' Required for ChooseWeightsLinear with dict: wts_dict.')

    parser.add_argument('--wts_calc_method',
                        metavar='WEIGHTS_CALCULATION_METHOD',
                        choices=['linear', 'nonlinear', 'dict'],
                        default='linear',
                        help='Method to use to calculate '
                        'weights used in blending. "linear" (default): '
                        'calculate linearly varying blending weights. '
                        '"nonlinear": calculate blending weights that decrease'
                        ' exponentially with increasing blending coordinate. '
                        '"dict": calculate weights using a dictionary passed '
                        'in as a command line argument.')

    parser.add_argument('coordinate',
                        type=str,
                        metavar='COORDINATE_TO_AVERAGE_OVER',
                        help='The coordinate over which the blending '
                        'will be applied.')
    parser.add_argument('--coordinate_unit',
                        metavar='UNIT_STRING',
                        default='hours since 1970-01-01 00:00:00',
                        help='Units for blending coordinate. Default= '
                        'hours since 1970-01-01 00:00:00')
    parser.add_argument('--calendar',
                        metavar='CALENDAR',
                        help='Calendar for time coordinate. Default=gregorian')
    parser.add_argument('--cycletime',
                        metavar='CYCLETIME',
                        type=str,
                        help='The forecast reference time to be used after '
                        'blending has been applied, in the format '
                        'YYYYMMDDTHHMMZ. If not provided, the blended file '
                        'will take the latest available forecast reference '
                        'time from the input cubes supplied.')
    parser.add_argument('--model_id_attr',
                        metavar='MODEL_ID_ATTR',
                        type=str,
                        default="mosg__model_configuration",
                        help='The name of the netCDF file attribute to be '
                        'used to identify the source model for '
                        'multi-model blends. Default assumes Met Office '
                        'model metadata. Must be present on all input '
                        'files if blending over models.')
    parser.add_argument('--spatial_weights_from_mask',
                        action='store_true',
                        default=False,
                        help='If set, this option will result in the'
                        ' generation of spatially varying weights based on'
                        ' the masks of the data we are blending. The'
                        ' one-dimensional weights are first calculated'
                        ' using the chosen weights calculation method,'
                        ' but the weights will then be adjusted spatially'
                        ' based on where there is masked data in the data'
                        ' we are blending. The spatial weights are'
                        ' calculated using the'
                        ' SpatiallyVaryingWeightsFromMask plugin.')
    parser.add_argument('weighting_mode',
                        metavar='WEIGHTED_BLEND_MODE',
                        choices=['weighted_mean', 'weighted_maximum'],
                        help='The method used in the weighted blend. '
                        '"weighted_mean": calculate a normal weighted'
                        ' mean across the coordinate. '
                        '"weighted_maximum": multiplies the values in the'
                        ' coordinate by the weights, and then takes the'
                        ' maximum.')

    parser.add_argument('input_filepaths',
                        metavar='INPUT_FILES',
                        nargs="+",
                        help='Paths to input files to be blended.')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')

    spatial = parser.add_argument_group(
        'Spatial weights from mask options',
        'Options for calculating the spatial weights using the '
        'SpatiallyVaryingWeightsFromMask plugin.')
    spatial.add_argument('--fuzzy_length',
                         metavar='FUZZY_LENGTH',
                         type=float,
                         default=20000,
                         help='When calculating spatially varying weights we'
                         ' can smooth the weights so that areas close to'
                         ' areas that are masked have lower weights than'
                         ' those further away. This fuzzy length controls'
                         ' the scale over which the weights are smoothed.'
                         ' The fuzzy length is given in metres; the'
                         ' default is 20 km. This distance is then'
                         ' converted into a number of grid squares,'
                         ' which does not have to be an integer. Assumes'
                         ' the grid spacing is the same in the x and y'
                         ' directions, and raises an error if this is not'
                         ' true. See SpatiallyVaryingWeightsFromMask for'
                         ' more detail.')

    linear = parser.add_argument_group(
        'linear weights options', 'Options for the linear weights '
        'calculation in '
        'ChooseDefaultWeightsLinear')
    linear.add_argument('--y0val',
                        metavar='LINEAR_STARTING_POINT',
                        type=float,
                        help='The relative value of the weighting start point '
                        '(lowest value of blend coord) for choosing default '
                        'linear weights. This must be a positive float or 0.')
    linear.add_argument('--ynval',
                        metavar='LINEAR_END_POINT',
                        type=float,
                        help='The relative value of the weighting '
                        'end point (highest value of blend coord) for choosing'
                        ' default linear weights. This must be a positive '
                        'float or 0.  Note that if blending over forecast '
                        'reference time, ynval >= y0val would normally be '
                        'expected (to give greater weight to the more recent '
                        'forecast).')

    nonlinear = parser.add_argument_group(
        'nonlinear weights options', 'Options for the non-linear '
        'weights calculation in '
        'ChooseDefaultWeightsNonLinear')
    nonlinear.add_argument('--cval',
                           metavar='NON_LINEAR_FACTOR',
                           type=float,
                           help='Factor used to determine how skewed the '
                           'non-linear weights will be. '
                           'A value of 1 implies equal weighting. If not '
                           'set, a default of cval=0.85 is used.')

    wts_dict = parser.add_argument_group(
        'dict weights options', 'Options for linear weights to be '
        'calculated based on parameters '
        'read from a JSON dictionary file')
    wts_dict.add_argument('--wts_dict',
                          metavar='WEIGHTS_DICTIONARY',
                          help='Path to JSON file containing the dictionary '
                          'which to calculate blending weights. Dictionary '
                          'format is as specified in the improver.blending.'
                          'weights.ChooseWeightsLinear plugin.')
    wts_dict.add_argument('--weighting_coord',
                          metavar='WEIGHTING_COORD',
                          default='forecast_period',
                          help='Name of '
                          'coordinate over which linear weights should be '
                          'scaled. This coordinate must be available in the '
                          'weights dictionary.')

    args = parser.parse_args(args=argv)

    # if the linear weights method is called with non-linear args or vice
    # versa, exit with error
    if (args.wts_calc_method == "linear") and args.cval:
        parser.wrong_args_error('cval', 'linear')
    if ((args.wts_calc_method == "nonlinear")
            and np.any([args.y0val, args.ynval])):
        parser.wrong_args_error('y0val, ynval', 'non-linear')
    if (args.wts_calc_method == "dict") and not args.wts_dict:
        parser.error('Dictionary is required if --wts_calc_method="dict"')

    # set blending coordinate units
    if "time" in args.coordinate:
        coord_unit = Unit(args.coordinate_unit, args.calendar)
    elif args.coordinate_unit != 'hours since 1970-01-01 00:00:00':
        coord_unit = args.coordinate_unit
    else:
        coord_unit = 'no_unit'

    # For blending across models, only blending across "model_id" is directly
    # supported. This is because the blending coordinate must be sortable, in
    # order to ensure that the data cube and the weights cube have coordinates
    # in the same order for blending. Whilst the model_configuration is
    # sortable itself, as it is associated with model_id, which is the
    # dimension coordinate, sorting the model_configuration coordinate can
    # result in the model_id coordinate becoming non-monotonic. As dimension
    # coordinates must be monotonic, this leads to the model_id coordinate
    # being demoted to an auxiliary coordinate. Therefore, for simplicity
    # model_id is used as the blending coordinate, instead of
    # model_configuration.
    # TODO: Support model_configuration as a blending coordinate directly.
    if args.coordinate == "model_configuration":
        blend_coord = "model_id"
        dict_coord = "model_configuration"
    else:
        blend_coord = args.coordinate
        dict_coord = args.coordinate

    # load cubes to be blended
    cubelist = load_cubelist(args.input_filepaths)

    # determine whether or not to equalise forecast periods for model
    # blending weights calculation
    weighting_coord = (args.weighting_coord
                       if args.weighting_coord else "forecast_period")

    # prepare cubes for weighted blending
    merger = MergeCubesForWeightedBlending(blend_coord,
                                           weighting_coord=weighting_coord,
                                           model_id_attr=args.model_id_attr)
    cube = merger.process(cubelist, cycletime=args.cycletime)

    # if the coord for blending does not exist or has only one value,
    # update metadata only
    coord_names = [coord.name() for coord in cube.coords()]
    if (blend_coord not in coord_names) or (len(
            cube.coord(blend_coord).points) == 1):
        result = cube.copy()
        conform_metadata(result, cube, blend_coord, cycletime=args.cycletime)
        # raise a warning if this happened because the blend coordinate
        # doesn't exist
        if blend_coord not in coord_names:
            warnings.warn('Blend coordinate {} is not present on input '
                          'data'.format(blend_coord))

    # otherwise, calculate weights and blend across specified dimension
    else:
        weights = calculate_blending_weights(
            cube,
            blend_coord,
            args.wts_calc_method,
            wts_dict=args.wts_dict,
            weighting_coord=args.weighting_coord,
            coord_unit=coord_unit,
            y0val=args.y0val,
            ynval=args.ynval,
            cval=args.cval,
            dict_coord=dict_coord)

        if args.spatial_weights_from_mask:
            check_if_grid_is_equal_area(cube)
            grid_cells_x, _ = convert_distance_into_number_of_grid_cells(
                cube, args.fuzzy_length, int_grid_cells=False)
            SpatialWeightsPlugin = SpatiallyVaryingWeightsFromMask(
                grid_cells_x)
            weights = SpatialWeightsPlugin.process(cube, weights, blend_coord)

        # blend across specified dimension
        BlendingPlugin = WeightedBlendAcrossWholeDimension(
            blend_coord, args.weighting_mode, cycletime=args.cycletime)
        result = BlendingPlugin.process(cube, weights=weights)

    save_netcdf(result, args.output_filepath)
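For illustration, a minimal programmatic invocation of main() might look like
the sketch below. The file paths are placeholders; only options defined above
are used, and positional arguments follow their declaration order (coordinate,
weighting mode, input files, output file).

# Hypothetical invocation sketch; all paths are placeholders.
main(argv=[
    '--wts_calc_method', 'linear',
    '--y0val', '1.0', '--ynval', '4.0',
    'forecast_reference_time',          # coordinate to blend over
    'weighted_mean',                    # weighted blend mode
    'forecast_a.nc', 'forecast_b.nc',   # input files
    'blend.nc',                         # output file
])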
Code example #11
    def process(self,
                cubelist,
                cycletime=None,
                model_id_attr=None,
                spatial_weights=False,
                fuzzy_length=20000):
        """
        Merge a cubelist, calculate appropriate blend weights and compute the
        weighted mean. Returns a single cube collapsed over the dimension
        given by self.blend_coord.

        Args:
            cubelist (iris.cube.CubeList):
                List of cubes to be merged and blended

        Kwargs:
            cycletime (str):
                Forecast reference time to use for output cubes, in the format
                YYYYMMDDTHHMMZ.  If not set, the latest of the input cube
                forecast reference times is used.
            model_id_attr (str):
                Name of the attribute by which to identify the source model and
                construct "model" coordinates for blending.
            spatial_weights (bool):
                If true, calculate spatial weights.
            fuzzy_length (float):
                Distance (in metres) over which to smooth spatial weights.
                Default is 20 km.
        """
        # Prepare cubes for weighted blending, including creating model_id and
        # model_configuration coordinates for multi-model blending. The merged
        # cube has a monotonically ascending blend coordinate. Plugin raises an
        # error if blend_coord is not present on all input cubes.
        merger = MergeCubesForWeightedBlending(
            self.blend_coord,
            weighting_coord=self.weighting_coord,
            model_id_attr=model_id_attr)
        cube = merger.process(cubelist, cycletime=cycletime)

        # if blend_coord has only one value, or is not present (the case where
        # only one model has been provided for a model blend), update metadata
        # only
        coord_names = [coord.name() for coord in cube.coords()]
        if (self.blend_coord not in coord_names
                or len(cube.coord(self.blend_coord).points) == 1):
            result = cube.copy()
            conform_metadata(result,
                             cube,
                             self.blend_coord,
                             cycletime=cycletime)

        # otherwise, calculate weights and blend across specified dimension
        else:
            # set up special treatment for model blending
            if "model" in self.blend_coord:
                self.blend_coord = "model_id"

            # calculate blend weights
            weights = self._calculate_blending_weights(cube)
            if spatial_weights:
                weights = self._update_spatial_weights(cube, weights,
                                                       fuzzy_length)

            # blend across specified dimension
            BlendingPlugin = WeightedBlendAcrossWholeDimension(
                self.blend_coord, cycletime=cycletime)
            result = BlendingPlugin.process(cube, weights=weights)

        return result
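Assuming the process method above belongs to a blending plugin whose
constructor stores blend_coord and weighting_coord (the class name below is
hypothetical, since the excerpt does not show it), usage might look like:

# Hypothetical sketch: the class name and constructor are assumptions;
# only the process() signature is taken from the excerpt above.
plugin = WeightedBlendPlugin("forecast_reference_time",
                             weighting_coord="forecast_period")
result = plugin.process(cubelist, cycletime="20151123T0600Z",
                        spatial_weights=True, fuzzy_length=10000.)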