Example #1
def with_output(wrapped, *args, output=None, **kwargs):
    """Add `output` keyword only argument.

    This is used to add an extra `output` CLI option. If provided, it saves
    the result of calling `wrapped` to file and returns None, otherwise it
    returns the result.

    Args:
        wrapped (obj):
            The function to be wrapped.
        output (str, optional):
            Output file name. If not supplied, the output object will be
            printed instead.

    Returns:
        Result of calling `wrapped` or None if `output` is given.
    """
    from improver.utilities.save import save_netcdf
    result = wrapped(*args, **kwargs)
    if output:
        save_netcdf(result, output)
        return
    return result
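As a standalone illustration of the pattern above, a wrapper like this can be bound to a target function with functools.partial so the bound callable gains the extra output keyword. This is a minimal sketch; the hypothetical save_text stands in for save_netcdf.

import functools

def save_text(result, path):
    """Stand-in for save_netcdf: write the result to a text file."""
    with open(path, "w") as handle:
        handle.write(str(result))

def with_output(wrapped, *args, output=None, **kwargs):
    """Return the result of `wrapped`, or save it to `output` and return None."""
    result = wrapped(*args, **kwargs)
    if output:
        save_text(result, output)
        return None
    return result

def add_two(x):
    return x + 2

# Binding gives add_two an extra `output` keyword-only option.
add_two_with_output = functools.partial(with_output, add_two)

print(add_two_with_output(3))                # 5, returned directly
add_two_with_output(3, output="result.txt")  # saved to file, returns None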
Example #2
 def test_wildcard_files_with_constraint(self):
     """Test that the loading works correctly, if a wildcarded filepath is
     provided and a constraint is provided that is only valid for a subset
     of the available files."""
     low_cloud_cube = self.cube.copy()
     low_cloud_cube.rename("low_type_cloud_area_fraction")
     save_netcdf(low_cloud_cube, self.low_cloud_filepath)
     medium_cloud_cube = self.cube.copy()
     medium_cloud_cube.rename("medium_type_cloud_area_fraction")
     save_netcdf(medium_cloud_cube, self.med_cloud_filepath)
     constr = iris.Constraint("low_type_cloud_area_fraction")
     result = load_cubelist(
         [self.low_cloud_filepath, self.med_cloud_filepath],
         constraints=constr)
     self.assertEqual(len(result), 1)
     self.assertArrayAlmostEqual(result[0].coord("realization").points,
                                 self.realization_points)
     self.assertArrayAlmostEqual(result[0].coord("time").points,
                                 self.time_points)
     self.assertArrayAlmostEqual(result[0].coord("latitude").points,
                                 self.latitude_points)
     self.assertArrayAlmostEqual(result[0].coord("longitude").points,
                                 self.longitude_points)
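The constraint mechanics being tested can be sketched with plain iris (rather than IMPROVER's load_cubelist): a name constraint applied at load time keeps only cubes whose name matches, so files without a matching variable contribute nothing to the result. The file names here are hypothetical.

import iris

# Only cubes named "low_type_cloud_area_fraction" survive the load; the
# medium-cloud file yields no matching cube and is silently skipped.
constr = iris.Constraint("low_type_cloud_area_fraction")
cubes = iris.load(["low_cloud.nc", "medium_cloud.nc"], constraints=constr)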
Example #3
def test_least_significant_digit(bitshaving_cube, tmp_path, lsd, compress):
    """ Test the least significant digit for bitshaving output files"""
    filepath = tmp_path / "temp.nc"
    save_netcdf(
        bitshaving_cube,
        filepath,
        compression_level=compress,
        least_significant_digit=lsd,
    )

    # check that netcdf metadata has been set
    data = Dataset(filepath, mode="r")
    # pylint: disable=unsubscriptable-object
    assert data.variables["air_temperature"].least_significant_digit == lsd

    file_cube = load_cube(str(filepath))
    abs_diff = np.abs(bitshaving_cube.data.data - file_cube.data.data)
    # check that whole numbers are preserved
    assert np.min(abs_diff) == 0.0
    # check that modified data is accurate to the specified number of digits
    assert 0 < np.median(abs_diff) < 10**(-1.0 * (lsd + 0.5))
    assert 0 < np.mean(abs_diff) < 10**(-1.0 * (lsd + 0.5))
    assert np.max(abs_diff) < 10**(-1.0 * lsd)
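The error bounds asserted above follow from the quantisation step: keeping lsd decimal digits bounds the absolute error by 10**(-lsd). A rough standalone sketch of the idea using decimal rounding (netCDF4's least_significant_digit actually rounds to a power-of-two scale chosen to preserve lsd digits, so this is only an approximation):

import numpy as np

lsd = 2
rng = np.random.default_rng(0)
data = rng.uniform(270.0, 290.0, size=1000)

# Quantise to `lsd` decimal digits, as least_significant_digit aims to do.
shaved = np.around(data, decimals=lsd)

abs_diff = np.abs(data - shaved)
assert np.max(abs_diff) <= 0.5 * 10 ** (-lsd)       # worst case: half a step
assert np.median(abs_diff) < 10 ** (-(lsd + 0.5))   # typical error is smaller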
Example #4
def main(argv=None):
    """ Calculate the UV index using the data
    in the input cubes."""
    parser = ArgParser(description="Calculates the UV index.")
    parser.add_argument("radiation_flux_upward",
                        metavar="RADIATION_FLUX_UPWARD",
                        help="Path to a NetCDF file of radiation flux "
                        "in uv upward at surface.")
    parser.add_argument("radiation_flux_downward",
                        metavar="RADIATION_FLUX_DOWNWARD",
                        help="Path to a NetCDF file of radiation flux "
                        "in uv downward at surface.")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")

    args = parser.parse_args(args=argv)

    rad_uv_up = load_cube(args.radiation_flux_upward)
    rad_uv_down = load_cube(args.radiation_flux_downward)

    result = calculate_uv_index(rad_uv_up, rad_uv_down)
    save_netcdf(result, args.output_filepath)
Example #5
def main(argv=None):
    """Extend radar mask based on coverage data."""
    parser = ArgParser(description="Extend radar mask based on coverage "
                       "data.")
    parser.add_argument("radar_data_filepath",
                        metavar="RADAR_DATA_FILEPATH",
                        type=str,
                        help="Full path to input NetCDF file "
                        "containing the radar variable to remask.")
    parser.add_argument("coverage_filepath",
                        metavar="COVERAGE_FILEPATH",
                        type=str,
                        help="Full path to input NetCDF file "
                        "containing radar coverage data.")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILEPATH",
                        type=str,
                        help="Full path to save remasked radar data "
                        "NetCDF file.")
    parser.add_argument("--fix_float64",
                        action='store_true',
                        default=False,
                        help="Check and fix cube for float64 data. Without "
                        "this option an exception will be raised if "
                        "float64 data is found but no fix applied.")

    args = parser.parse_args(args=argv)

    # Load Cubes
    radar_data = load_cube(args.radar_data_filepath)
    coverage = load_cube(args.coverage_filepath)

    # Process Cube
    remasked_data = process(coverage, radar_data, args.fix_float64)

    # Save Cube
    save_netcdf(remasked_data, args.output_filepath)
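The --fix_float64 behaviour described in the help text can be sketched in isolation: inspect the data's dtype and either demote it to 32-bit or raise. This is a hypothetical reconstruction of the described behaviour, not IMPROVER's implementation.

import numpy as np

def check_fix_float64(data, fix=False):
    """Demote float64 data to float32 if fix is True, otherwise raise."""
    if data.dtype == np.float64:
        if not fix:
            raise TypeError("float64 data found but no fix applied")
        return data.astype(np.float32)
    return data

print(check_fix_float64(np.zeros(3, dtype=np.float64), fix=True).dtype)  # float32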
Example #6
def main(argv=None):
    """ Load in the arguments for feels like temperature and ensure they are
    set correctly. Then calculate the feels like temperature using the data
    in the input cubes."""
    parser = ArgParser(
        description="This calculates the feels like temperature using a "
        "combination of the wind chill index and Steadman's "
        "apparent temperature equation.")
    parser.add_argument("temperature",
                        metavar="TEMPERATURE",
                        help="Path to a NetCDF file of air temperatures at "
                        "screen level.")
    parser.add_argument("wind_speed",
                        metavar="WIND_SPEED",
                        help="Path to the NetCDF file of wind speed at 10m.")
    parser.add_argument("relative_humidity",
                        metavar="RELATIVE_HUMIDITY",
                        help="Path to the NetCDF file of relative humidity "
                        "at screen level.")
    parser.add_argument("pressure",
                        metavar="PRESSURE",
                        help="Path to a NetCDF file of mean sea level "
                        "pressure.")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")

    args = parser.parse_args(args=argv)

    temperature = load_cube(args.temperature)
    wind_speed = load_cube(args.wind_speed)
    relative_humidity = load_cube(args.relative_humidity)
    pressure = load_cube(args.pressure)

    result = calculate_feels_like_temperature(temperature, wind_speed,
                                              relative_humidity, pressure)
    save_netcdf(result, args.output_filepath)
Example #7
def main(argv=None):
    """Apply lapse rates to temperature data."""
    parser = ArgParser(description='Apply downscaling temperature adjustment '
                       'using calculated lapse rate.')

    parser.add_argument('temperature_filepath',
                        metavar='TEMPERATURE_FILEPATH',
                        help='Full path to input temperature NetCDF file')
    parser.add_argument('lapse_rate_filepath',
                        metavar='LAPSE_RATE_FILEPATH',
                        help='Full path to input lapse rate NetCDF file')
    parser.add_argument('source_orography',
                        metavar='SOURCE_OROG_FILE',
                        help='Full path to NetCDF file containing the source '
                        'model orography')
    parser.add_argument('target_orography',
                        metavar='TARGET_OROG_FILE',
                        help='Full path to target orography NetCDF file '
                        '(to which temperature will be downscaled)')
    parser.add_argument('output_file',
                        metavar='OUTPUT_FILE',
                        help='File name '
                        'to write lapse rate adjusted temperature data')

    args = parser.parse_args(args=argv)

    # Load cubes
    temperature = load_cube(args.temperature_filepath)
    lapse_rate = load_cube(args.lapse_rate_filepath)
    source_orog = load_cube(args.source_orography)
    target_orog = load_cube(args.target_orography)

    # Process Cubes
    adjusted_temperature = process(temperature, lapse_rate, source_orog,
                                   target_orog)
    # Save Cube
    save_netcdf(adjusted_temperature, args.output_file)
Example #8
 def test_ordering_for_realization_threshold_percentile_coordinate(self):
     """Test that the cube has been reordered, if it is originally in an
     undesirable order and the cube contains a "threshold" coordinate,
     a "realization" coordinate and a "percentile" coordinate."""
     cube = set_up_probability_cube(
         np.zeros((3, 4, 5), dtype=np.float32),
         np.array([273., 274., 275.], dtype=np.float32))
     cube = add_coordinate(cube, [0, 1, 2],
                           "realization",
                           dtype=np.int32,
                           coord_units="1")
     cube = add_coordinate(cube, [10, 50, 90],
                           "percentile",
                           dtype=np.float32,
                           coord_units="%")
     cube.transpose([4, 3, 2, 1, 0])
     save_netcdf(cube, self.filepath)
     result = load_cube(self.filepath)
     threshold_coord = find_threshold_coordinate(result)
     self.assertEqual(result.coord_dims("realization")[0], 0)
     self.assertEqual(result.coord_dims("percentile")[0], 1)
     self.assertEqual(result.coord_dims(threshold_coord)[0], 2)
     self.assertEqual(result.coord_dims("latitude")[0], 3)
     self.assertEqual(result.coord_dims("longitude")[0], 4)
Example #9
def main(argv=None):
    """Load in arguments to calculate mean wind direction from ensemble
       realizations."""

    cli_specific_arguments = [(['--backup_method'],
                               {'dest': 'backup_method',
                                'default': 'neighbourhood',
                                'choices': ['neighbourhood',
                                            'first_realization'],
                                'help': ('Backup method to use if '
                                         'there is low confidence in'
                                         ' the wind_direction. '
                                         'Options are first_realization'
                                         ' or neighbourhood, '
                                         'first_realization should only '
                                         'be used with global lat-lon data. '
                                         'Default is neighbourhood.')})]

    cli_definition = {'central_arguments': ('input_file', 'output_file'),
                      'specific_arguments': cli_specific_arguments,
                      'description': ('Run wind direction to calculate mean'
                                      ' wind direction from '
                                      'ensemble realizations')}

    args = ArgParser(**cli_definition).parse_args(args=argv)

    # Load Cube
    wind_direction = load_cube(args.input_filepath)

    # Returns 3 cubes - r_vals and confidence_measure cubes currently
    # only contain experimental data to be used for further research.
    # Process Cube
    cube_mean_wdir, _, _ = process(wind_direction, args.backup_method)

    # Save Cube
    save_netcdf(cube_mean_wdir, args.output_filepath)
Example #10
def main(argv=None):
    """Generate target grid with a halo around the source file grid."""

    parser = ArgParser(description='Generate grid with halo from a source '
                       'domain input file. The grid is populated with zeroes.')
    parser.add_argument('input_file',
                        metavar='INPUT_FILE',
                        help="NetCDF file "
                        "containing data on a source grid.")
    parser.add_argument('output_file',
                        metavar='OUTPUT_FILE',
                        help="NetCDF "
                        "file defining the target grid with additional halo.")
    parser.add_argument('--halo_radius',
                        metavar='HALO_RADIUS',
                        default=162000,
                        type=float,
                        help="Size of halo (in m) with which to "
                        "pad the input grid.  Default is 162 000 m.")
    args = parser.parse_args(args=argv)

    cube = load_cube(args.input_file)
    halo_cube = create_cube_with_halo(cube, args.halo_radius)
    save_netcdf(halo_cube, args.output_file)
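The halo-padding idea can be sketched with numpy alone: convert the halo radius into a whole number of grid cells (the grid spacing below is hypothetical) and pad the grid with zeroes on every side, mirroring the "populated with zeroes" behaviour the description mentions.

import numpy as np

grid = np.ones((4, 4))
halo_radius_m = 162000.0
grid_spacing_m = 54000.0  # hypothetical uniform spacing
halo_cells = int(np.ceil(halo_radius_m / grid_spacing_m))

# Zero-padded target grid with the halo added on all sides.
padded = np.pad(grid, halo_cells, mode="constant", constant_values=0.0)
print(padded.shape)  # (10, 10)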
Example #11
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description="Calculate percentiled data over a given coordinate by "
        "collapsing that coordinate. Typically used to convert realization "
        "data into percentiled data, but may calculate over any "
        "dimension coordinate. Alternatively, calling this CLI with a dataset"
        " containing probabilities will convert those to percentiles using "
        "the ensemble copula coupling plugin. If no particular percentiles "
        "are given at which to calculate values and no 'number of percentiles'"
        " to calculate are specified, the following defaults will be used: "
        "[0, 5, 10, 20, 25, 30, 40, 50, 60, 70, 75, 80, 90, 95, 100]")
    parser.add_argument("input_filepath",
                        metavar="INPUT_FILE",
                        help="A path to an input NetCDF file to be processed")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")
    parser.add_argument("--coordinates",
                        metavar="COORDINATES_TO_COLLAPSE",
                        nargs="+",
                        help="Coordinate or coordinates over which to collapse"
                        " data and calculate percentiles; e.g. "
                        "'realization' or 'latitude longitude'. This argument "
                        "must be provided when collapsing a coordinate or "
                        "coordinates to create percentiles, but is redundant "
                        "when converting probabilities to percentiles and may "
                        "be omitted. This coordinate(s) will be removed "
                        "and replaced by a percentile coordinate.")
    parser.add_argument('--ecc_bounds_warning',
                        default=False,
                        action='store_true',
                        help='If True, where calculated percentiles are '
                        'outside the ECC bounds range, raise a warning '
                        'rather than an exception.')
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("--percentiles",
                       metavar="PERCENTILES",
                       nargs="+",
                       default=None,
                       type=float,
                       help="Optional definition of percentiles at which to "
                       "calculate data, e.g. --percentiles 0 33.3 66.6 100")
    group.add_argument('--no-of-percentiles',
                       default=None,
                       type=int,
                       metavar='NUMBER_OF_PERCENTILES',
                       help="Optional definition of the number of percentiles "
                       "to be generated, these distributed regularly with the "
                       "aim of dividing into blocks of equal probability.")

    args = parser.parse_args(args=argv)
    cube = load_cube(args.input_filepath)
    percentiles = args.percentiles
    if args.no_of_percentiles is not None:
        percentiles = choose_set_of_percentiles(args.no_of_percentiles,
                                                sampling="quantile")
    # TODO: Correct when formal cf-standards exists
    if 'probability_of_' in cube.name():
        if args.coordinates:
            warnings.warn("Converting probabilities to percentiles. The "
                          "provided COORDINATES_TO_COLLAPSE variable will "
                          "not be used.")

        result = GeneratePercentilesFromProbabilities(
            ecc_bounds_warning=args.ecc_bounds_warning).process(
                cube, percentiles=percentiles)
    else:
        if not args.coordinates:
            raise ValueError("To collapse a coordinate to calculate "
                             "percentiles, a coordinate or list of "
                             "coordinates must be provided.")

        # Switch back to use the slow scipy method if the cube contains masked
        # data which the numpy method cannot handle.
        fast_percentile_method = True

        if np.ma.is_masked(cube.data):
            # Check for masked points:
            fast_percentile_method = False
        elif np.ma.isMaskedArray(cube.data):
            # Check if we have a masked array with an empty mask. If so,
            # replace it with a non-masked array:
            cube.data = cube.data.data

        result = PercentileConverter(
            args.coordinates,
            percentiles=percentiles,
            fast_percentile_method=fast_percentile_method).process(cube)

    save_netcdf(result, args.output_filepath)
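The two masked-data checks near the end of this example are easy to conflate: np.ma.is_masked asks whether any element is actually masked, while np.ma.isMaskedArray only asks about the type. A quick sketch of the distinction:

import numpy as np

unmasked = np.ma.masked_array([1.0, 2.0, 3.0])                # mask is all False
masked = np.ma.masked_array([1.0, 2.0, 3.0], mask=[0, 1, 0])

print(np.ma.is_masked(unmasked))      # False: no element is masked
print(np.ma.is_masked(masked))        # True: at least one masked point
print(np.ma.isMaskedArray(unmasked))  # True: still a MaskedArray instance

# Hence the example above keeps the fast numpy path for a MaskedArray with
# an empty mask by swapping in the plain underlying data.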
Example #12
    def setUp(self):
        """Create a cube containing a regular lat-lon grid and other necessary
        ingredients for unit tests."""

        data = np.arange(0, 800, 1)
        data.resize(2, 20, 20)
        latitudes = np.linspace(-90, 90, 20)
        longitudes = np.linspace(-180, 180, 20)
        latitude = DimCoord(latitudes, standard_name='latitude',
                            units='degrees', var_name='latitude')
        longitude = DimCoord(longitudes, standard_name='longitude',
                             units='degrees', var_name='longitude')

        # Use time of 2017-02-17 06:00:00, 07:00:00
        time = DimCoord(
            [1487311200, 1487314800], standard_name='time',
            units=cf_units.Unit('seconds since 1970-01-01 00:00:00',
                                calendar='gregorian'), var_name='time')

        time_dt = dt(2017, 2, 17, 6, 0)
        time_extract = Constraint(time=PartialDateTime(
            time_dt.year, time_dt.month, time_dt.day, time_dt.hour))

        cube = Cube(data,
                    long_name="air_temperature",
                    dim_coords_and_dims=[(time, 0),
                                         (latitude, 1),
                                         (longitude, 2)],
                    units="K")
        cube2 = cube.copy()

        orography = Cube(np.ones((20, 20)),
                         long_name="surface_altitude",
                         dim_coords_and_dims=[(latitude, 0),
                                              (longitude, 1)],
                         units="m")

        land = orography.copy()
        land.rename('land_binary_mask')
        land.data = land.data + 1

        ancillary_data = {}
        ancillary_data.update({'orography': orography})
        ancillary_data.update({'land_mask': land})

        # Copies of cube simply renamed to be read as additional data.
        temperature_on_height_levels = cube.copy()
        temperature_on_height_levels.rename('temperature_on_height_levels')
        pressure_on_height_levels = cube.copy()
        pressure_on_height_levels.rename('pressure_on_height_levels')
        surface_pressure = cube.copy()
        surface_pressure.rename('surface_pressure')

        # Build reference copy of additional_data dictionary.
        with iris.FUTURE.context(cell_datetime_objects=True):
            additional_data = {
                'temperature_on_height_levels': CubeList(
                    [temperature_on_height_levels]),
                'pressure_on_height_levels': CubeList([
                    pressure_on_height_levels]),
                'surface_pressure': CubeList([surface_pressure])
                }

        self.data_directory = mkdtemp()

        self.cube_file = os.path.join(
            self.data_directory,
            '01-temperature_at_screen_level.nc')
        self.cube_file2 = os.path.join(
            self.data_directory,
            '02-temperature_at_screen_level.nc')
        orography_file = os.path.join(self.data_directory, 'orography.nc')
        land_file = os.path.join(self.data_directory, 'land_mask.nc')
        ad_file_temperature = os.path.join(
            self.data_directory,
            'temperature_on_height_levels.nc')
        ad_file_pressure = os.path.join(
            self.data_directory,
            'pressure_on_height_levels.nc')
        ad_file_s_pressure = os.path.join(
            self.data_directory, 'surface_pressure.nc')

        save_netcdf(cube, self.cube_file)
        save_netcdf(cube2, self.cube_file2)
        save_netcdf(orography, orography_file)
        save_netcdf(land, land_file)
        save_netcdf(temperature_on_height_levels, ad_file_temperature)
        save_netcdf(pressure_on_height_levels, ad_file_pressure)
        save_netcdf(surface_pressure, ad_file_s_pressure)

        diagnostic_recipe = {
            "temperature": {
                "diagnostic_name": "air_temperature",
                "extrema": True,
                "filepath": "temperature_at_screen_level",
                "neighbour_finding": {
                    "land_constraint": False,
                    "method": "fast_nearest_neighbour",
                    "vertical_bias": None
                    }
                }
            }

        self.config_file = os.path.join(
            self.data_directory, 'spotdata_diagnostics.json')
        ff = open(self.config_file, 'w')
        json.dump(diagnostic_recipe, ff, sort_keys=True, indent=4,
                  separators=(',', ': ',))
        ff.close()

        self.made_files = [self.cube_file, self.cube_file2, orography_file,
                           land_file, ad_file_temperature, ad_file_pressure,
                           ad_file_s_pressure, self.config_file]

        self.cube = cube
        self.cube2 = cube2
        self.temperature_on_height_levels = temperature_on_height_levels
        self.ancillary_data = ancillary_data
        self.additional_data = additional_data
        self.time_extract = time_extract
Example #13
def main(argv=None):
    """Calculate temperature lapse rates."""
    parser = ArgParser(
        description='Calculate temperature lapse rates in units of K m-1 '
        'over a given orography grid. ')
    parser.add_argument('temperature_filepath',
                        metavar='INPUT_TEMPERATURE_FILE',
                        help='A path to an input NetCDF temperature file to '
                        'be processed. ')
    parser.add_argument('--orography_filepath',
                        metavar='INPUT_OROGRAPHY_FILE',
                        help='A path to an input NetCDF orography file. ')
    parser.add_argument('--land_sea_mask_filepath',
                        metavar='LAND_SEA_MASK_FILE',
                        help='A path to an input NetCDF land/sea mask file. ')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed temperature '
                        'lapse rates NetCDF. ')
    parser.add_argument('--max_height_diff',
                        metavar='MAX_HEIGHT_DIFF',
                        type=float,
                        default=35,
                        help='Maximum allowable height difference between the '
                        'central point and points in the neighbourhood '
                        'over which the lapse rate will be calculated '
                        '(metres).')
    parser.add_argument('--nbhood_radius',
                        metavar='NBHOOD_RADIUS',
                        type=int,
                        default=7,
                        help='Radius of neighbourhood around each point. '
                        'The neighbourhood will be a square array with '
                        'side length 2*nbhood_radius + 1.')
    parser.add_argument('--max_lapse_rate',
                        metavar='MAX_LAPSE_RATE',
                        type=float,
                        default=-3 * DALR,
                        help='Maximum lapse rate allowed which must be '
                        'provided in units of K m-1. Default is -3*DALR')
    parser.add_argument('--min_lapse_rate',
                        metavar='MIN_LAPSE_RATE',
                        type=float,
                        default=DALR,
                        help='Minimum lapse rate allowed which must be '
                        'provided in units of K m-1. Default is the DALR')
    parser.add_argument('--return_dalr',
                        action='store_true',
                        default=False,
                        help='Flag to return a cube containing the dry '
                        'adiabatic lapse rate rather than calculating '
                        'the true lapse rate.')

    args = parser.parse_args(args=argv)

    # Load Cubes
    temperature_cube = load_cube(args.temperature_filepath)
    orography_cube = None
    land_sea_mask_cube = None
    if not args.return_dalr:
        orography_cube = load_cube(args.orography_filepath)
        land_sea_mask_cube = load_cube(args.land_sea_mask_filepath)

    # Process Cube
    result = process(temperature_cube, orography_cube, land_sea_mask_cube,
                     args.max_height_diff, args.nbhood_radius,
                     args.max_lapse_rate, args.min_lapse_rate,
                     args.return_dalr)

    # Save Cube
    save_netcdf(result, args.output_filepath)
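For context on the default bounds: DALR is IMPROVER's dry adiabatic lapse rate constant, assumed here to be approximately -0.0098 K m-1, so min_lapse_rate=DALR and max_lapse_rate=-3*DALR bracket the signed lapse rate. A minimal clamping sketch under that assumption:

import numpy as np

DALR = -0.0098  # dry adiabatic lapse rate in K m-1 (assumed value)

lapse_rates = np.array([-0.02, -0.005, 0.01, 0.04])
clamped = np.clip(lapse_rates, DALR, -3 * DALR)
print(clamped)  # [-0.0098 -0.005   0.01    0.0294]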
Example #14
 def test_update_least_significant_digit(self):
     """Test bitshaving updates metadata correctly if already present"""
     self.cube.attributes["least_significant_digit"] = 0
     save_netcdf(self.cube, self.filepath, least_significant_digit=2)
     cube = load_cube(self.filepath)
     self.assertEqual(cube.attributes["least_significant_digit"], 2)
Example #15
def main(argv=None):
    """Load in arguments for estimating coefficients for Ensemble Model Output
       Statistics (EMOS), otherwise known as Non-homogeneous Gaussian
       Regression (NGR). Two sources of input data must be provided: historical
       forecasts and historical truth data (to use in calibration). The
       estimated coefficients are written to a netCDF file.
    """
    parser = ArgParser(
        description='Estimate coefficients for Ensemble Model Output '
                    'Statistics (EMOS), otherwise known as Non-homogeneous '
                    'Gaussian Regression (NGR). There are two methods for '
                    'inputting data into this CLI, either by providing the '
                    'historic forecasts and truth separately, or by providing '
                    'a combined list of historic forecasts and truths along '
                    'with historic_forecast_identifier and truth_identifier '
                    'arguments to provide metadata that distinguishes between '
                    'them.')
    parser.add_argument('distribution', metavar='DISTRIBUTION',
                        choices=['gaussian', 'truncated_gaussian'],
                        help='The distribution that will be used for '
                             'calibration. This will be dependent upon the '
                             'input phenomenon. This has to be supported by '
                             'the minimisation functions in '
                             'ContinuousRankedProbabilityScoreMinimisers.')
    parser.add_argument('cycletime', metavar='CYCLETIME', type=str,
                        help='This denotes the cycle at which forecasts '
                             'will be calibrated using the calculated '
                             'EMOS coefficients. The validity time in the '
                             'output coefficients cube will be calculated '
                             'relative to this cycletime. '
                             'This cycletime is in the format '
                             'YYYYMMDDTHHMMZ.')

    # Historic forecast and truth filepaths
    parser.add_argument(
        '--historic_filepath', metavar='HISTORIC_FILEPATH', nargs='+',
        help='Paths to the input NetCDF files containing the '
             'historic forecast(s) used for calibration. '
             'This must be supplied with an associated truth filepath. '
             'Specification of either the combined_filepath, '
             'historic_forecast_identifier or the truth_identifier is '
             'invalid with this argument.')
    parser.add_argument(
        '--truth_filepath', metavar='TRUTH_FILEPATH', nargs='+',
        help='Paths to the input NetCDF files containing the '
             'historic truth analyses used for calibration. '
             'This must be supplied with an associated historic filepath. '
             'Specification of either the combined_filepath, '
             'historic_forecast_identifier or the truth_identifier is '
             'invalid with this argument.')

    # Input filepaths
    parser.add_argument(
        '--combined_filepath', metavar='COMBINED_FILEPATH', nargs='+',
        help='Paths to the input NetCDF files containing '
             'both the historic forecast(s) and truth '
             'analyses used for calibration. '
             'This must be supplied with both the '
             'historic_forecast_identifier and the truth_identifier. '
             'Specification of either the historic_filepath or the '
             'truth_filepath is invalid with this argument.')
    parser.add_argument(
        "--historic_forecast_identifier",
        metavar='HISTORIC_FORECAST_IDENTIFIER',
        help='The path to a json file containing metadata '
             'information that defines the historic forecast. '
             'This must be supplied with both the combined_filepath and the '
             'truth_identifier. Specification of either the historic_filepath '
             'or the truth_filepath is invalid with this argument. '
             'The intended contents are described in improver.'
             'ensemble_calibration.ensemble_calibration_utilities.'
             'SplitHistoricForecastAndTruth.')
    parser.add_argument(
        "--truth_identifier", metavar='TRUTH_IDENTIFIER',
        help='The path to a json file containing metadata '
             'information that defines the truth. '
             'This must be supplied with both the combined_filepath and the '
             'historic_forecast_identifier. Specification of either the '
             'historic_filepath or the truth_filepath is invalid with this '
             'argument. The intended contents are described in improver.'
             'ensemble_calibration.ensemble_calibration_utilities.'
             'SplitHistoricForecastAndTruth.')

    # Output filepath
    parser.add_argument('output_filepath', metavar='OUTPUT_FILEPATH',
                        help='The output path for the processed NetCDF')
    # Optional arguments.
    parser.add_argument('--units', metavar='UNITS',
                        help='The units that calibration should be undertaken '
                             'in. The historical forecast and truth will be '
                             'converted as required.')
    parser.add_argument('--predictor_of_mean', metavar='PREDICTOR_OF_MEAN',
                        choices=['mean', 'realizations'], default='mean',
                        help='String to specify the predictor used to '
                             'calibrate the forecast mean. Currently the '
                             'ensemble mean ("mean") and the ensemble '
                             'realizations ("realizations") are supported as '
                             'options. Default: "mean".')
    parser.add_argument('--max_iterations', metavar='MAX_ITERATIONS',
                        type=np.int32, default=1000,
                        help='The maximum number of iterations allowed '
                             'until the minimisation has converged to a '
                             'stable solution. If the maximum number '
                             'of iterations is reached, but the '
                             'minimisation has not yet converged to a '
                             'stable solution, then the available solution '
                             'is used anyway, and a warning is raised. '
                             'This may be modified for testing purposes '
                             'but otherwise kept fixed. If the '
                             'predictor_of_mean is "realizations", '
                             'then the number of iterations may require '
                             'increasing, as there will be more coefficients '
                             'to solve for.')
    args = parser.parse_args(args=argv)

    # Load Cubes
    historic_forecast = load_cube(args.historic_filepath, allow_none=True)
    truth = load_cube(args.truth_filepath, allow_none=True)

    combined = (load_cubelist(args.combined_filepath)
                if args.combined_filepath else None)
    historic_forecast_dict = (
        load_json_or_none(args.historic_forecast_identifier))
    truth_dict = load_json_or_none(args.truth_identifier)

    # Process Cube
    coefficients = process(historic_forecast, truth, combined,
                           historic_forecast_dict, truth_dict,
                           args.distribution, args.cycletime, args.units,
                           args.predictor_of_mean, args.max_iterations)
    # Save Cube
    # Check whether a coefficients cube has been created. If the historic
    # forecasts and truths provided did not match in validity time, then
    # no coefficients would have been calculated.
    if coefficients:
        save_netcdf(coefficients, args.output_filepath)
Example #16
def main(argv=None):
    """Calculate orographic enhancement of precipitation from model pressure,
    temperature, relative humidity and wind input files"""

    parser = ArgParser(description='Calculate orographic enhancement using the'
                       ' ResolveWindComponents() and OrographicEnhancement() '
                       'plugins. Outputs data on the high resolution orography'
                       ' grid and regridded to the coarser resolution of the '
                       'input diagnostic variables.')

    parser.add_argument('temperature_filepath',
                        metavar='TEMPERATURE_FILEPATH',
                        help='Full path to input NetCDF file of temperature on'
                        ' height levels')
    parser.add_argument('humidity_filepath',
                        metavar='HUMIDITY_FILEPATH',
                        help='Full path to input NetCDF file of relative '
                        'humidity on height levels')
    parser.add_argument('pressure_filepath',
                        metavar='PRESSURE_FILEPATH',
                        help='Full path to input NetCDF file of pressure on '
                        'height levels')
    parser.add_argument('windspeed_filepath',
                        metavar='WINDSPEED_FILEPATH',
                        help='Full path to input NetCDF file of wind speed on '
                        'height levels')
    parser.add_argument('winddir_filepath',
                        metavar='WINDDIR_FILEPATH',
                        help='Full path to input NetCDF file of wind direction'
                        ' on height levels')
    parser.add_argument('orography_filepath',
                        metavar='OROGRAPHY_FILEPATH',
                        help='Full path to input NetCDF high resolution '
                        'orography ancillary. This should be on the same or a '
                        'finer resolution grid than the input variables, and '
                        'defines the grid on which the orographic enhancement '
                        'will be calculated.')
    parser.add_argument('output_dir',
                        metavar='OUTPUT_DIR',
                        help='Directory '
                        'to write output orographic enhancement files')
    parser.add_argument('--boundary_height',
                        type=float,
                        default=1000.,
                        help='Model height level to extract variables for '
                        'calculating orographic enhancement, as proxy for '
                        'the boundary layer.')
    parser.add_argument('--boundary_height_units',
                        type=str,
                        default='m',
                        help='Units of the boundary height specified for '
                        'extracting model levels.')

    args = parser.parse_args(args=argv)

    constraint_info = (args.boundary_height, args.boundary_height_units)

    temperature = load_and_extract(args.temperature_filepath, *constraint_info)
    humidity = load_and_extract(args.humidity_filepath, *constraint_info)
    pressure = load_and_extract(args.pressure_filepath, *constraint_info)
    wind_speed = load_and_extract(args.windspeed_filepath, *constraint_info)
    wind_dir = load_and_extract(args.winddir_filepath, *constraint_info)

    # load high resolution orography
    orography = load_cube(args.orography_filepath)

    orogenh_high_res, orogenh_standard = process(temperature, humidity,
                                                 pressure, wind_speed,
                                                 wind_dir, orography)

    # generate file names
    fname_standard = os.path.join(args.output_dir,
                                  generate_file_name(orogenh_standard))
    fname_high_res = os.path.join(
        args.output_dir,
        generate_file_name(orogenh_high_res,
                           parameter="orographic_enhancement_high_resolution"))

    # save output files
    save_netcdf(orogenh_standard, fname_standard)
    save_netcdf(orogenh_high_res, fname_high_res)
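The load_and_extract helper is used here to pull a single boundary-layer level out of each diagnostic. A hypothetical reconstruction of what such a helper might do with plain iris (not IMPROVER's actual implementation):

import iris

def load_and_extract(filepath, height, units):
    """Load a cube and extract the level at the requested height."""
    cube = iris.load_cube(filepath)
    # Convert the height coordinate so the constraint value is comparable.
    cube.coord("height").convert_units(units)
    return cube.extract(iris.Constraint(height=height))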
Example #17
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description="Calculate percentiled data over a given coordinate by "
        "collapsing that coordinate. Typically used to convert realization "
        "data into percentiled data, but may calculate over any "
        "dimension coordinate. Alternatively, calling this CLI with a dataset"
        " containing probabilities will convert those to percentiles using "
        "the ensemble copula coupling plugin. If no particular percentiles "
        "are given at which to calculate values and no 'number of percentiles'"
        " to calculate are specified, the following defaults will be used: "
        "[0, 5, 10, 20, 25, 30, 40, 50, 60, 70, 75, 80, 90, 95, 100]")
    parser.add_argument("input_filepath",
                        metavar="INPUT_FILE",
                        help="A path to an input NetCDF file to be processed")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")
    parser.add_argument("--coordinates",
                        metavar="COORDINATES_TO_COLLAPSE",
                        nargs="+",
                        help="Coordinate or coordinates over which to collapse"
                        " data and calculate percentiles; e.g. "
                        "'realization' or 'latitude longitude'. This argument "
                        "must be provided when collapsing a coordinate or "
                        "coordinates to create percentiles, but is redundant "
                        "when converting probabilities to percentiles and may "
                        "be omitted. This coordinate(s) will be removed "
                        "and replaced by a percentile coordinate.")
    parser.add_argument('--ecc_bounds_warning',
                        default=False,
                        action='store_true',
                        help='If True, where calculated percentiles are '
                        'outside the ECC bounds range, raise a warning '
                        'rather than an exception.')
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("--percentiles",
                       metavar="PERCENTILES",
                       nargs="+",
                       default=None,
                       type=float,
                       help="Optional definition of percentiles at which to "
                       "calculate data, e.g. --percentiles 0 33.3 66.6 100")
    group.add_argument('--no-of-percentiles',
                       default=None,
                       type=int,
                       metavar='NUMBER_OF_PERCENTILES',
                       help="Optional definition of the number of percentiles "
                       "to be generated, these distributed regularly with the "
                       "aim of dividing into blocks of equal probability.")

    args = parser.parse_args(args=argv)

    # Load Cube
    cube = load_cube(args.input_filepath)

    # Process Cube
    result = process(cube, args.coordinates, args.ecc_bounds_warning,
                     args.percentiles, args.no_of_percentiles)

    # Save Cube
    save_netcdf(result, args.output_filepath)
Example #18
def main(argv=None):
    """Load in arguments for applying neighbourhood processing when using a
    mask."""
    parser = ArgParser(
        description='Neighbourhood the input dataset over two distinct regions'
        ' of land and sea. If performed as a single level neighbourhood, a '
        'land-sea mask should be provided. If instead topographic_zone '
        'neighbourhooding is being employed, the mask should be one of '
        'topographic zones. In the latter case a weights array is also needed'
        ' to collapse the topographic_zone coordinate. These weights are '
        'created with the improver generate-topography-bands-weights CLI and '
        'should be made using a land-sea mask, which will then be employed '
        'within this code to draw the distinction between the two surface '
        'types.')

    parser.add_argument('input_filepath',
                        metavar='INPUT_FILE',
                        help='A path to an input NetCDF file to be processed.')
    parser.add_argument('input_mask_filepath',
                        metavar='INPUT_MASK',
                        help=('A path to an input NetCDF file containing '
                              'either a mask of topographic zones over land '
                              'or a land-sea mask.'))
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')

    mask_group = parser.add_argument_group(
        'Collapse weights - required if using a topographic zones mask')
    mask_group.add_argument('--weights_for_collapsing_dim',
                            metavar='WEIGHTS',
                            default=None,
                            help='A path to a weights NetCDF file containing '
                            'the weights which are used for collapsing the '
                            'dimension gained through masking. These weights '
                            'must have been created using a land-sea mask.')

    radius_group = parser.add_argument_group(
        'Neighbourhooding Radius - Set only one of the options')
    group = radius_group.add_mutually_exclusive_group()
    group.add_argument('--radius',
                       metavar='RADIUS',
                       type=float,
                       help='The radius (in m) for neighbourhood processing.')
    group.add_argument('--radii-by-lead-time',
                       metavar=('RADII_BY_LEAD_TIME', 'LEAD_TIME_IN_HOURS'),
                       nargs=2,
                       help='The radii for neighbourhood processing '
                       'and the associated lead times at which the radii are '
                       'valid. The radii are in metres whilst the lead time '
                       'has units of hours. The radii and lead times are '
                       'expected as individual comma-separated lists with '
                       'the list of radii given first followed by a list of '
                       'lead times to indicate at what lead time each radius '
                       'should be used. For example: 10000,12000,14000 1,2,3 '
                       'where a lead time of 1 hour uses a radius of 10000m, '
                       'a lead time of 2 hours uses a radius of 12000m, etc.')
    parser.add_argument('--sum_or_fraction',
                        default="fraction",
                        choices=["sum", "fraction"],
                        help='The neighbourhood output can either be in the '
                        'form of a sum of the neighbourhood, or a '
                        'fraction calculated by dividing the sum of the '
                        'neighbourhood by the neighbourhood area. '
                        '"fraction" is the default option.')
    parser.add_argument('--intermediate_filepath',
                        default=None,
                        help='Intermediate filepath for results following '
                        'topographic masked neighbourhood processing of '
                        'land points and prior to collapsing the '
                        'topographic_zone coordinate. Intermediate files '
                        'will not be produced if no topographic masked '
                        'neighbourhood processing occurs.')

    args = parser.parse_args(args=argv)

    cube = load_cube(args.input_filepath)
    mask = load_cube(args.input_mask_filepath, no_lazy_load=True)
    masking_coordinate = None

    if any([
            'topographic_zone' in coord.name()
            for coord in mask.coords(dim_coords=True)
    ]):

        if mask.attributes['topographic_zones_include_seapoints'] == 'True':
            raise ValueError('The topographic zones mask cube must have been '
                             'masked to exclude sea points, but '
                             'topographic_zones_include_seapoints = True')

        if not args.weights_for_collapsing_dim:
            raise IOError('A weights cube must be provided if using a mask '
                          'of topographic zones to collapse the resulting '
                          'vertical dimension.')

        weights = load_cube(args.weights_for_collapsing_dim, no_lazy_load=True)
        if weights.attributes['topographic_zones_include_seapoints'] == 'True':
            raise ValueError('The weights cube must be masked to exclude sea '
                             'points, but topographic_zones_include_seapoints '
                             '= True')

        masking_coordinate = 'topographic_zone'
        landmask = weights[0].copy(data=weights[0].data.mask)
        landmask.rename('land_binary_mask')
        landmask.remove_coord(masking_coordinate)
        # Create land and sea masks in IMPROVER format (inverse of the
        # numpy standard): 1 - include this region, 0 - exclude this region.
        land_only = landmask.copy(
            data=np.logical_not(landmask.data).astype(int))
        sea_only = landmask.copy(data=landmask.data.astype(int))

    else:
        if args.weights_for_collapsing_dim:
            warnings.warn('A weights cube has been provided but will not be '
                          'used as there is no topographic zone coordinate '
                          'to collapse.')
        landmask = mask
        # In this case the land is set to 1 and the sea is set to 0 in the
        # input mask.
        sea_only = landmask.copy(
            data=np.logical_not(landmask.data).astype(int))
        land_only = landmask.copy(data=landmask.data.astype(int))

    if args.radius:
        radius_or_radii = args.radius
        lead_times = None
    elif args.radii_by_lead_time:
        radius_or_radii = args.radii_by_lead_time[0].split(",")
        lead_times = args.radii_by_lead_time[1].split(",")

    if args.intermediate_filepath is not None and masking_coordinate is None:
        msg = ('No topographic_zone coordinate found, so no intermediate file '
               'will be saved.')
        warnings.warn(msg)

    # Section for neighbourhood processing land points.
    if land_only.data.max() > 0.0:
        if masking_coordinate is not None:
            result_land = ApplyNeighbourhoodProcessingWithAMask(
                masking_coordinate,
                radius_or_radii,
                lead_times=lead_times,
                sum_or_fraction=args.sum_or_fraction,
                re_mask=False).process(cube, mask)
        else:
            result_land = NeighbourhoodProcessing(
                'square',
                radius_or_radii,
                lead_times=lead_times,
                sum_or_fraction=args.sum_or_fraction,
                re_mask=True).process(cube, land_only)

        if masking_coordinate is not None:
            if args.intermediate_filepath is not None:
                save_netcdf(result_land, args.intermediate_filepath)
            # Collapse the masking coordinate.
            result_land = CollapseMaskedNeighbourhoodCoordinate(
                masking_coordinate, weights=weights).process(result_land)

        result = result_land

    # Section for neighbourhood processing sea points.
    if sea_only.data.max() > 0.0:
        result_sea = NeighbourhoodProcessing(
            'square',
            radius_or_radii,
            lead_times=lead_times,
            sum_or_fraction=args.sum_or_fraction,
            re_mask=True).process(cube, sea_only)

        result = result_sea

    # Section for combining land and sea points following land and sea points
    # being neighbourhood processed individually.
    if sea_only.data.max() > 0.0 and land_only.data.max() > 0.0:
        # Recombine cubes to be a single output.
        combined_data = result_land.data.filled(0) + result_sea.data.filled(0)
        result = result_land.copy(data=combined_data)

    save_netcdf(result, args.output_filepath)
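The land/sea recombination at the end relies on each re-masked result being zero-filled where it was masked, so that the two fields sum to one complete grid. A small numpy sketch of that step:

import numpy as np

land = np.ma.masked_array([[1.0, 2.0], [9.0, 9.0]], mask=[[0, 0], [1, 1]])
sea = np.ma.masked_array([[9.0, 9.0], [5.0, 6.0]], mask=[[1, 1], [0, 0]])

# Masked points become zero, so summing recombines the two regions.
combined = land.filled(0) + sea.filled(0)
print(combined)  # [[1. 2.] [5. 6.]]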
Example #19
def main(argv=None):
    """Load in arguments and ensure they are set correctly.
       Then load in the data to blend and calculate default weights
       using the method chosen before carrying out the blending."""
    parser = ArgParser(
        description='Calculate the default weights to apply in weighted '
        'blending plugins using the ChooseDefaultWeightsLinear or '
        'ChooseDefaultWeightsNonLinear plugins. Then apply these '
        'weights to the dataset using the BasicWeightedAverage plugin.'
        ' Required for ChooseDefaultWeightsLinear: y0val and ynval.'
        ' Required for ChooseDefaultWeightsNonLinear: cval.'
        ' Required for ChooseWeightsLinear with dict: wts_dict.')

    parser.add_argument('--wts_calc_method',
                        metavar='WEIGHTS_CALCULATION_METHOD',
                        choices=['linear', 'nonlinear', 'dict'],
                        default='linear',
                        help='Method to use to calculate '
                        'weights used in blending. "linear" (default): '
                        'calculate linearly varying blending weights. '
                        '"nonlinear": calculate blending weights that decrease'
                        ' exponentially with increasing blending coordinate. '
                        '"dict": calculate weights using a dictionary passed '
                        'in as a command line argument.')

    parser.add_argument('coordinate',
                        type=str,
                        metavar='COORDINATE_TO_AVERAGE_OVER',
                        help='The coordinate over which the blending '
                        'will be applied.')
    parser.add_argument('--cycletime',
                        metavar='CYCLETIME',
                        type=str,
                        help='The forecast reference time to be used after '
                        'blending has been applied, in the format '
                        'YYYYMMDDTHHMMZ. If not provided, the blended file '
                        'will take the latest available forecast reference '
                        'time from the input cubes supplied.')
    parser.add_argument('--model_id_attr',
                        metavar='MODEL_ID_ATTR',
                        type=str,
                        default=None,
                        help='The name of the netCDF file attribute to be '
                        'used to identify the source model for '
                        'multi-model blends. Default is None. '
                        'Must be present on all input '
                        'files if blending over models.')
    parser.add_argument('--spatial_weights_from_mask',
                        action='store_true',
                        default=False,
                        help='If set this option will result in the generation'
                        ' of spatially varying weights based on the'
                        ' masks of the data we are blending. The'
                        ' one dimensional weights are first calculated'
                        ' using the chosen weights calculation method,'
                        ' but the weights will then be adjusted spatially'
                        ' based on where there is masked data in the data'
                        ' we are blending. The spatial weights are'
                        ' calculated using the'
                        ' SpatiallyVaryingWeightsFromMask plugin.')

    parser.add_argument('input_filepaths',
                        metavar='INPUT_FILES',
                        nargs="+",
                        help='Paths to input files to be blended.')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')

    spatial = parser.add_argument_group(
        'Spatial weights from mask options',
        'Options for calculating the spatial weights using the '
        'SpatiallyVaryingWeightsFromMask plugin.')
    spatial.add_argument('--fuzzy_length',
                         metavar='FUZZY_LENGTH',
                         type=float,
                         default=20000,
                         help='When calculating spatially varying weights we'
                         ' can smooth the weights so that areas close to'
                         ' areas that are masked have lower weights than'
                         ' those further away. This fuzzy length controls'
                         ' the scale over which the weights are smoothed.'
                         ' The fuzzy length is in terms of m, the'
                         ' default is 20km. This distance is then'
                         ' converted into a number of grid squares,'
                         ' which does not have to be an integer. Assumes'
                         ' the grid spacing is the same in the x and y'
                         ' directions, and raises an error if this is not'
                         ' true. See SpatiallyVaryingWeightsFromMask for'
                         ' more detail.')

    linear = parser.add_argument_group(
        'linear weights options', 'Options for the linear weights '
        'calculation in '
        'ChooseDefaultWeightsLinear')
    linear.add_argument('--y0val',
                        metavar='LINEAR_STARTING_POINT',
                        type=float,
                        help='The relative value of the weighting start point '
                        '(lowest value of blend coord) for choosing default '
                        'linear weights. This must be a positive float or 0.')
    linear.add_argument('--ynval',
                        metavar='LINEAR_END_POINT',
                        type=float,
                        help='The relative value of the weighting '
                        'end point (highest value of blend coord) for choosing'
                        ' default linear weights. This must be a positive '
                        'float or 0.  Note that if blending over forecast '
                        'reference time, ynval >= y0val would normally be '
                        'expected (to give greater weight to the more recent '
                        'forecast).')

    nonlinear = parser.add_argument_group(
        'nonlinear weights options', 'Options for the non-linear '
        'weights calculation in '
        'ChooseDefaultWeightsNonLinear')
    nonlinear.add_argument('--cval',
                           metavar='NON_LINEAR_FACTOR',
                           type=float,
                           help='Factor used to determine how skewed the '
                           'non-linear weights will be. A value of 1 '
                           'implies equal weighting.')

    wts_dict = parser.add_argument_group(
        'dict weights options', 'Options for linear weights to be '
        'calculated based on parameters '
        'read from a json file dict')
    wts_dict.add_argument('--wts_dict',
                          metavar='WEIGHTS_DICTIONARY',
                          help='Path to json file containing dictionary from '
                          'which to calculate blending weights. Dictionary '
                          'format is as specified in the improver.blending.'
                          'weights.ChooseWeightsLinear plugin.')
    wts_dict.add_argument('--weighting_coord',
                          metavar='WEIGHTING_COORD',
                          default='forecast_period',
                          help='Name of '
                          'coordinate over which linear weights should be '
                          'scaled. This coordinate must be available in the '
                          'weights dictionary.')

    args = parser.parse_args(args=argv)

    # reject incorrect argument combinations
    if (args.wts_calc_method == "linear") and args.cval:
        parser.wrong_args_error('cval', 'linear')
    if ((args.wts_calc_method == "nonlinear")
            and np.any([args.y0val, args.ynval])):
        parser.wrong_args_error('y0val, ynval', 'non-linear')

    if (args.wts_calc_method == "dict") and not args.wts_dict:
        parser.error('Dictionary is required if --wts_calc_method="dict"')

    weights_dict = load_json_or_none(args.wts_dict)

    # Load cubes to be blended.
    cubelist = load_cubelist(args.input_filepaths)

    result = process(cubelist, args.wts_calc_method, args.coordinate,
                     args.cycletime, args.weighting_coord, weights_dict,
                     args.y0val, args.ynval, args.cval, args.model_id_attr,
                     args.spatial_weights_from_mask, args.fuzzy_length)

    save_netcdf(result, args.output_filepath)
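
The y0val/ynval and cval options above correspond to two simple weighting
schemes. A minimal numpy sketch of both, assuming the weights are normalised
to sum to one; this is an illustration, not the ChooseDefaultWeightsLinear or
ChooseDefaultWeightsNonLinear plugins themselves:

import numpy as np

def linear_weights(num, y0val, ynval):
    """Sketch: weights vary linearly from y0val at the lowest blend
    coordinate value to ynval at the highest (normalisation assumed)."""
    weights = np.linspace(y0val, ynval, num)
    return weights / weights.sum()

def nonlinear_weights(num, cval):
    """Sketch: weights fall off geometrically along the blend coordinate;
    cval=1 gives equal weighting, as the --cval help text states."""
    weights = cval ** np.arange(num)
    return weights / weights.sum()

print(linear_weights(4, y0val=1.0, ynval=4.0))
print(nonlinear_weights(4, cval=0.85))
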
Example #20
0
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description='Run wind downscaling to apply roughness correction and'
                    ' height correction to wind fields (as described in'
                    ' Howard and Clark [2007]). All inputs must be on the same'
                    ' standard grid')
    parser.add_argument('wind_speed_filepath', metavar='WIND_SPEED_FILE',
                        help='Location of the wind speed on standard grid'
                             ' file. Any units can be supplied.')
    parser.add_argument('silhouette_roughness_filepath', metavar='AOS_FILE',
                        help='Location of model silhouette roughness file. '
                             'Units of field: dimensionless')
    parser.add_argument('sigma_filepath', metavar='SIGMA_FILE',
                        help='Location of standard deviation of model '
                             'orography height file. Units of field: m')
    parser.add_argument('target_orog_filepath',
                        metavar='TARGET_OROGRAPHY_FILE',
                        help='Location of target orography file to downscale'
                             ' fields to.'
                             ' Units of field: m')
    parser.add_argument('standard_orog_filepath',
                        metavar='STANDARD_OROGRAPHY_FILE',
                        help='Location of orography on standard grid file '
                             '(interpolated model orography).'
                             ' Units of field: m')
    parser.add_argument('model_resolution', metavar='MODEL_RESOLUTION',
                        help='Original resolution of model orography (before'
                             ' interpolation to standard grid).'
                             ' Units of field: m')
    parser.add_argument('output_filepath', metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF')
    parser.add_argument('--output_height_level', metavar='OUTPUT_HEIGHT_LEVEL',
                        default=None,
                        help='If only a single height level is desired as '
                        'output from wind-downscaling, this option can be '
                        'used to select the height level. If no units are '
                        'provided with the --output_height_level_units '
                        'option, metres are assumed.')
    parser.add_argument('--output_height_level_units',
                        metavar='OUTPUT_HEIGHT_LEVEL_UNITS', default='m',
                        help='If a single height level is selected as output '
                        'using the --output_height_level option, this '
                        'additional argument may be used to specify the units '
                        'of the value entered to select the level. e.g. hPa')
    parser.add_argument('--height_levels_filepath',
                        metavar='HEIGHT_LEVELS_FILE',
                        help='Location of file containing height levels '
                             'coincident with wind speed field.')
    parser.add_argument('--veg_roughness_filepath',
                        metavar='VEGETATIVE_ROUGHNESS_LENGTH_FILE',
                        help='Location of vegetative roughness length file.'
                             ' Units of field: m')
    args = parser.parse_args(args=argv)

    if args.output_height_level_units and not args.output_height_level:
        warnings.warn('--output_height_level_units has been set but no '
                      'associated height level has been provided. These units '
                      'will have no effect.')

    # Turn string to float
    model_resolution = float(args.model_resolution) if \
        args.model_resolution is not None else None
    output_height_level = float(args.output_height_level) if \
        args.output_height_level is not None else None

    # Load Cube
    wind_speed = load_cube(args.wind_speed_filepath)
    silhouette_roughness = load_cube(
        args.silhouette_roughness_filepath)
    sigma = load_cube(args.sigma_filepath)
    target_orog = load_cube(args.target_orog_filepath)
    standard_orog = load_cube(args.standard_orog_filepath)
    height_levels = load_cube(args.height_levels_filepath, allow_none=True)
    veg_roughness_cube = load_cube(args.veg_roughness_filepath,
                                   allow_none=True)

    # Process Cube
    wind_speed = process(wind_speed, silhouette_roughness, sigma, target_orog,
                         standard_orog, model_resolution, height_levels,
                         veg_roughness_cube, output_height_level,
                         args.output_height_level_units)

    # Save Cube
    save_netcdf(wind_speed, args.output_filepath)
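
If --output_height_level is supplied in non-native units (e.g. hPa via
--output_height_level_units), the value must be converted before a level can
be extracted. A hedged sketch using cf_units and iris; the coordinate name
"height" and the helper name are assumptions, not the plugin's API:

import iris
from cf_units import Unit

def extract_level(cube, level_value, level_units):
    """Sketch: convert the requested level into the units of the cube's
    vertical coordinate (assumed to be called 'height'), then extract."""
    coord = cube.coord("height")  # assumed coordinate name
    value = Unit(level_units).convert(level_value, coord.units)
    return cube.extract(iris.Constraint(height=value))
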
Example #21
0
 def test_error_unknown_units(self):
     """Test key error when trying to save a cube with no units"""
     no_units_cube = iris.cube.Cube(np.array([1], dtype=np.float32))
     msg = "has unknown units"
     with self.assertRaisesRegex(ValueError, msg):
         save_netcdf(no_units_cube, self.filepath)
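
The condition this test exercises can be reproduced with cf_units directly;
a minimal sketch (the exact wording of the error message is an assumption):

import iris.cube
import numpy as np

cube = iris.cube.Cube(np.array([1], dtype=np.float32))
if cube.units.is_unknown():
    # save_netcdf rejects cubes like this one with a "has unknown units" error
    print("cube '{}' has unknown units".format(cube.name()))
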
Example #22
0
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description="Run a recursive filter to convert a square neighbourhood "
        "into a Gaussian-like kernel or smooth over short "
        "distances. The filter uses an alpha parameter (0 < alpha < 1) to "
        "control what proportion of the probability is passed onto the next "
        "grid-square in the x and y directions. The alpha parameter can be "
        "set on a grid-square by grid-square basis for the x and y directions "
        "separately (using two arrays of alpha parameters of the same "
        "dimensionality as the domain). Alternatively a single alpha value "
        "can be set for each of the x and y directions. These methods can be "
        "mixed, e.g. an array for the x direction and a float for the y "
        "direction and vice versa.")
    parser.add_argument("input_filepath",
                        metavar="INPUT_FILE",
                        help="A path to an input NetCDF file to be processed")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")
    parser.add_argument("--input_filepath_alphas_x",
                        metavar="ALPHAS_X_FILE",
                        help="A path to a NetCDF file describing the alpha "
                        "factors to be used for smoothing in the x "
                        "direction")
    parser.add_argument("--input_filepath_alphas_y",
                        metavar="ALPHAS_Y_FILE",
                        help="A path to a NetCDF file describing the alpha "
                        "factors to be used for smoothing in the y "
                        "direction")
    parser.add_argument("--alpha_x",
                        metavar="ALPHA_X",
                        default=None,
                        type=float,
                        help="A single alpha factor (0 < alpha_x < 1) to be "
                        "applied to every grid square in the x "
                        "direction.")
    parser.add_argument("--alpha_y",
                        metavar="ALPHA_Y",
                        default=None,
                        type=float,
                        help="A single alpha factor (0 < alpha_y < 1) to be "
                        "applied to every grid square in the y "
                        "direction.")
    parser.add_argument("--iterations",
                        metavar="ITERATIONS",
                        default=1,
                        type=int,
                        help="Number of times to apply the filter, default=1 "
                        "(typically < 5)")
    parser.add_argument('--input_mask_filepath',
                        metavar='INPUT_MASK_FILE',
                        help='A path to an input mask NetCDF file to be '
                        'used to mask the input file.')
    parser.add_argument("--re_mask",
                        action='store_true',
                        default=False,
                        help="Re-apply mask to recursively filtered output.")

    args = parser.parse_args(args=argv)

    # Load Cubes.
    cube = load_cube(args.input_filepath)
    mask_cube = load_cube(args.input_mask_filepath, allow_none=True)
    alphas_x_cube = load_cube(args.input_filepath_alphas_x, allow_none=True)
    alphas_y_cube = load_cube(args.input_filepath_alphas_y, allow_none=True)
    # Process Cube
    result = process(cube, mask_cube, alphas_x_cube, alphas_y_cube,
                     args.alpha_x, args.alpha_y, args.iterations, args.re_mask)
    # Save Cube
    save_netcdf(result, args.output_filepath)
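
The role of the alpha parameter is easiest to see in one dimension: each pass
keeps a proportion (1 - alpha) of the local value and carries a proportion
alpha over from the previous grid square, run forward and then backward. A
minimal numpy sketch, not the RecursiveFilter plugin itself:

import numpy as np

def recursive_filter_1d(data, alpha, iterations=1):
    """Sketch of a 1-D recursive filter pass with a scalar alpha."""
    out = data.astype(float).copy()
    for _ in range(iterations):
        for idx in range(1, out.size):            # forward pass
            out[idx] = (1 - alpha) * out[idx] + alpha * out[idx - 1]
        for idx in range(out.size - 2, -1, -1):   # backward pass
            out[idx] = (1 - alpha) * out[idx] + alpha * out[idx + 1]
    return out

spike = np.zeros(9)
spike[4] = 1.0
print(recursive_filter_1d(spike, alpha=0.5))  # the spike spreads to neighbours
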
Example #23
0
def main(argv=None):
    """Load in arguments for applying neighbourhood processing when using a
    mask."""
    parser = ArgParser(
        description='Neighbourhood the input dataset over two distinct regions'
        ' of land and sea. If performed as a single level neighbourhood, a '
        'land-sea mask should be provided. If instead topographic_zone '
        'neighbourhooding is being employed, the mask should be one of '
        'topographic zones. In the latter case a weights array is also needed'
        ' to collapse the topographic_zone coordinate. These weights are '
        'created with the improver generate-topography-bands-weights CLI and '
        'should be made using a land-sea mask, which will then be employed '
        'within this code to draw the distinction between the two surface '
        'types.')

    parser.add_argument('input_filepath',
                        metavar='INPUT_FILE',
                        help='A path to an input NetCDF file to be processed.')
    parser.add_argument('input_mask_filepath',
                        metavar='INPUT_MASK',
                        help=('A path to an input NetCDF file containing '
                              'either a mask of topographic zones over land '
                              'or a land-sea mask.'))
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')

    mask_group = parser.add_argument_group(
        'Collapse weights - required if using a topographic zones mask')
    mask_group.add_argument('--weights_for_collapsing_dim',
                            metavar='WEIGHTS',
                            default=None,
                            help='A path to a weights NetCDF file containing '
                            'the weights which are used for collapsing the '
                            'dimension gained through masking. These weights '
                            'must have been created using a land-sea mask.')

    radius_group = parser.add_argument_group(
        'Neighbourhooding Radius - Set only one of the options')
    group = radius_group.add_mutually_exclusive_group()
    group.add_argument('--radius',
                       metavar='RADIUS',
                       type=float,
                       help='The radius (in m) for neighbourhood processing.')
    group.add_argument('--radii-by-lead-time',
                       metavar=('RADII_BY_LEAD_TIME', 'LEAD_TIME_IN_HOURS'),
                       nargs=2,
                       help='The radii for neighbourhood processing '
                       'and the associated lead times at which the radii are '
                       'valid. The radii are in metres whilst the lead time '
                       'has units of hours. The radii and lead times are '
                       'expected as individual comma-separated lists with '
                       'the list of radii given first followed by a list of '
                       'lead times to indicate at what lead time each radius '
                       'should be used. For example: 10000,12000,14000 1,2,3 '
                       'where a lead time of 1 hour uses a radius of 10000m, '
                       'a lead time of 2 hours uses a radius of 12000m, etc.')
    parser.add_argument('--sum_or_fraction',
                        default="fraction",
                        choices=["sum", "fraction"],
                        help='The neighbourhood output can either be in the '
                        'form of a sum of the neighbourhood, or a '
                        'fraction calculated by dividing the sum of the '
                        'neighbourhood by the neighbourhood area. '
                        '"fraction" is the default option.')
    parser.add_argument('--intermediate_filepath',
                        default=None,
                        help='Intermediate filepath for results following '
                        'topographic masked neighbourhood processing of '
                        'land points and prior to collapsing the '
                        'topographic_zone coordinate. Intermediate files '
                        'will not be produced if no topographic masked '
                        'neighbourhood processing occurs.')

    args = parser.parse_args(args=argv)

    cube = load_cube(args.input_filepath)
    mask = load_cube(args.input_mask_filepath, no_lazy_load=True)
    weights = None
    if any([
            'topographic_zone' in coord.name()
            for coord in mask.coords(dim_coords=True)
    ]):

        if mask.attributes['topographic_zones_include_seapoints'] == 'True':
            raise ValueError('The topographic zones mask cube must have been '
                             'masked to exclude sea points, but '
                             'topographic_zones_include_seapoints = True')

        if not args.weights_for_collapsing_dim:
            raise IOError('A weights cube must be provided if using a mask '
                          'of topographic zones to collapse the resulting '
                          'vertical dimension.')

        weights = load_cube(args.weights_for_collapsing_dim, no_lazy_load=True)

    result, intermediate_cube = process(cube, mask, args.radius,
                                        args.radii_by_lead_time, weights,
                                        args.sum_or_fraction,
                                        args.intermediate_filepath)

    save_netcdf(result, args.output_filepath)
    if args.intermediate_filepath:
        save_netcdf(intermediate_cube, args.intermediate_filepath)
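
Collapsing the topographic_zone dimension with the supplied weights is, at
heart, a weighted sum over that axis. A hedged numpy sketch of just the
collapse step; the real plugin operates on iris cubes and handles masked
points:

import numpy as np

def collapse_zones(data, weights):
    """Sketch: collapse a leading topographic_zone axis by a weighted
    sum; the weights are assumed to sum to one at each grid point."""
    return np.sum(data * weights, axis=0)

data = np.array([[[280.0, 281.0]], [[282.0, 285.0]]])  # (zone, y, x)
weights = np.array([[[1.0, 0.5]], [[0.0, 0.5]]])
print(collapse_zones(data, weights))  # [[280. 283.]]
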
Example #24
0
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description='Apply the requested neighbourhood method via '
        'the NeighbourhoodProcessing plugin to a file '
        'whose data can be loaded as a single iris.cube.Cube.')
    parser.add_argument(
        'neighbourhood_output',
        metavar='NEIGHBOURHOOD_OUTPUT',
        help='The form of the results generated using neighbourhood '
        'processing. If "probabilities" is selected, the mean '
        'probability within a neighbourhood is calculated. If '
        '"percentiles" is selected, then the percentiles are calculated '
        'within a neighbourhood. Calculating percentiles from a '
        'neighbourhood is only supported for a circular neighbourhood. '
        'Options: "probabilities", "percentiles".')
    parser.add_argument('neighbourhood_shape',
                        metavar='NEIGHBOURHOOD_SHAPE',
                        choices=["circular", "square"],
                        help='The shape of the neighbourhood to apply in '
                        'neighbourhood processing. Only a "circular" '
                        'neighbourhood shape is applicable for '
                        'calculating "percentiles" output. '
                        'Options: "circular", "square".')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--radius',
                       metavar='RADIUS',
                       type=float,
                       help='The radius (in m) for neighbourhood processing.')
    group.add_argument('--radii-by-lead-time',
                       metavar=('RADII_BY_LEAD_TIME', 'LEAD_TIME_IN_HOURS'),
                       nargs=2,
                       help='The radii for neighbourhood processing '
                       'and the associated lead times at which the radii are '
                       'valid. The radii are in metres whilst the lead time '
                       'has units of hours. The radii and lead times are '
                       'expected as individual comma-separated lists with '
                       'the list of radii given first followed by a list of '
                       'lead times to indicate at what lead time each radius '
                       'should be used. For example: 10000,12000,14000 1,2,3 '
                       'where a lead time of 1 hour uses a radius of 10000m, '
                       'a lead time of 2 hours uses a radius of 12000m, etc.')
    parser.add_argument('--degrees_as_complex',
                        action='store_true',
                        default=False,
                        help='Set this flag to process angles,'
                        ' e.g. wind directions, as complex numbers. Not '
                        'compatible with circular kernel, percentiles or '
                        'recursive filter.')
    parser.add_argument('--weighted_mode',
                        action='store_true',
                        default=False,
                        help='For neighbourhood processing using a circular '
                        'kernel, setting the weighted_mode indicates the '
                        'weighting decreases with radius. '
                        'If weighted_mode is not set, a constant '
                        'weighting is assumed. weighted_mode is only '
                        'applicable for calculating "probabilities" '
                        'neighbourhood output.')
    parser.add_argument('--sum_or_fraction',
                        default="fraction",
                        choices=["sum", "fraction"],
                        help='The neighbourhood output can either be in the '
                        'form of a sum of the neighbourhood, or a '
                        'fraction calculated by dividing the sum of the '
                        'neighbourhood by the neighbourhood area. '
                        '"fraction" is the default option.')
    parser.add_argument('--re_mask',
                        action='store_true',
                        help='If re_mask is set (i.e. True), the original '
                        'un-neighbourhood processed mask is applied to '
                        'mask out the neighbourhood processed dataset. '
                        'If not set, re_mask defaults to False and the '
                        'original un-neighbourhood processed mask is '
                        'not applied. Therefore, the neighbourhood '
                        'processing may result in values being present '
                        'in areas that were originally masked. ')
    parser.add_argument('--percentiles',
                        metavar='PERCENTILES',
                        default=DEFAULT_PERCENTILES,
                        nargs='+',
                        type=float,
                        help='Calculate values at the specified percentiles '
                        'from the neighbourhood surrounding each grid '
                        'point.')
    parser.add_argument('input_filepath',
                        metavar='INPUT_FILE',
                        help='A path to an input NetCDF file to be processed.')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')
    parser.add_argument('--input_mask_filepath',
                        metavar='INPUT_MASK_FILE',
                        help='A path to an input mask NetCDF file to be '
                        'used to mask the input file. '
                        'This is currently only supported for square '
                        'neighbourhoods. The data should contain 1 for '
                        'usable points and 0 for discarded points, e.g. '
                        'a land-mask.')
    parser.add_argument('--halo_radius',
                        metavar='HALO_RADIUS',
                        default=None,
                        type=float,
                        help='radius in metres of excess halo to clip.'
                        ' Used where a larger'
                        ' grid was defined than the standard grid'
                        ' and we want to clip the grid back to the'
                        ' standard grid e.g. for global data'
                        ' regridded to UK area. Default=None')
    parser.add_argument('--apply-recursive-filter',
                        action='store_true',
                        default=False,
                        help='Option to apply the recursive filter to a '
                        'square neighbourhooded output dataset, '
                        'converting it into a Gaussian-like kernel or '
                        'smoothing over short distances. '
                        'The filter uses an alpha '
                        'parameter (0 < alpha < 1) to control what '
                        'proportion of the probability is passed onto '
                        'the next grid-square in the x and y directions. '
                        'The alpha parameter can be set on a grid-square '
                        'by grid-square basis for the x and y directions '
                        'separately (using two arrays of alpha '
                        'parameters of the same dimensionality as the '
                        'domain). Alternatively a single alpha value can '
                        'be set for each of the x and y directions. These'
                        ' methods can be mixed, e.g. an array for the x '
                        'direction and a float for the y direction and '
                        'vice versa. The recursive filter cannot be '
                        'applied to a circular kernel')
    parser.add_argument('--input_filepath_alphas_x_cube',
                        metavar='ALPHAS_X_FILE',
                        help='A path to a NetCDF file describing the alpha '
                        'factors to be used for smoothing in the x '
                        'direction when applying the recursive filter')
    parser.add_argument('--input_filepath_alphas_y_cube',
                        metavar='ALPHAS_Y_FILE',
                        help='A path to a NetCDF file describing the alpha '
                        'factors to be used for smoothing in the y '
                        'direction when applying the recursive filter')
    parser.add_argument('--alpha_x',
                        metavar='ALPHA_X',
                        default=None,
                        type=float,
                        help='A single alpha factor (0 < alpha_x < 1) to be '
                        'applied to every grid square in the x '
                        'direction when applying the recursive filter')
    parser.add_argument('--alpha_y',
                        metavar='ALPHA_Y',
                        default=None,
                        type=float,
                        help='A single alpha factor (0 < alpha_y < 1) to be '
                        'applied to every grid square in the y '
                        'direction when applying the recursive filter.')
    parser.add_argument('--iterations',
                        metavar='ITERATIONS',
                        default=1,
                        type=int,
                        help='Number of times to apply the filter, default=1 '
                        '(typically < 5)')

    args = parser.parse_args(args=argv)

    if (args.neighbourhood_output == "percentiles"
            and args.neighbourhood_shape == "square"):
        parser.wrong_args_error('square', 'neighbourhood_shape')

    if (args.neighbourhood_output == "percentiles" and args.weighted_mode):
        parser.wrong_args_error('weighted_mode',
                                'neighbourhood_output=percentiles')

    if (args.neighbourhood_output == "probabilities"
            and args.percentiles != DEFAULT_PERCENTILES):
        parser.wrong_args_error('percentiles',
                                'neighbourhood_output=probabilities')

    if (args.input_mask_filepath and args.neighbourhood_shape == "circular"):
        parser.wrong_args_error('neighbourhood_shape=circular',
                                'input_mask_filepath')

    if args.degrees_as_complex:
        if args.neighbourhood_output == "percentiles":
            parser.error('Cannot generate percentiles from complex numbers')
        if args.neighbourhood_shape == "circular":
            parser.error('Cannot process complex numbers with circular '
                         'neighbourhoods')
        if args.apply_recursive_filter:
            parser.error('Cannot process complex numbers with recursive '
                         'filter')

    cube = load_cube(args.input_filepath)
    if args.degrees_as_complex:
        # convert cube data into complex numbers
        cube.data = WindDirection.deg_to_complex(cube.data)

    if args.radius:
        radius_or_radii = args.radius
        lead_times = None
    elif args.radii_by_lead_time:
        radius_or_radii = args.radii_by_lead_time[0].split(",")
        lead_times = args.radii_by_lead_time[1].split(",")

    if args.input_mask_filepath:
        mask_cube = load_cube(args.input_mask_filepath)
    else:
        mask_cube = None

    if args.neighbourhood_output == "probabilities":
        result = (NeighbourhoodProcessing(args.neighbourhood_shape,
                                          radius_or_radii,
                                          lead_times=lead_times,
                                          weighted_mode=args.weighted_mode,
                                          sum_or_fraction=args.sum_or_fraction,
                                          re_mask=args.re_mask).process(
                                              cube, mask_cube=mask_cube))
    elif args.neighbourhood_output == "percentiles":
        result = (GeneratePercentilesFromANeighbourhood(
            args.neighbourhood_shape,
            radius_or_radii,
            lead_times=lead_times,
            percentiles=args.percentiles).process(cube))

    # If the '--apply-recursive-filter' option has been specified in the
    # input command, pass the neighbourhooded 'result' cube obtained above
    # through the recursive-filter plugin before saving the output.
    # The recursive filter is only applicable to square neighbourhoods.

    if args.neighbourhood_shape == 'square' and args.apply_recursive_filter:

        alphas_x_cube = None
        alphas_y_cube = None

        if args.input_filepath_alphas_x_cube is not None:
            alphas_x_cube = load_cube(args.input_filepath_alphas_x_cube)
        if args.input_filepath_alphas_y_cube is not None:
            alphas_y_cube = load_cube(args.input_filepath_alphas_y_cube)

        result = RecursiveFilter(alpha_x=args.alpha_x,
                                 alpha_y=args.alpha_y,
                                 iterations=args.iterations,
                                 re_mask=args.re_mask).process(
                                     result,
                                     alphas_x=alphas_x_cube,
                                     alphas_y=alphas_y_cube,
                                     mask_cube=mask_cube)

    elif args.neighbourhood_shape == 'circular' and \
            args.apply_recursive_filter:
        raise ValueError('Recursive filter option is not applicable to '
                         'circular neighbourhoods. ')

    if args.degrees_as_complex:
        # convert neighbourhooded cube back to degrees
        result.data = WindDirection.complex_to_deg(result.data)

    if args.halo_radius is not None:
        result = remove_cube_halo(result, args.halo_radius)

    save_netcdf(result, args.output_filepath)
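
Averaging angles such as wind directions requires the complex representation
described under --degrees_as_complex: 350 and 10 degrees should average to 0,
not 180. A minimal numpy sketch of the round trip; this illustrates the idea
rather than reproducing the WindDirection plugin:

import numpy as np

def deg_to_complex(angles_deg):
    """Sketch: encode angles as points on the unit circle."""
    return np.exp(1j * np.deg2rad(angles_deg))

def complex_to_deg(values):
    """Sketch: decode back to degrees in the range [0, 360)."""
    return np.rad2deg(np.angle(values)) % 360

angles = np.array([350.0, 10.0])
print(complex_to_deg(deg_to_complex(angles).mean()))  # ~0.0, not 180.0
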
Example #25
0
def main(argv=None):
    """Load in arguments for applying coefficients for Ensemble Model Output
       Statistics (EMOS), otherwise known as Non-homogeneous Gaussian
       Regression (NGR). The coefficients are applied to the forecast
       that is supplied, so as to calibrate the forecast. The calibrated
       forecast is written to a netCDF file.
    """
    parser = ArgParser(
        description='Apply coefficients for Ensemble Model Output '
        'Statistics (EMOS), otherwise known as Non-homogeneous '
        'Gaussian Regression (NGR). The supported input formats '
        'are realizations, probabilities and percentiles. '
        'The forecast will be converted to realizations before '
        'applying the coefficients and then converted back to '
        'match the input format.')
    # Filepaths for the forecast, EMOS coefficients and the output.
    parser.add_argument(
        'forecast_filepath',
        metavar='FORECAST_FILEPATH',
        help='A path to an input NetCDF file containing the forecast to be '
        'calibrated. The input format could be either realizations, '
        'probabilities or percentiles.')
    parser.add_argument('coefficients_filepath',
                        metavar='COEFFICIENTS_FILEPATH',
                        help='A path to an input NetCDF file containing the '
                        'coefficients used for calibration.')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILEPATH',
                        help='The output path for the processed NetCDF')
    # Optional arguments.
    parser.add_argument(
        '--num_realizations',
        metavar='NUMBER_OF_REALIZATIONS',
        default=None,
        type=np.int32,
        help='Optional argument to specify the number of '
        'ensemble realizations to produce. '
        'If the current forecast is input as probabilities or '
        'percentiles then this argument is used to create the requested '
        'number of realizations. In addition, this argument is used to '
        'construct the requested number of realizations from the mean '
        'and variance output after applying the EMOS coefficients. '
        'Default will be the number of realizations in the raw input '
        'file, if realizations are provided as input, otherwise if the '
        'input format is probabilities or percentiles, then an error '
        'will be raised if no value is provided.')
    parser.add_argument(
        '--random_ordering',
        default=False,
        action='store_true',
        help='Option to reorder the post-processed forecasts randomly. If not '
        'set, the ordering of the raw ensemble is used. This option is '
        'only valid when the input format is realizations.')
    parser.add_argument(
        '--random_seed',
        metavar='RANDOM_SEED',
        default=None,
        help='Option to specify a value for the random seed for testing '
        'purposes, otherwise, the default random seed behaviour is '
        'utilised. The random seed is used in the generation of the '
        'random numbers used for either the random_ordering option to '
        'order the input percentiles randomly, rather than use the '
        'ordering from the raw ensemble, or for splitting tied values '
        'within the raw ensemble, so that the values from the input '
        'percentiles can be ordered to match the raw ensemble.')
    parser.add_argument(
        '--ecc_bounds_warning',
        default=False,
        action='store_true',
        help='If True, where the percentiles exceed the ECC bounds range, '
        'raise a warning rather than an exception. This occurs when the '
        'current forecast is in the form of probabilities and is '
        'converted to percentiles, as part of converting the input '
        'probabilities into realizations.')
    parser.add_argument(
        '--predictor_of_mean',
        metavar='PREDICTOR_OF_MEAN',
        choices=['mean', 'realizations'],
        default='mean',
        help='String to specify the predictor used to calibrate the forecast '
        'mean. Currently the ensemble mean ("mean") and the ensemble '
        'realizations ("realizations") are supported as options. '
        'Default: "mean".')

    args = parser.parse_args(args=argv)

    current_forecast = load_cube(args.forecast_filepath)
    coeffs = load_cube(args.coefficients_filepath)

    original_current_forecast = current_forecast.copy()

    msg = ("The current forecast has been provided as {0}. "
           "These {0} need to be converted to realizations "
           "for ensemble calibration. The args.num_realizations "
           "argument is used to define the number of realizations "
           "to construct from the input {0}, so if the "
           "current forecast is provided as {0} then "
           "args.num_realizations must be defined.")

    try:
        find_percentile_coordinate(current_forecast)
        input_forecast_type = "percentiles"
    except CoordinateNotFoundError:
        input_forecast_type = "realizations"

    if current_forecast.name().startswith("probability_of"):
        input_forecast_type = "probabilities"
        # If probabilities, convert to percentiles.
        conversion_plugin = GeneratePercentilesFromProbabilities(
            ecc_bounds_warning=args.ecc_bounds_warning)
    elif input_forecast_type == "percentiles":
        # If percentiles, resample percentiles so that the percentiles are
        # evenly spaced.
        conversion_plugin = ResamplePercentiles(
            ecc_bounds_warning=args.ecc_bounds_warning)

    # If percentiles, resample percentiles and then rebadge.
    # If probabilities, generate percentiles and then rebadge.
    if input_forecast_type in ["percentiles", "probabilities"]:
        if not args.num_realizations:
            raise ValueError(msg.format(input_forecast_type))
        current_forecast = conversion_plugin.process(
            current_forecast, no_of_percentiles=args.num_realizations)
        current_forecast = (
            RebadgePercentilesAsRealizations().process(current_forecast))

    # Default number of ensemble realizations is the number in
    # the raw forecast.
    if not args.num_realizations:
        args.num_realizations = len(
            current_forecast.coord('realization').points)

    # Apply coefficients as part of Ensemble Model Output Statistics (EMOS).
    ac = ApplyCoefficientsFromEnsembleCalibration(
        current_forecast,
        coeffs,
        predictor_of_mean_flag=args.predictor_of_mean)
    calibrated_predictor, calibrated_variance = ac.process()

    # If input forecast is probabilities, convert output into probabilities.
    # If input forecast is percentiles, convert output into percentiles.
    # If input forecast is realizations, convert output into realizations.
    if input_forecast_type == "probabilities":
        result = GenerateProbabilitiesFromMeanAndVariance().process(
            calibrated_predictor, calibrated_variance,
            original_current_forecast)
    elif input_forecast_type == "percentiles":
        perc_coord = find_percentile_coordinate(original_current_forecast)
        result = GeneratePercentilesFromMeanAndVariance().process(
            calibrated_predictor,
            calibrated_variance,
            percentiles=perc_coord.points)
    elif input_forecast_type == "realizations":
        # Ensemble Copula Coupling to generate realizations
        # from mean and variance.
        percentiles = GeneratePercentilesFromMeanAndVariance().process(
            calibrated_predictor,
            calibrated_variance,
            no_of_percentiles=args.num_realizations)
        result = EnsembleReordering().process(
            percentiles,
            current_forecast,
            random_ordering=args.random_ordering,
            random_seed=args.random_seed)
    save_netcdf(result, args.output_filepath)
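
Generating percentiles (and hence realizations) from the calibrated mean and
variance is, at its core, an inverse-normal lookup. A hedged scipy sketch of
that step alone; the real plugins also manage cube metadata and the ECC
reordering:

import numpy as np
from scipy.stats import norm

def percentiles_from_mean_and_variance(mean, variance, percentiles):
    """Sketch: evaluate the Gaussian quantile function at the requested
    percentiles for each grid point (a normal predictive law is assumed)."""
    return norm.ppf(np.asarray(percentiles)[:, None] / 100.0,
                    loc=mean, scale=np.sqrt(variance))

mean = np.array([280.0, 285.0])
variance = np.array([1.0, 4.0])
print(percentiles_from_mean_and_variance(mean, variance, [25, 50, 75]))
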
Example #26
0
 def test_add_least_significant_digit(self):
     """Test bitshaving adds correct metadata"""
     save_netcdf(self.cube, self.filepath, least_significant_digit=2)
     cube = load_cube(self.filepath)
     self.assertEqual(cube.attributes["least_significant_digit"], 2)
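
The least_significant_digit attribute records a quantisation: the data are
only kept to roughly 10**-lsd precision so that compression can discard
noise bits. A minimal decimal sketch of the idea; note that the netCDF4
library actually quantises in powers of two, so its error bound differs
slightly:

import numpy as np

def bitshave(data, lsd):
    """Sketch: round data to lsd decimal digits, as implied by the
    least_significant_digit attribute."""
    scale = 10.0 ** lsd
    return np.round(data * scale) / scale

data = np.linspace(0.0, 1.0, 11) + 0.00049
shaved = bitshave(data, lsd=2)
assert np.max(np.abs(data - shaved)) < 10.0 ** -2  # error below 10**-lsd
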
Example #27
0
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description="Calculate the continuous falling snow level ")
    parser.add_argument("temperature",
                        metavar="TEMPERATURE",
                        help="Path to a NetCDF file of air temperatures at"
                        " heights (m) at the points for which the continuous "
                        "falling snow level is being calculated.")
    parser.add_argument("relative_humidity",
                        metavar="RELATIVE_HUMIDITY",
                        help="Path to a NetCDF file of relative_humidities at"
                        " heights (m) at the points for which the continuous "
                        "falling snow level is being calculated.")
    parser.add_argument("pressure",
                        metavar="PRESSURE",
                        help="Path to a NetCDF file of air pressures at"
                        " heights (m) at the points for which the continuous "
                        "falling snow level is being calculated.")
    parser.add_argument("orography",
                        metavar="OROGRAPHY",
                        help="Path to a NetCDF file containing "
                        "the orography height in m of the terrain "
                        "over which the continuous falling snow level is "
                        "being calculated.")
    parser.add_argument("land_sea_mask",
                        metavar="LAND_SEA_MASK",
                        help="Path to a NetCDF file containing "
                        "the binary land-sea mask for the points "
                        "for which the continuous falling snow level is "
                        "being calculated. Land points are set to 1, sea "
                        "points are set to 0.")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")
    parser.add_argument("--precision",
                        metavar="NEWTON_PRECISION",
                        default=0.005,
                        type=float,
                        help="Precision to which the wet bulb temperature "
                        "is required: This is used by the Newton iteration "
                        "default value is 0.005")
    parser.add_argument("--falling_level_threshold",
                        metavar="FALLING_LEVEL_THRESHOLD",
                        default=90.0,
                        type=float,
                        help=("Cutoff threshold for the wet-bulb integral used"
                              " to calculate the falling snow level. This "
                              "threshold indicates the level at which falling "
                              "snow is deemed to have melted to become rain. "
                              "The default value is 90.0, an empirically "
                              "derived value."))
    args = parser.parse_args(args=argv)

    # Load Cubes
    temperature = load_cube(args.temperature, no_lazy_load=True)
    relative_humidity = load_cube(args.relative_humidity, no_lazy_load=True)
    pressure = load_cube(args.pressure, no_lazy_load=True)
    orog = load_cube(args.orography, no_lazy_load=True)
    land_sea = load_cube(args.land_sea_mask, no_lazy_load=True)

    # Process Cube
    result = process(temperature, relative_humidity, pressure, orog, land_sea,
                     args.precision, args.falling_level_threshold)

    # Save Cube
    save_netcdf(result, args.output_filepath)
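
The --precision option is a stopping tolerance for the Newton iteration used
in the wet-bulb temperature calculation. A generic sketch of such an
iteration; f and its derivative here are stand-ins, not the wet-bulb
equations:

def newton_solve(f, dfdx, x0, precision=0.005, max_iter=50):
    """Sketch: iterate x -> x - f(x)/f'(x) until the update is smaller
    than the requested precision."""
    x = x0
    for _ in range(max_iter):
        step = f(x) / dfdx(x)
        x -= step
        if abs(step) < precision:
            break
    return x

# stand-in problem: solve x**2 = 2
print(newton_solve(lambda x: x * x - 2.0, lambda x: 2.0 * x, x0=1.0))
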
Example #28
0
def main(argv=None):
    """Load in arguments and ensure they are set correctly.
       Then load in the data to blend and calculate default weights
       using the method chosen before carrying out the blending."""
    parser = ArgParser(
        description='Calculate the default weights to apply in weighted '
        'blending plugins using the ChooseDefaultWeightsLinear or '
        'ChooseDefaultWeightsNonLinear plugins. Then apply these '
        'weights to the dataset using the BasicWeightedAverage plugin.'
        ' Required for ChooseDefaultWeightsLinear: y0val and ynval.'
        ' Required for ChooseDefaultWeightsNonLinear: cval.'
        ' Required for ChooseWeightsLinear with dict: wts_dict.')

    parser.add_argument('--wts_calc_method',
                        metavar='WEIGHTS_CALCULATION_METHOD',
                        choices=['linear', 'nonlinear', 'dict'],
                        default='linear',
                        help='Method to use to calculate '
                        'weights used in blending. "linear" (default): '
                        'calculate linearly varying blending weights. '
                        '"nonlinear": calculate blending weights that decrease'
                        ' exponentially with increasing blending coordinate. '
                        '"dict": calculate weights using a dictionary passed '
                        'in as a command line argument.')

    parser.add_argument('coordinate',
                        type=str,
                        metavar='COORDINATE_TO_AVERAGE_OVER',
                        help='The coordinate over which the blending '
                        'will be applied.')
    parser.add_argument('--coordinate_unit',
                        metavar='UNIT_STRING',
                        default='hours since 1970-01-01 00:00:00',
                        help='Units for blending coordinate. Default: '
                        'hours since 1970-01-01 00:00:00')
    parser.add_argument('--calendar',
                        metavar='CALENDAR',
                        default='gregorian',
                        help='Calendar for time coordinate. Default=gregorian')
    parser.add_argument('--cycletime',
                        metavar='CYCLETIME',
                        type=str,
                        help='The forecast reference time to be used after '
                        'blending has been applied, in the format '
                        'YYYYMMDDTHHMMZ. If not provided, the blended file '
                        'will take the latest available forecast reference '
                        'time from the input cubes supplied.')
    parser.add_argument('--model_id_attr',
                        metavar='MODEL_ID_ATTR',
                        type=str,
                        default="mosg__model_configuration",
                        help='The name of the netCDF file attribute to be '
                        'used to identify the source model for '
                        'multi-model blends. Default assumes Met Office '
                        'model metadata. Must be present on all input '
                        'files if blending over models.')
    parser.add_argument('--spatial_weights_from_mask',
                        action='store_true',
                        default=False,
                        help='If set, this option will result in the generation'
                        ' of spatially varying weights based on the'
                        ' masks of the data we are blending. The'
                        ' one-dimensional weights are first calculated'
                        ' using the chosen weights calculation method,'
                        ' but the weights will then be adjusted spatially'
                        ' based on where there is masked data in the data'
                        ' we are blending. The spatial weights are'
                        ' calculated using the'
                        ' SpatiallyVaryingWeightsFromMask plugin.')
    parser.add_argument('weighting_mode',
                        metavar='WEIGHTED_BLEND_MODE',
                        choices=['weighted_mean', 'weighted_maximum'],
                        help='The method used in the weighted blend. '
                        '"weighted_mean": calculate a normal weighted'
                        ' mean across the coordinate. '
                        '"weighted_maximum": multiplies the values in the'
                        ' coordinate by the weights, and then takes the'
                        ' maximum.')

    parser.add_argument('input_filepaths',
                        metavar='INPUT_FILES',
                        nargs="+",
                        help='Paths to input files to be blended.')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')

    spatial = parser.add_argument_group(
        'Spatial weights from mask options',
        'Options for calculating the spatial weights using the '
        'SpatiallyVaryingWeightsFromMask plugin.')
    spatial.add_argument('--fuzzy_length',
                         metavar='FUZZY_LENGTH',
                         type=float,
                         default=20000,
                         help='When calculating spatially varying weights we'
                         ' can smooth the weights so that areas close to'
                         ' areas that are masked have lower weights than'
                         ' those further away. This fuzzy length controls'
                         ' the scale over which the weights are smoothed.'
                         ' The fuzzy length is in metres; the'
                         ' default is 20 km. This distance is then'
                         ' converted into a number of grid squares,'
                         ' which does not have to be an integer. Assumes'
                         ' the grid spacing is the same in the x and y'
                         ' directions, and raises an error if this is not'
                         ' true. See SpatiallyVaryingWeightsFromMask for'
                         ' more detail.')

    linear = parser.add_argument_group(
        'linear weights options', 'Options for the linear weights '
        'calculation in '
        'ChooseDefaultWeightsLinear')
    linear.add_argument('--y0val',
                        metavar='LINEAR_STARTING_POINT',
                        type=float,
                        help='The relative value of the weighting start point '
                        '(lowest value of blend coord) for choosing default '
                        'linear weights. This must be a positive float or 0.')
    linear.add_argument('--ynval',
                        metavar='LINEAR_END_POINT',
                        type=float,
                        help='The relative value of the weighting '
                        'end point (highest value of blend coord) for choosing'
                        ' default linear weights. This must be a positive '
                        'float or 0.  Note that if blending over forecast '
                        'reference time, ynval >= y0val would normally be '
                        'expected (to give greater weight to the more recent '
                        'forecast).')

    nonlinear = parser.add_argument_group(
        'nonlinear weights options', 'Options for the non-linear '
        'weights calculation in '
        'ChooseDefaultWeightsNonLinear')
    nonlinear.add_argument('--cval',
                           metavar='NON_LINEAR_FACTOR',
                           type=float,
                           help='Factor used to determine how skewed the '
                           'non-linear weights will be. '
                           'A value of 1 implies equal weighting. If not '
                           'set, a default value of cval=0.85 is set.')

    wts_dict = parser.add_argument_group(
        'dict weights options', 'Options for linear weights to be '
        'calculated based on parameters '
        'read from a json file dict')
    wts_dict.add_argument('--wts_dict',
                          metavar='WEIGHTS_DICTIONARY',
                          help='Path to json file containing dictionary from '
                          'which to calculate blending weights. Dictionary '
                          'format is as specified in the improver.blending.'
                          'weights.ChooseWeightsLinear plugin.')
    wts_dict.add_argument('--weighting_coord',
                          metavar='WEIGHTING_COORD',
                          default='forecast_period',
                          help='Name of '
                          'coordinate over which linear weights should be '
                          'scaled. This coordinate must be available in the '
                          'weights dictionary.')

    args = parser.parse_args(args=argv)

    # if the linear weights method is called with non-linear args or vice
    # versa, exit with error
    if (args.wts_calc_method == "linear") and args.cval:
        parser.wrong_args_error('cval', 'linear')
    if ((args.wts_calc_method == "nonlinear")
            and np.any([args.y0val, args.ynval])):
        parser.wrong_args_error('y0val, ynval', 'non-linear')
    if (args.wts_calc_method == "dict") and not args.wts_dict:
        parser.error('Dictionary is required if --wts_calc_method="dict"')

    # set blending coordinate units
    if "time" in args.coordinate:
        coord_unit = Unit(args.coordinate_unit, args.calendar)
    elif args.coordinate_unit != 'hours since 1970-01-01 00:00:00':
        coord_unit = args.coordinate_unit
    else:
        coord_unit = 'no_unit'

    # For blending across models, only blending across "model_id" is directly
    # supported. This is because the blending coordinate must be sortable, in
    # order to ensure that the data cube and the weights cube have coordinates
    # in the same order for blending. Whilst the model_configuration is
    # sortable itself, as it is associated with model_id, which is the
    # dimension coordinate, sorting the model_configuration coordinate can
    # result in the model_id coordinate becoming non-monotonic. As dimension
    # coordinates must be monotonic, this leads to the model_id coordinate
    # being demoted to an auxiliary coordinate. Therefore, for simplicity
    # model_id is used as the blending coordinate, instead of
    # model_configuration.
    # TODO: Support model_configuration as a blending coordinate directly.
    if args.coordinate == "model_configuration":
        blend_coord = "model_id"
        dict_coord = "model_configuration"
    else:
        blend_coord = args.coordinate
        dict_coord = args.coordinate

    # load cubes to be blended
    cubelist = load_cubelist(args.input_filepaths)

    # determine whether or not to equalise forecast periods for model
    # blending weights calculation
    weighting_coord = (args.weighting_coord
                       if args.weighting_coord else "forecast_period")

    # prepare cubes for weighted blending
    merger = MergeCubesForWeightedBlending(blend_coord,
                                           weighting_coord=weighting_coord,
                                           model_id_attr=args.model_id_attr)
    cube = merger.process(cubelist, cycletime=args.cycletime)

    # if the coord for blending does not exist or has only one value,
    # update metadata only
    coord_names = [coord.name() for coord in cube.coords()]
    if (blend_coord not in coord_names) or (len(
            cube.coord(blend_coord).points) == 1):
        result = cube.copy()
        conform_metadata(result, cube, blend_coord, cycletime=args.cycletime)
        # raise a warning if this happened because the blend coordinate
        # doesn't exist
        if blend_coord not in coord_names:
            warnings.warn('Blend coordinate {} is not present on input '
                          'data'.format(blend_coord))

    # otherwise, calculate weights and blend across specified dimension
    else:
        weights = calculate_blending_weights(
            cube,
            blend_coord,
            args.wts_calc_method,
            wts_dict=args.wts_dict,
            weighting_coord=args.weighting_coord,
            coord_unit=coord_unit,
            y0val=args.y0val,
            ynval=args.ynval,
            cval=args.cval,
            dict_coord=dict_coord)

        if args.spatial_weights_from_mask:
            check_if_grid_is_equal_area(cube)
            grid_cells_x, _ = convert_distance_into_number_of_grid_cells(
                cube, args.fuzzy_length, int_grid_cells=False)
            SpatialWeightsPlugin = SpatiallyVaryingWeightsFromMask(
                grid_cells_x)
            weights = SpatialWeightsPlugin.process(cube, weights, blend_coord)

        # blend across specified dimension
        BlendingPlugin = WeightedBlendAcrossWholeDimension(
            blend_coord, args.weighting_mode, cycletime=args.cycletime)
        result = BlendingPlugin.process(cube, weights=weights)

    save_netcdf(result, args.output_filepath)
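
The spatial adjustment described for --spatial_weights_from_mask can be
pictured as: zero weight where the data are masked, ramping linearly back to
full weight over the fuzzy length. A hedged numpy/scipy sketch of that idea,
not the SpatiallyVaryingWeightsFromMask plugin:

import numpy as np
from scipy.ndimage import distance_transform_edt

def fuzzy_spatial_weights(mask, weight, fuzzy_length_cells):
    """Sketch: scale a scalar weight by the distance (in grid squares)
    from masked points, with a linear ramp of length fuzzy_length_cells."""
    distance = distance_transform_edt(~mask)  # cells to nearest masked point
    ramp = np.clip(distance / fuzzy_length_cells, 0.0, 1.0)
    return weight * ramp

mask = np.zeros((1, 6), dtype=bool)
mask[0, 0] = True  # one masked column
print(fuzzy_spatial_weights(mask, weight=0.5, fuzzy_length_cells=3.0))
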
Example #29
0
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description=('Reads input orography and landmask fields. Creates '
                     'a series of topographic zone weights to indicate '
                     'where an orography point sits within the defined '
                     'topographic bands. If the orography point is in the '
                     'centre of a topographic band, then a single band will '
                     'have a weight of 1.0. If the orography point is at the '
                     'edge of a topographic band, then the upper band will '
                     'have a 0.5 weight whilst the lower band will also have '
                     'a 0.5 weight. Otherwise, the weight will vary linearly '
                     'between the centre of a topographic band and the edge.'))
    parser.add_argument('input_filepath_standard_orography',
                        metavar='INPUT_FILE_STANDARD_OROGRAPHY',
                        help=('A path to an input NetCDF orography file to '
                              'be processed'))
    parser.add_argument('output_filepath', metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')
    parser.add_argument('--input_filepath_landmask', metavar='INPUT_FILE_LAND',
                        help=('A path to an input NetCDF land mask file to be '
                              'processed. If provided, sea points will be '
                              'masked and set to the default fill value. If '
                              'no land mask is provided, weights will be '
                              'generated for sea points as well as land, '
                              'included in the appropriate topographic band.'))

    parser.add_argument('--force', dest='force', default=False,
                        action='store_true',
                        help=('If keyword is set (i.e. True), ancillaries '
                              'will be generated even if doing so will '
                              'overwrite existing files'))
    parser.add_argument('--thresholds_filepath',
                        metavar='THRESHOLDS_FILEPATH',
                        default=None,
                        help=("The path to a json file which can be used "
                              "to set the number and size of topographic "
                              "bounds. If unset a default bounds dictionary"
                              " will be used:"
                              "{'bounds': [[-500., 50.], [50., 100.], "
                              "[100., 150.],[150., 200.], [200., 250.], "
                              "[250., 300.], [300., 400.], [400., 500.], "
                              "[500., 650.],[650., 800.], [800., 950.], "
                              "[950., 6000.]], 'units': 'm'}"))
    args = parser.parse_args(args=argv)

    thresholds_dict = load_json_or_none(args.thresholds_filepath)

    if not os.path.exists(args.output_filepath) or args.force:
        orography = load_cube(args.input_filepath_standard_orography)
        landmask = None
        if args.input_filepath_landmask:
            try:
                landmask = load_cube(args.input_filepath_landmask)
            except IOError as err:
                msg = ("Loading land mask has been unsuccessful: {}. "
                       "This may be because the land mask could not be "
                       "located in {}; run "
                       "improver-generate-landmask-ancillary first.").format(
                           err, args.input_filepath_landmask)
                raise IOError(msg) from err

        result = process(landmask, orography, thresholds_dict)
        # Save Cube
        save_netcdf(result, args.output_filepath)
    else:
        print('File already exists here:', args.output_filepath)
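The description above states the weighting rule in words: weight 1.0 at the centre of a topographic band, 0.5 at either edge, and linear variation in between. A simplified, self-contained sketch of that rule for a single band follows; band_weight is a hypothetical helper written for illustration, not the improver implementation:

import numpy as np

def band_weight(height, low, high):
    """Weight of a point in the band [low, high]: 1.0 at the band centre,
    0.5 at either edge, varying linearly in between (clipped to zero once
    the point is far enough outside the band)."""
    centre = 0.5 * (low + high)
    half_width = 0.5 * (high - low)
    return np.clip(1.0 - 0.5 * abs(height - centre) / half_width, 0.0, 1.0)

print(band_weight(75.0, 50.0, 100.0))    # band centre -> 1.0
print(band_weight(100.0, 50.0, 100.0))   # band edge -> 0.5 ...
print(band_weight(100.0, 100.0, 150.0))  # ... and 0.5 in the band above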
Example #30
0
def main(argv=None):
    """Load in arguments for the cube combiner plugin.
    """
    parser = ArgParser(
        description="Combine the input files into a single file using "
                    "the requested operation e.g. + - min max etc.")
    parser.add_argument("input_filenames", metavar="INPUT_FILENAMES",
                        nargs="+", type=str,
                        help="Paths to the input NetCDF files. Each input"
                        " file should be able to be loaded as a single "
                        " iris.cube.Cube instance. The resulting file"
                        " metadata will be based on the first file but"
                        " its metadata can be overwritten via"
                        " the metadata_jsonfile option.")
    parser.add_argument("output_filepath", metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF.")
    parser.add_argument("--operation", metavar="OPERATION",
                        default="+",
                        choices=["+", "-", "*",
                                 "add", "subtract", "multiply",
                                 "min", "max", "mean"],
                        help="Operation to use in combining NetCDF datasets"
                        " Default=+ i.e. add ", type=str)
    parser.add_argument("--new-name", metavar="NEW_NAME",
                        default=None,
                        help="New name for the resulting dataset. Will"
                        " default to the name of the first dataset if "
                        "not set.", type=str)
    parser.add_argument("--metadata_jsonfile", metavar="METADATA_JSONFILE",
                        default=None,
                        help="Filename for the json file containing "
                        "required changes to the metadata. "
                        " default=None", type=str)
    parser.add_argument('--warnings_on', action='store_true',
                        help="If warnings_on is set (i.e. True), "
                        "Warning messages where metadata do not match "
                        "will be given. Default=False", default=False)

    args = parser.parse_args(args=argv)

    new_metadata = load_json_or_none(args.metadata_jsonfile)
    # Load cubes
    cubelist = iris.cube.CubeList([])
    new_cube_name = args.new_name
    for filename in args.input_filenames:
        new_cube = load_cube(filename)
        cubelist.append(new_cube)
        if new_cube_name is None:
            new_cube_name = new_cube.name()
        if args.warnings_on:
            if (args.new_name is None and
                    new_cube_name != new_cube.name()):
                msg = ("Defaulting to first cube name, {} but combining with"
                       "a cube with name, {}.".format(
                            new_cube_name, new_cube.name()))
                warnings.warn(msg)

    # Process Cube
    result = process(cubelist, args.operation, new_cube_name,
                     new_metadata, args.warnings_on)

    # Save Cube
    save_netcdf(result, args.output_filepath)
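Since the full argument parser is shown above, a hypothetical invocation of this entry point can be sketched directly; the file names below are placeholders, not paths from a real run:

# Hypothetical invocation: combine two precipitation diagnostics by addition.
main(argv=[
    "rain_rate.nc", "snow_rate.nc",   # INPUT_FILENAMES (placeholder paths)
    "precip_rate.nc",                 # OUTPUT_FILE (placeholder path)
    "--operation", "+",
    "--new-name", "lwe_precipitation_rate",
])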