def test_adding_multiple_arguments(self):
    """Test that we can successfully add multiple arguments to the
    ArgParser."""
    # we will not actually pass anything in, so the Namespace will receive
    # the defaults (if any) - only check the keys of the Namespace derived
    # dictionary
    args_to_add = [(['--foo'], {}), (['--bar', '--b'], {})]
    expected_namespace_keys = ['foo', 'bar']  # + compulsory...

    # explicitly pass nothing in - will only have compulsory arguments
    # and the ones we added...
    parser = ArgParser(central_arguments=None, specific_arguments=None)
    parser.add_arguments(args_to_add)
    result_args = parser.parse_args()
    result_args = vars(result_args).keys()
    # we could also add compulsory arguments to expected_namespace_keys
    # and then assertCountEqual - (order unimportant), but this
    # is unnecessary - just use a loop:
    # (or we could patch compulsory arguments to be an empty dictionary)
    for expected_arg in expected_namespace_keys:
        self.assertIn(expected_arg, result_args)

def test_argparser_compulsory_args_has_profile(self):
    """Test that creating an ArgParser instance with the compulsory
    arguments adds the profiling options."""
    expected_profile_options = ['profile', 'profile_file']
    parser = ArgParser(central_arguments=None, specific_arguments=None)
    args = parser.parse_args()
    args = vars(args).keys()
    self.assertCountEqual(args, expected_profile_options)

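# A hedged sketch of the compulsory argspecs the test above exercises. The
# real definitions live on ArgParser.COMPULSORY_ARGUMENTS; this mirrors the
# patched versions used in test_profile_is_not_called_when_disabled below:
#
#   COMPULSORY_ARGUMENTS = {
#       'profile': (['--profile'], {'default': False}),
#       'profile_file': (['--profile-file'], {'default': None}),
#   }
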
def main(argv=None):
    """Generate target grid with a halo around the source file grid."""
    parser = ArgParser(description='Generate grid with halo from a source '
                                   'domain input file. The grid is '
                                   'populated with zeroes.')
    parser.add_argument('input_file', metavar='INPUT_FILE',
                        help='NetCDF file containing data on a source '
                             'grid.')
    parser.add_argument('output_file', metavar='OUTPUT_FILE',
                        help='NetCDF file defining the target grid with '
                             'additional halo.')
    parser.add_argument('--halo_radius', metavar='HALO_RADIUS',
                        default=162000, type=float,
                        help='Size of halo (in m) with which to pad the '
                             'input grid. Default is 162 000 m.')
    args = parser.parse_args(args=argv)

    # Load Cube
    cube = load_cube(args.input_file)
    # Process Cube
    result = process(cube, args.halo_radius)
    # Save Cube
    save_netcdf(result, args.output_file)

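# Because main() accepts an argv list, the CLI above can be exercised
# without a shell. A minimal usage sketch (file names are hypothetical):
#
#   main(['source_grid.nc', 'grid_with_halo.nc', '--halo_radius', '100000'])
#
# This pads the source grid with a 100 km halo of zeroes.
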
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(description=('Read the input landmask, and correct '
                                    'to boolean values.'))
    parser.add_argument('--force', dest='force', default=False,
                        action='store_true',
                        help=('If True, ancillaries will be generated '
                              'even if doing so will overwrite existing '
                              'files.'))
    parser.add_argument('input_filepath_standard',
                        metavar='INPUT_FILE_STANDARD',
                        help='A path to an input NetCDF file to be '
                             'processed')
    parser.add_argument('output_filepath', metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF')
    args = parser.parse_args(args=argv)

    # Check if improver ancillary already exists.
    if not os.path.exists(args.output_filepath) or args.force:
        landmask = load_cube(args.input_filepath_standard)
        land_binary_mask = CorrectLandSeaMask().process(landmask)
        save_netcdf(land_binary_mask, args.output_filepath)
    else:
        print('File already exists here: ', args.output_filepath)

def main(argv=None):
    """Calculate the UV index using the data in the input cubes."""
    parser = ArgParser(description="Calculates the UV index.")
    parser.add_argument("radiation_flux_upward",
                        metavar="RADIATION_FLUX_UPWARD",
                        help="Path to a NetCDF file of radiation flux "
                             "in uv upward at surface.")
    parser.add_argument("radiation_flux_downward",
                        metavar="RADIATION_FLUX_DOWNWARD",
                        help="Path to a NetCDF file of radiation flux "
                             "in uv downward at surface.")
    parser.add_argument("output_filepath", metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")
    args = parser.parse_args(args=argv)

    # Load Cubes
    rad_uv_up = load_cube(args.radiation_flux_upward)
    rad_uv_down = load_cube(args.radiation_flux_downward)
    # Process Cube
    result = process(rad_uv_up, rad_uv_down)
    # Save Cube
    save_netcdf(result, args.output_filepath)

def test_adding_single_argument_with_unexpected_length_argspec(self):
    """Test that attempting to add an argument to the ArgParser raises an
    exception when the argspec has an unexpected format."""
    # length of argspec is 3 - this is unexpected
    args_to_add = [(['--foo'], 'bar', {})]
    parser = ArgParser(central_arguments=None, specific_arguments=None)
    with self.assertRaises(AttributeError):
        parser.add_arguments(args_to_add)

def main(argv=None):
    """Parser to accept input data and an output destination before
    invoking the weather symbols plugin.
    """
    diagnostics = interrogate_decision_tree('high_resolution')
    n_files = len(diagnostics)
    dlist = (' - {}\n' * n_files)

    diagnostics_global = interrogate_decision_tree('global')
    n_files_global = len(diagnostics_global)
    dlist_global = (' - {}\n' * n_files_global)

    parser = ArgParser(
        description='Calculate gridded weather symbol codes.\nThis plugin '
                    'requires a specific set of input diagnostics, where '
                    'data\nmay be in any units to which the thresholds '
                    'given below can\nbe converted:\n' +
                    dlist.format(*diagnostics) +
                    '\n\n or for global data\n\n' +
                    dlist_global.format(*diagnostics_global),
        formatter_class=RawTextHelpFormatter)
    parser.add_argument(
        'input_filepaths', metavar='INPUT_FILES', nargs="+",
        help='Paths to files containing the required input diagnostics.')
    parser.add_argument('output_filepath', metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')
    parser.add_argument("--wxtree", metavar="WXTREE",
                        default="high_resolution",
                        choices=["high_resolution", "global"], type=str,
                        help="Weather Code tree.\n"
                             "Choices are high_resolution or global.\n"
                             "Default=high_resolution.")
    args = parser.parse_args(args=argv)

    # Load Cubes
    cubes = load_cubelist(args.input_filepaths, no_lazy_load=True)
    required_number_of_inputs = n_files
    if args.wxtree == 'global':
        required_number_of_inputs = n_files_global
    if len(cubes) != required_number_of_inputs:
        msg = ('Incorrect number of inputs: files {} gave {} cubes, '
               '{} required').format(args.input_filepaths, len(cubes),
                                     required_number_of_inputs)
        raise argparse.ArgumentTypeError(msg)
    # Process Cubes
    result = process(cubes, args.wxtree)
    # Save Cube
    save_netcdf(result, args.output_filepath)

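# For illustration of the dlist trick above: repeating the template once
# per diagnostic and star-unpacking fills every slot, e.g. (hypothetical
# diagnostic names)
#
#   diagnostics = ['temperature', 'visibility']
#   (' - {}\n' * len(diagnostics)).format(*diagnostics)
#   # -> ' - temperature\n - visibility\n'
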
def test_adding_argument_with_defined_kwargs_dict_has_default(self):
    """Test that we can successfully add an argument to the ArgParser,
    when the argspec contains kwargs, and that the default value is
    captured."""
    args_to_add = [(['--one'], {'default': 1})]
    parser = ArgParser(central_arguments=None, specific_arguments=None)
    parser.add_arguments(args_to_add)
    result_args = parser.parse_args()
    # `--one` was not passed in, so we pick up the default - let's check
    # they agree...
    self.assertEqual(1, result_args.one)

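# Complementary behaviour (a hedged note, standard argparse semantics): if
# the option were supplied, it would override the default, and the token is
# stored as a string unless a 'type' is given in the kwargs dict, e.g.
#
#   result_args = parser.parse_args(['--one', '2'])
#   result_args.one  # -> '2'
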
def test_adding_argument_with_defined_kwargs_dict(self):
    """Test that we can successfully add an argument to the ArgParser,
    when the argspec contains kwargs."""
    # length of argspec is 2...
    args_to_add = [(['--foo'], {'default': 1})]
    expected_arg = 'foo'
    parser = ArgParser(central_arguments=None, specific_arguments=None)
    parser.add_arguments(args_to_add)
    result_args = parser.parse_args()
    result_args = vars(result_args).keys()
    self.assertIn(expected_arg, result_args)

def test_create_argparser_with_no_arguments(self):
    """Test that creating an ArgParser with no arguments has no
    arguments."""
    compulsory_arguments = {}

    # it doesn't matter what the centralized arguments are, because we
    # select None of them - we only need to patch the COMPULSORY_ARGUMENTS
    # to ensure there are none of them
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        parser = ArgParser(central_arguments=None,
                           specific_arguments=None)
        args = parser.parse_args()
        args = vars(args).keys()
        self.assertEqual(len(args), 0)

def test_create_argparser_only_compulsory_arguments(self):
    """Test that creating an ArgParser with only compulsory arguments
    adds only the compulsory arguments."""
    compulsory_arguments = {'foo': (['--foo'], {})}

    # it doesn't matter what the centralized arguments are, because we
    # select None of them - only patch COMPULSORY_ARGUMENTS so we know
    # what to expect
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        parser = ArgParser(central_arguments=None,
                           specific_arguments=None)
        args = parser.parse_args()
        args = vars(args).keys()
        self.assertCountEqual(args, ['foo'])

def main(argv=None):
    """Load in arguments to calculate mean wind direction from ensemble
    realizations."""
    cli_specific_arguments = [(['--backup_method'],
                               {'dest': 'backup_method',
                                'default': 'neighbourhood',
                                'choices': ['neighbourhood',
                                            'first_realization'],
                                'help': ('Backup method to use if there '
                                         'is low confidence in the '
                                         'wind_direction. Options are '
                                         'first_realization or '
                                         'neighbourhood; '
                                         'first_realization should only '
                                         'be used with global lat-lon '
                                         'data. Default is '
                                         'neighbourhood.')})]

    cli_definition = {'central_arguments': ('input_file', 'output_file'),
                      'specific_arguments': cli_specific_arguments,
                      'description': ('Run wind direction to calculate '
                                      'mean wind direction from ensemble '
                                      'realizations')}

    args = ArgParser(**cli_definition).parse_args(args=argv)

    wind_direction = load_cube(args.input_filepath)

    # Returns 3 cubes - the r_vals and confidence_measure cubes currently
    # only contain experimental data to be used for further research.
    bmethod = args.backup_method
    cube_mean_wdir, _, _ = (
        WindDirection(backup_method=bmethod).process(wind_direction))

    save_netcdf(cube_mean_wdir, args.output_filepath)

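# The cli_definition dict above is just keyword arguments for ArgParser;
# the names in 'central_arguments' are resolved against the parser's
# CENTRALIZED_ARGUMENTS dictionary (see the tests that patch it). An
# equivalent explicit construction:
#
#   args = ArgParser(
#       central_arguments=('input_file', 'output_file'),
#       specific_arguments=cli_specific_arguments,
#       description='Run wind direction to calculate mean wind direction '
#                   'from ensemble realizations').parse_args(args=argv)
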
def main(argv=None):
    """Translate meta-data relating to the grid_id attribute from StaGE
    version 1.1.0 to StaGE version 1.2.0.
    """
    cli_definition = {
        'central_arguments': ['input_file', 'output_file'],
        'specific_arguments': [],
        'description': ('Translates meta-data relating to the grid_id '
                        'attribute from StaGE version 1.1.0 to StaGE '
                        'version 1.2.0. Files that have no "grid_id" '
                        'attribute are not recognised as v1.1.0 and are '
                        'not changed. Has no effect if input_file and '
                        'output_file are the same and contain a cube with '
                        'non-v1.1.0 meta-data.')
    }
    args = ArgParser(**cli_definition).parse_args(args=argv)

    cube = load_cube(args.input_filepath)
    cube_changed = update_stage_v110_metadata(cube)

    # Create normalised file paths to make them comparable
    in_file_norm = os.path.normpath(args.input_filepath)
    out_file_norm = os.path.normpath(args.output_filepath)
    if cube_changed or in_file_norm != out_file_norm:
        # Ensure data are not lazy in case we are writing back to the same
        # file.
        cube.data
        save_netcdf(cube, args.output_filepath)

def main(argv=None):
    """Translate meta-data relating to the grid_id attribute from StaGE
    version 1.1.0 to StaGE version 1.2.0.
    """
    cli_definition = {
        'central_arguments': ['input_file', 'output_file'],
        'specific_arguments': [],
        'description': ('Translates meta-data relating to the grid_id '
                        'attribute from StaGE version 1.1.0 to StaGE '
                        'version 1.2.0. Files that have no "grid_id" '
                        'attribute are not recognised as v1.1.0 and are '
                        'not changed. Has no effect if input_file and '
                        'output_file are the same and contain a cube with '
                        'non-v1.1.0 meta-data.')
    }
    args = ArgParser(**cli_definition).parse_args(args=argv)

    # Load Cube
    cube = load_cube(args.input_filepath, no_lazy_load=True)
    # Process Cube
    cube_changed = process(cube)
    # Save Cube
    # Create normalised file paths to make them comparable
    in_file_norm = os.path.normpath(args.input_filepath)
    out_file_norm = os.path.normpath(args.output_filepath)
    if cube_changed or in_file_norm != out_file_norm:
        save_netcdf(cube, args.output_filepath)

def test_create_argparser_compulsory_and_specific_arguments(self):
    """Test that creating an ArgParser with compulsory and specific
    arguments adds both of these and no others."""
    compulsory_arguments = {'foo': (['--foo'], {})}
    specific_arguments = [(['--bar'], {})]

    # it doesn't matter what the centralized arguments are, because we
    # select None of them - patch only the COMPULSORY_ARGUMENTS so we know
    # that `foo` is added from here
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        parser = ArgParser(central_arguments=None,
                           specific_arguments=specific_arguments)
        args = parser.parse_args()
        args = vars(args).keys()
        self.assertCountEqual(args, ['foo', 'bar'])

def test_create_argparser_only_specific_arguments(self):
    """Test that creating an ArgParser with only specific arguments adds
    only the specific arguments."""
    compulsory_arguments = {}
    specific_arguments = [(['--foo'], {})]

    # it doesn't matter what the centralized arguments are, because we
    # select None of them - patch the COMPULSORY_ARGUMENTS to be an empty
    # dict so that we don't add any of them
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        parser = ArgParser(central_arguments=None,
                           specific_arguments=specific_arguments)
        args = parser.parse_args()
        args = vars(args).keys()
        self.assertCountEqual(args, ['foo'])

def main(argv=None):
    """Load in the arguments and ensure they are set correctly. Then run
    the time-lagged ensembles on the input cubes.
    """
    parser = ArgParser(
        description='This combines the realizations from different '
                    'forecast cycles into one cube. It does this by '
                    'taking an input CubeList containing forecasts from '
                    'different cycles and merges them into a single cube, '
                    'removing any metadata that does not match.')
    parser.add_argument('input_filenames', metavar='INPUT_FILENAMES',
                        nargs="+", type=str,
                        help='Paths to input NetCDF files for the '
                             'time-lagged ensemble to combine the '
                             'realizations.')
    parser.add_argument('output_file', metavar='OUTPUT_FILE',
                        help='The output file for the processed NetCDF.')
    args = parser.parse_args(args=argv)

    # Load the cubes
    cubes = iris.cube.CubeList([])
    for filename in args.input_filenames:
        new_cube = load_cube(filename)
        cubes.append(new_cube)
    # Process Cubes
    result = process(cubes)
    # Save Cube
    save_netcdf(result, args.output_file)

def test_create_argparser_compulsory_and_centralized_arguments(self):
    """Test that creating an ArgParser with compulsory and centralized
    arguments adds both of these and no others."""
    compulsory_arguments = {'foo': (['--foo'], {})}
    centralized_arguments = {'bar': (['--bar'], {})}

    # patch the COMPULSORY_ARGUMENTS so we know that `foo` exists
    # and the CENTRALIZED_ARGUMENTS so we know that `bar` exists.
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        with patch('improver.argparser.ArgParser.CENTRALIZED_ARGUMENTS',
                   centralized_arguments):
            parser = ArgParser(central_arguments=['bar'],
                               specific_arguments=None)
            args = parser.parse_args()
            args = vars(args).keys()
            self.assertCountEqual(args, ['foo', 'bar'])

def test_create_argparser_all_arguments(self):
    """Test that creating an ArgParser with compulsory, centralized and
    specific arguments adds the arguments from all 3 collections."""
    compulsory_arguments = {'foo': (['--foo'], {})}
    centralized_arguments = {'bar': (['--bar'], {})}
    specific_arguments = [(['--baz'], {})]

    # patch both the COMPULSORY_ARGUMENTS and CENTRALIZED_ARGUMENTS, so
    # that `foo` and `bar` are added from these (respectively)
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        with patch('improver.argparser.ArgParser.CENTRALIZED_ARGUMENTS',
                   centralized_arguments):
            parser = ArgParser(central_arguments=['bar'],
                               specific_arguments=specific_arguments)
            args = parser.parse_args()
            args = vars(args).keys()
            self.assertCountEqual(args, ['foo', 'bar', 'baz'])

def test_create_argparser_only_centralized_arguments(self):
    """Test that creating an ArgParser with only centralized arguments
    adds only the selected centralized arguments."""
    compulsory_arguments = {}
    centralized_arguments = {'foo': (['--foo'], {})}

    # patch the COMPULSORY_ARGUMENTS to an empty dict (so there are none)
    # and patch CENTRALIZED_ARGUMENTS so we know that `foo` can be
    # selected from it
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        with patch('improver.argparser.ArgParser.CENTRALIZED_ARGUMENTS',
                   centralized_arguments):
            parser = ArgParser(central_arguments=['foo'],
                               specific_arguments=None)
            args = parser.parse_args()
            args = vars(args).keys()
            self.assertCountEqual(args, ['foo'])

def test_error_raised(self, args='foo', method='bar'):
    """Test that an exception is raised containing the args and method."""
    msg = 'Method: {} does not accept arguments: {}'.format(method, args)
    # argparser will write to stderr independently of SystemExit
    with open(os.devnull, 'w') as file_handle:
        with patch('sys.stderr', file_handle):
            with self.assertRaises(SystemExit, msg=msg):
                ArgParser().wrong_args_error(args, method)

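# For context, a hedged sketch of the behaviour this test assumes:
# wrong_args_error presumably delegates to argparse.ArgumentParser.error(),
# which writes the message to stderr and raises SystemExit, e.g.
#
#   def wrong_args_error(self, args, method):
#       msg = 'Method: {} does not accept arguments: {}'
#       self.error(msg.format(method, args))
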
def test_create_argparser_fails_with_unknown_centralized_argument(self):
    """Test that we raise an exception when attempting to retrieve
    centralized arguments which are not in the centralized argument
    dictionary."""
    centralized_arguments = {'foo': (['--foo'], {})}
    central_args_to_fetch = ('missing_central_arg',)

    # patch the CENTRALIZED_ARGUMENTS so we know that
    # `missing_central_arg` is not there, and we can raise an exception
    with patch('improver.argparser.ArgParser.CENTRALIZED_ARGUMENTS',
               centralized_arguments):
        with self.assertRaises(KeyError):
            ArgParser(central_arguments=central_args_to_fetch,
                      specific_arguments=None)

def test_profile_is_not_called_when_disabled(self):
    """Test that calling parse_args does not enable profiling when the
    --profile option is not added."""
    # temporarily patch compulsory args so that profiling is disabled by
    # default
    compulsory_arguments = {
        'profile': (['--profile'], {'default': False}),
        'profile_file': (['--profile-file'], {'default': None})
    }
    with patch('improver.argparser.ArgParser.COMPULSORY_ARGUMENTS',
               compulsory_arguments):
        with patch('improver.argparser.profile_hook_enable') as \
                mock_profile:
            parser = ArgParser(central_arguments=None,
                               specific_arguments=None)
            parser.parse_args()
            self.assertEqual(mock_profile.call_count, 0)

def test_adding_empty_argument_list_does_nothing(self):
    """Test that attempting to add an empty list of argspecs to the
    ArgParser does not add any new arguments."""
    args_to_add = []

    # add a specific (optional) argument - ensures that even if there are
    # no compulsory arguments, we have something...
    # adding arguments after calling parse_args/args will do nothing, so
    # instead create 2 instances:
    parser1 = ArgParser(central_arguments=None,
                        specific_arguments=[[['--optional'], {}]])
    parser2 = ArgParser(central_arguments=None,
                        specific_arguments=[[['--optional'], {}]])
    parser2.add_arguments(args_to_add)
    self.assertEqual(parser1.parse_args(), parser2.parse_args())

def main(argv=None):
    """Load in the arguments and ensure they are set correctly. Then run
    the time-lagged ensembles on the input cubes."""
    parser = ArgParser(
        description='This combines the realizations from different '
                    'forecast cycles into one cube. It does this by '
                    'taking an input CubeList containing forecasts from '
                    'different cycles and merges them into a single cube, '
                    'removing any metadata that does not match.')
    parser.add_argument('input_filenames', metavar='INPUT_FILENAMES',
                        nargs="+", type=str,
                        help='Paths to input NetCDF files for the '
                             'time-lagged ensemble to combine the '
                             'realizations.')
    parser.add_argument('output_file', metavar='OUTPUT_FILE',
                        help='The output file for the processed NetCDF.')
    args = parser.parse_args(args=argv)

    # Load the cubes
    cubes = iris.cube.CubeList([])
    for filename in args.input_filenames:
        new_cube = load_cube(filename)
        cubes.append(new_cube)

    # Warn if only a single file is input
    if len(cubes) == 1:
        warnings.warn('Only a single cube input, so time lagging will '
                      'have no effect.')
        save_netcdf(cubes[0], args.output_file)
    # Raise an error if the validity times do not match
    else:
        for i, this_cube in enumerate(cubes):
            for later_cube in cubes[i + 1:]:
                if this_cube.coord('time') != later_cube.coord('time'):
                    msg = ('Cubes with mismatched validity times are not '
                           'compatible.')
                    raise ValueError(msg)
        result = GenerateTimeLaggedEnsemble().process(cubes)
        save_netcdf(result, args.output_file)

def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description=('Reads input orography and landmask fields. Creates '
                     'a series of topographic zone weights to indicate '
                     'where an orography point sits within the defined '
                     'topographic bands. If the orography point is in the '
                     'centre of a topographic band, then a single band '
                     'will have a weight of 1.0. If the orography point '
                     'is at the edge of a topographic band, then the '
                     'upper band will have a 0.5 weight whilst the lower '
                     'band will also have a 0.5 weight. Otherwise, the '
                     'weight will vary linearly between the centre of a '
                     'topographic band and the edge.'))
    parser.add_argument('input_filepath_standard_orography',
                        metavar='INPUT_FILE_STANDARD_OROGRAPHY',
                        help=('A path to an input NetCDF orography file '
                              'to be processed'))
    parser.add_argument('output_filepath', metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')
    parser.add_argument('--input_filepath_landmask',
                        metavar='INPUT_FILE_LAND',
                        help=('A path to an input NetCDF land mask file '
                              'to be processed. If provided, sea points '
                              'will be masked and set to the default fill '
                              'value. If no land mask is provided, '
                              'weights will be generated for sea points '
                              'as well as land, included in the '
                              'appropriate topographic band.'))
    parser.add_argument('--force', dest='force', default=False,
                        action='store_true',
                        help=('If keyword is set (i.e. True), ancillaries '
                              'will be generated even if doing so will '
                              'overwrite existing files'))
    parser.add_argument('--thresholds_filepath',
                        metavar='THRESHOLDS_FILEPATH', default=None,
                        help=("The path to a json file which can be used "
                              "to set the number and size of topographic "
                              "bounds. If unset, a default bounds "
                              "dictionary will be used: "
                              "{'bounds': [[-500., 50.], [50., 100.], "
                              "[100., 150.], [150., 200.], [200., 250.], "
                              "[250., 300.], [300., 400.], [400., 500.], "
                              "[500., 650.], [650., 800.], [800., 950.], "
                              "[950., 6000.]], 'units': 'm'}"))
    args = parser.parse_args(args=argv)

    thresholds_dict = load_json_or_none(args.thresholds_filepath)

    if not os.path.exists(args.output_filepath) or args.force:
        orography = load_cube(args.input_filepath_standard_orography)
        landmask = None
        if args.input_filepath_landmask:
            try:
                landmask = load_cube(args.input_filepath_landmask)
            except IOError as err:
                msg = ('Loading land mask has been unsuccessful: {}. '
                       'This may be because the land mask could not be '
                       'located in {}; run '
                       'improver-generate-landmask-ancillary '
                       'first.').format(err, args.input_filepath_landmask)
                raise IOError(msg)
        result = process(landmask, orography, thresholds_dict)
        # Save Cube
        save_netcdf(result, args.output_filepath)
    else:
        print('File already exists here: ', args.output_filepath)

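# For illustration, a thresholds JSON file matching the default bounds
# shown in the help text above would start like this (abbreviated; the
# file path itself is user-supplied):
#
#   {"bounds": [[-500.0, 50.0], [50.0, 100.0], [100.0, 150.0]],
#    "units": "m"}
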
def main(argv=None):
    """Calculate orographic enhancement of precipitation from model
    pressure, temperature, relative humidity and wind input files."""
    parser = ArgParser(
        description='Calculate orographic enhancement using the '
                    'ResolveWindComponents() and OrographicEnhancement() '
                    'plugins. Outputs data on the high resolution '
                    'orography grid and regridded to the coarser '
                    'resolution of the input diagnostic variables.')
    parser.add_argument('temperature_filepath',
                        metavar='TEMPERATURE_FILEPATH',
                        help='Full path to input NetCDF file of '
                             'temperature on height levels')
    parser.add_argument('humidity_filepath', metavar='HUMIDITY_FILEPATH',
                        help='Full path to input NetCDF file of relative '
                             'humidity on height levels')
    parser.add_argument('pressure_filepath', metavar='PRESSURE_FILEPATH',
                        help='Full path to input NetCDF file of pressure '
                             'on height levels')
    parser.add_argument('windspeed_filepath', metavar='WINDSPEED_FILEPATH',
                        help='Full path to input NetCDF file of wind '
                             'speed on height levels')
    parser.add_argument('winddir_filepath', metavar='WINDDIR_FILEPATH',
                        help='Full path to input NetCDF file of wind '
                             'direction on height levels')
    parser.add_argument('orography_filepath', metavar='OROGRAPHY_FILEPATH',
                        help='Full path to input NetCDF high resolution '
                             'orography ancillary. This should be on the '
                             'same or a finer resolution grid than the '
                             'input variables, and defines the grid on '
                             'which the orographic enhancement will be '
                             'calculated.')
    parser.add_argument('output_dir', metavar='OUTPUT_DIR',
                        help='Directory to write output orographic '
                             'enhancement files')
    parser.add_argument('--boundary_height', type=float, default=1000.,
                        help='Model height level to extract variables '
                             'for calculating orographic enhancement, as '
                             'proxy for the boundary layer.')
    parser.add_argument('--boundary_height_units', type=str, default='m',
                        help='Units of the boundary height specified for '
                             'extracting model levels.')
    args = parser.parse_args(args=argv)

    constraint_info = (args.boundary_height, args.boundary_height_units)

    temperature = load_and_extract(args.temperature_filepath,
                                   *constraint_info)
    humidity = load_and_extract(args.humidity_filepath, *constraint_info)
    pressure = load_and_extract(args.pressure_filepath, *constraint_info)
    wind_speed = load_and_extract(args.windspeed_filepath, *constraint_info)
    wind_dir = load_and_extract(args.winddir_filepath, *constraint_info)

    # load high resolution orography
    orography = load_cube(args.orography_filepath)

    orogenh_high_res, orogenh_standard = process(
        temperature, humidity, pressure, wind_speed, wind_dir, orography)

    # generate file names
    fname_standard = os.path.join(args.output_dir,
                                  generate_file_name(orogenh_standard))
    fname_high_res = os.path.join(
        args.output_dir,
        generate_file_name(
            orogenh_high_res,
            parameter="orographic_enhancement_high_resolution"))

    # save output files
    save_netcdf(orogenh_standard, fname_standard)
    save_netcdf(orogenh_high_res, fname_high_res)

def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description="Calculate the continuous falling snow level.")
    parser.add_argument("temperature", metavar="TEMPERATURE",
                        help="Path to a NetCDF file of air temperatures "
                             "at heights (m) at the points for which the "
                             "continuous falling snow level is being "
                             "calculated.")
    parser.add_argument("relative_humidity", metavar="RELATIVE_HUMIDITY",
                        help="Path to a NetCDF file of relative "
                             "humidities at heights (m) at the points for "
                             "which the continuous falling snow level is "
                             "being calculated.")
    parser.add_argument("pressure", metavar="PRESSURE",
                        help="Path to a NetCDF file of air pressures at "
                             "heights (m) at the points for which the "
                             "continuous falling snow level is being "
                             "calculated.")
    parser.add_argument("orography", metavar="OROGRAPHY",
                        help="Path to a NetCDF file containing the "
                             "orography height in m of the terrain over "
                             "which the continuous falling snow level is "
                             "being calculated.")
    parser.add_argument("land_sea_mask", metavar="LAND_SEA_MASK",
                        help="Path to a NetCDF file containing the binary "
                             "land-sea mask for the points for which the "
                             "continuous falling snow level is being "
                             "calculated. Land points are set to 1, sea "
                             "points are set to 0.")
    parser.add_argument("output_filepath", metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")
    parser.add_argument("--precision", metavar="NEWTON_PRECISION",
                        default=0.005, type=float,
                        help="Precision to which the wet bulb temperature "
                             "is required; this is used by the Newton "
                             "iteration. Default value is 0.005.")
    parser.add_argument("--falling_level_threshold",
                        metavar="FALLING_LEVEL_THRESHOLD",
                        default=90.0, type=float,
                        help="Cutoff threshold for the wet-bulb integral "
                             "used to calculate the falling snow level. "
                             "This threshold indicates the level at which "
                             "falling snow is deemed to have melted to "
                             "become rain. The default value is 90.0, an "
                             "empirically derived value.")
    args = parser.parse_args(args=argv)

    # Load Cubes
    temperature = load_cube(args.temperature, no_lazy_load=True)
    relative_humidity = load_cube(args.relative_humidity,
                                  no_lazy_load=True)
    pressure = load_cube(args.pressure, no_lazy_load=True)
    orog = load_cube(args.orography, no_lazy_load=True)
    land_sea = load_cube(args.land_sea_mask, no_lazy_load=True)

    # Process Cubes
    result = process(temperature, relative_humidity, pressure, orog,
                     land_sea, args.precision,
                     args.falling_level_threshold)

    # Save Cube
    save_netcdf(result, args.output_filepath)

def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description="Calculate percentiled data over a given coordinate "
                    "by collapsing that coordinate. Typically used to "
                    "convert realization data into percentiled data, but "
                    "may calculate over any dimension coordinate. "
                    "Alternatively, calling this CLI with a dataset "
                    "containing probabilities will convert those to "
                    "percentiles using the ensemble copula coupling "
                    "plugin. If no particular percentiles are given at "
                    "which to calculate values and no 'number of "
                    "percentiles' to calculate is specified, the "
                    "following defaults will be used: [0, 5, 10, 20, 25, "
                    "30, 40, 50, 60, 70, 75, 80, 90, 95, 100]")
    parser.add_argument("input_filepath", metavar="INPUT_FILE",
                        help="A path to an input NetCDF file to be "
                             "processed")
    parser.add_argument("output_filepath", metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF")
    parser.add_argument("--coordinates", metavar="COORDINATES_TO_COLLAPSE",
                        nargs="+",
                        help="Coordinate or coordinates over which to "
                             "collapse data and calculate percentiles; "
                             "e.g. 'realization' or 'latitude longitude'. "
                             "This argument must be provided when "
                             "collapsing a coordinate or coordinates to "
                             "create percentiles, but is redundant when "
                             "converting probabilities to percentiles and "
                             "may be omitted. The coordinate(s) will be "
                             "removed and replaced by a percentile "
                             "coordinate.")
    parser.add_argument('--ecc_bounds_warning', default=False,
                        action='store_true',
                        help='If True, where calculated percentiles are '
                             'outside the ECC bounds range, raise a '
                             'warning rather than an exception.')
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("--percentiles", metavar="PERCENTILES", nargs="+",
                       default=None, type=float,
                       help="Optional definition of percentiles at which "
                            "to calculate data, e.g. "
                            "--percentiles 0 33.3 66.6 100")
    group.add_argument('--no-of-percentiles', default=None, type=int,
                       metavar='NUMBER_OF_PERCENTILES',
                       help="Optional definition of the number of "
                            "percentiles to be generated; these are "
                            "distributed regularly with the aim of "
                            "dividing into blocks of equal probability.")
    args = parser.parse_args(args=argv)

    # Load Cube
    cube = load_cube(args.input_filepath)

    # Process Cube
    result = process(cube, args.coordinates, args.ecc_bounds_warning,
                     args.percentiles, args.no_of_percentiles)

    # Save Cube
    save_netcdf(result, args.output_filepath)

def main(argv=None):
    """Apply lapse rates to temperature data."""
    parser = ArgParser(description='Apply downscaling temperature '
                                   'adjustment using calculated lapse '
                                   'rate.')
    parser.add_argument('temperature_filepath',
                        metavar='TEMPERATURE_FILEPATH',
                        help='Full path to input temperature NetCDF file')
    parser.add_argument('lapse_rate_filepath',
                        metavar='LAPSE_RATE_FILEPATH',
                        help='Full path to input lapse rate NetCDF file')
    parser.add_argument('source_orography', metavar='SOURCE_OROG_FILE',
                        help='Full path to NetCDF file containing the '
                             'source model orography')
    parser.add_argument('target_orography', metavar='TARGET_OROG_FILE',
                        help='Full path to target orography NetCDF file '
                             '(to which temperature will be downscaled)')
    parser.add_argument('output_file', metavar='OUTPUT_FILE',
                        help='File name to write lapse rate adjusted '
                             'temperature data')
    args = parser.parse_args(args=argv)

    # Load Cubes
    temperature = load_cube(args.temperature_filepath)
    lapse_rate = load_cube(args.lapse_rate_filepath)
    source_orog = load_cube(args.source_orography)
    target_orog = load_cube(args.target_orography)

    # Process Cubes
    adjusted_temperature = process(temperature, lapse_rate, source_orog,
                                   target_orog)

    # Save Cube
    save_netcdf(adjusted_temperature, args.output_file)

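# A hedged sketch of the adjustment process() presumably performs (the
# standard lapse-rate downscaling step; the actual implementation may also
# handle regridding and masking):
#
#   adjusted = temperature + lapse_rate * (target_orog - source_orog)
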