Example 1
def main(argv=None):
    """Load in arguments for applying neighbourhood processing when using a
    mask."""
    from improver.argparser import ArgParser
    from improver import cli

    parser = ArgParser(
        description='Apply the requested neighbourhood method via the '
                    'ApplyNeighbourhoodProcessingWithAMask plugin to a file '
                    'with one diagnostic dataset in combination with a file '
                    'containing one or more masks. The mask dataset may have '
                    'an extra dimension compared to the input diagnostic. '
                    'In this case, the user specifies the name of '
                    'the extra coordinate and this coordinate is iterated '
                    'over so each mask is applied to separate slices over the'
                    ' input data. These intermediate masked datasets are then'
                    ' concatenated, resulting in a dataset that has been '
                    'processed using multiple masks and has gained an extra '
                    'dimension from the masking. There is also an option to '
                    're-mask the output dataset, so that after '
                    'neighbourhood processing, non-zero values are only '
                    'present for unmasked grid points. '
                    'Alternatively, the extra dimension gained through this '
                    'processing can be collapsed using a weighted average.')
    parser.add_argument('coord_for_masking', metavar='COORD_FOR_MASKING',
                        help='Coordinate to iterate over when applying a mask '
                             'to the neighbourhood processing.')
    parser.add_argument('input_filepath', metavar='INPUT_FILE',
                        help='A path to an input NetCDF file to be processed.')
    parser.add_argument('input_mask_filepath', metavar='INPUT_MASK_FILE',
                        help='A path to an input mask NetCDF file to be '
                             'used to mask the input file.')
    parser.add_argument('output_filepath', metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF.')
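    # The neighbourhood size can be given either as a single radius or as
    # radii varying with lead time; the two options are mutually exclusive.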
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--radius', metavar='RADIUS', type=float,
                       help='The radius (in m) for neighbourhood processing.')
    group.add_argument('--radii-by-lead-time',
                       metavar=('RADII_BY_LEAD_TIME', 'LEAD_TIME_IN_HOURS'),
                       nargs=2,
                       help='The radii for neighbourhood processing '
                       'and the associated lead times at which the radii are '
                       'valid. The radii are in metres whilst the lead times '
                       'have units of hours. The radii and lead times are '
                       'expected as individual comma-separated lists with '
                       'the list of radii given first, followed by a list of '
                       'lead times indicating at what lead time each radius '
                       'should be used. For example: 10000,12000,14000 1,2,3 '
                       'where a lead time of 1 hour uses a radius of 10000m, '
                       'a lead time of 2 hours uses a radius of 12000m, etc.')
    parser.add_argument('--sum_or_fraction', default="fraction",
                        choices=["sum", "fraction"],
                        help='The neighbourhood output can either be in the '
                             'form of a sum of the neighbourhood, or a '
                             'fraction calculated by dividing the sum of the '
                             'neighbourhood by the neighbourhood area. '
                             '"fraction" is the default option.')
    group2 = parser.add_mutually_exclusive_group()
    group2.add_argument('--re_mask', action='store_true',
                        help='If re_mask is set (i.e. True), the output data '
                             'following neighbourhood processing is '
                             're-masked. This re-masking removes any values '
                             'that have been generated by neighbourhood '
                             'processing at grid points that were '
                             'originally masked. '
                             'If not set, re_mask defaults to False and no '
                             're-masking is applied to the neighbourhood '
                             'processed output. Therefore, the neighbourhood '
                             'processing may result in values being present '
                             'in areas that were originally masked. This '
                             'allows the values in adjacent bands to be '
                             'weighted together if the additional dimension '
                             'from the masking process is collapsed.')
    group2.add_argument('--collapse_dimension', action='store_true',
                        help='Collapse the dimension from the mask, by doing '
                             'a weighted mean using the weights provided. '
                             'This is only suitable when the result is '
                             'left unmasked, so there is data to weight '
                             'between the points in the coordinate being '
                             'collapsed.')
    parser.add_argument('--weights_for_collapsing_dim', metavar='WEIGHTS',
                        default=None,
                        help='A path to a weights NetCDF file containing the '
                             'weights which are used for collapsing the '
                             'dimension gained through masking.')
    parser.add_argument('--intermediate_filepath', default=None,
                        help='If provided, the result after neighbourhood '
                             'processing, before collapsing the extra '
                             'dimension, is saved to the given filepath.')

    args = parser.parse_args(args=argv)

    # Load Cubes
    cube = cli.inputcube(args.input_filepath)
    mask_cube = cli.inputcube(args.input_mask_filepath)
    weights = (cli.inputcube(args.weights_for_collapsing_dim)
               if args.collapse_dimension else None)

    # Process cube; `process` is expected to be defined at module level in
    # this CLI file.
    # pylint: disable=E1123
    process(cube, mask_cube, weights, coord_for_masking=args.coord_for_masking,
            radius=args.radius, radii_by_lead_time=args.radii_by_lead_time,
            sum_or_fraction=args.sum_or_fraction, remask=args.re_mask,
            collapse_dimension=args.collapse_dimension,
            output=args.output_filepath,
            intermediate_output=args.intermediate_filepath)
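
A minimal invocation sketch, assuming this CLI's main() were called programmatically; the coordinate name, file paths, and radius value below are hypothetical placeholders, not values from the original code.

# Hypothetical call to main(); all paths and values are placeholders.
main([
    'topographic_zone',             # COORD_FOR_MASKING
    'precip.nc',                    # INPUT_FILE
    'topographic_bands_mask.nc',    # INPUT_MASK_FILE
    'precip_neighbourhooded.nc',    # OUTPUT_FILE
    '--radius', '20000',            # 20 km, given in metres
    '--collapse_dimension',
    '--weights_for_collapsing_dim', 'band_weights.nc',
])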
Example 2
    def test_basic(self, m):
        """Tests that inputcube passes load_cube and the given string to
        the mocked coercion helper."""
        result = inputcube("foo")
        m.assert_called_with(improver.utilities.load.load_cube, "foo")
        self.assertEqual(result, 'return')
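
The fragment above is a class method and relies on a mock m injected by a patch decorator that is not shown. A hedged reconstruction of the surrounding harness follows; the patch target improver.cli.maybe_coerce_with is an assumption about where the coercion helper lives, inferred from the assertion on load_cube.

import unittest
from unittest.mock import patch

import improver.utilities.load
from improver.cli import inputcube


class TestInputCube(unittest.TestCase):
    # The patch target is an assumption; the original fragment only shows
    # that a mock `m` receives load_cube and the input string.
    @patch('improver.cli.maybe_coerce_with', return_value='return')
    def test_basic(self, m):
        """Tests that inputcube passes load_cube and the given string to
        the mocked coercion helper."""
        result = inputcube("foo")
        m.assert_called_with(improver.utilities.load.load_cube, "foo")
        self.assertEqual(result, 'return')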
Example 3
def main(argv=None):
    """Load in arguments for the cube combiner plugin.
    """
    import warnings

    from improver.argparser import ArgParser
    from improver import cli

    parser = ArgParser(
        description="Combine the input files into a single file using "
        "the requested operation e.g. + - min max etc.")
    parser.add_argument("input_filenames",
                        metavar="INPUT_FILENAMES",
                        nargs="+",
                        type=str,
                        help="Paths to the input NetCDF files. Each input"
                        " file should be able to be loaded as a single "
                        " iris.cube.Cube instance. The resulting file"
                        " metadata will be based on the first file but"
                        " its metadata can be overwritten via"
                        " the metadata_jsonfile option.")
    parser.add_argument("output_filepath",
                        metavar="OUTPUT_FILE",
                        help="The output path for the processed NetCDF.")
    parser.add_argument("--operation",
                        metavar="OPERATION",
                        default="+",
                        choices=[
                            "+", "-", "*", "add", "subtract", "multiply",
                            "min", "max", "mean"
                        ],
                        help="Operation to use in combining NetCDF datasets"
                        " Default=+ i.e. add ",
                        type=str)
    parser.add_argument("--new-name",
                        metavar="NEW_NAME",
                        default=None,
                        help="New name for the resulting dataset. Will"
                        " default to the name of the first dataset if "
                        "not set.",
                        type=str)
    parser.add_argument("--metadata_jsonfile",
                        metavar="METADATA_JSONFILE",
                        default=None,
                        help="Filename for the json file containing "
                        "required changes to the metadata. "
                        " default=None",
                        type=str)
    parser.add_argument('--warnings_on',
                        action='store_true',
                        help="If warnings_on is set (i.e. True), "
                        "Warning messages where metadata do not match "
                        "will be given. Default=False",
                        default=False)

    args = parser.parse_args(args=argv)

    # Only attempt to load JSON metadata when a path was supplied.
    new_metadata = (cli.inputjson(args.metadata_jsonfile)
                    if args.metadata_jsonfile else None)
    # Load cubes
    cubelist = []
    new_cube_name = args.new_name
    for filename in args.input_filenames:
        new_cube = cli.inputcube(filename)
        cubelist.append(new_cube)
        if new_cube_name is None:
            new_cube_name = new_cube.name()
        if args.warnings_on:
            if (args.new_name is None and new_cube_name != new_cube.name()):
                msg = ("Defaulting to first cube name, {} but combining with"
                       "a cube with name, {}.".format(new_cube_name,
                                                      new_cube.name()))
                warnings.warn(msg)

    # Process cubes; `process` is expected to be defined at module level in
    # this CLI file.
    # pylint: disable=E1123
    process(*cubelist,
            operation=args.operation,
            new_name=new_cube_name,
            new_metadata=new_metadata,
            warnings_on=args.warnings_on,
            output=args.output_filepath)
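
A minimal invocation sketch for this combiner CLI; the filenames and the new name below are illustrative placeholders, not values from the original code.

# Hypothetical call to main(): take the gridpoint maximum of three cubes.
main([
    'acc_t1.nc', 'acc_t2.nc', 'acc_t3.nc',   # INPUT_FILENAMES
    'acc_max.nc',                             # OUTPUT_FILE
    '--operation', 'max',
    '--new-name', 'maximum_accumulation',
    '--warnings_on',
])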