def test_non_monotonic_realizations(self):
        """Test handling of case where realization coordinates cannot be
        directly concatenated into a monotonic coordinate"""
        data = 275.0 * np.ones((3, 3, 3), dtype=np.float32)
        cycletime = dt(2019, 6, 24, 9)
        cube1 = set_up_variable_cube(
            data,
            realizations=np.array([15, 16, 17], dtype=np.int32),
            time=cycletime,
            frt=dt(2019, 6, 24, 8),
        )
        cube2 = set_up_variable_cube(
            data,
            realizations=np.array([0, 18, 19], dtype=np.int32),
            time=cycletime,
            frt=cycletime,
        )

        expected_cube = set_up_variable_cube(
            275.0 * np.ones((6, 3, 3), dtype=np.float32),
            realizations=np.array([0, 15, 16, 17, 18, 19], dtype=np.int32),
            time=cycletime,
            frt=cycletime,
        )

        input_cubelist = iris.cube.CubeList([cube1, cube2])
        result = GenerateTimeLaggedEnsemble().process(input_cubelist)
        self.assertEqual(result, expected_cube)
        self.assertEqual(result.coord("realization").dtype, np.int32)
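# A minimal numpy sketch of the expectation tested above: the realization
# coordinates [15, 16, 17] and [0, 18, 19] cannot be concatenated directly
# into a monotonic coordinate, so the expected merged cube carries their
# sorted union. This only illustrates the expected values, not the plugin's
# internal implementation.
import numpy as np

merged = np.sort(np.concatenate([
    np.array([15, 16, 17], dtype=np.int32),
    np.array([0, 18, 19], dtype=np.int32),
]))
assert merged.tolist() == [0, 15, 16, 17, 18, 19]
assert merged.dtype == np.int32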
 def test_duplicate_realizations_more_input_cubes(self):
     """Test that the expected metadata is correct with different
        realizations and that realizations are renumbered if a
        duplicate is found, with 3 input cubes."""
     self.input_cube2.coord("realization").points = np.array([6, 7, 8])
     input_cube3 = self.input_cube2.copy()
     input_cube3.coord("forecast_reference_time").points = np.array(
         input_cube3.coord("forecast_reference_time").points[0] + 1)
     input_cube3.coord("forecast_period").points = np.array(
         input_cube3.coord("forecast_period").points[0] - 1)
     input_cube3.coord("realization").points = np.array([7, 8, 9])
     input_cubelist = iris.cube.CubeList(
         [self.input_cube, self.input_cube2, input_cube3])
     result = GenerateTimeLaggedEnsemble().process(
         input_cubelist)
     expected_forecast_period = np.array(2)
     expected_forecast_ref_time = np.array([402293.])
     expected_realizations = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8])
     self.assertArrayAlmostEqual(
         result.coord("forecast_period").points, expected_forecast_period)
     self.assertArrayAlmostEqual(
         result.coord("forecast_reference_time").points,
         expected_forecast_ref_time)
     self.assertArrayAlmostEqual(
         result.coord("realization").points, expected_realizations)
 def test_basic(self):
     """Test that the expected metadata is correct after a simple test"""
     result = GenerateTimeLaggedEnsemble().process(self.input_cubelist)
     expected_forecast_period = np.array(3)
     expected_forecast_ref_time = np.array([402292.])
     expected_realizations = np.array([0, 1, 2, 3, 4, 5])
     self.assertArrayAlmostEqual(
         result.coord("forecast_period").points, expected_forecast_period)
     self.assertArrayAlmostEqual(
         result.coord("forecast_reference_time").points,
         expected_forecast_ref_time)
     self.assertArrayAlmostEqual(
         result.coord("realization").points, expected_realizations)
 def test_cycletime(self):
     """Test that the expected metadata is correct with a different
        cycletime"""
     result = GenerateTimeLaggedEnsemble("20151123T0600Z").process(
         self.input_cubelist)
     expected_forecast_period = np.array(1)
     expected_forecast_ref_time = np.array([402294.])
     expected_realizations = np.array([0, 1, 2, 3, 4, 5])
     self.assertArrayAlmostEqual(
         result.coord("forecast_period").points, expected_forecast_period)
     self.assertArrayAlmostEqual(
         result.coord("forecast_reference_time").points,
         expected_forecast_ref_time)
     self.assertArrayAlmostEqual(
         result.coord("realization").points, expected_realizations)
 def test_realizations(self):
     """Test that the expected metadata is correct with a different
        realizations"""
     self.input_cube2.coord("realization").points = np.array([6, 7, 8])
     result = GenerateTimeLaggedEnsemble().process(self.input_cubelist)
     expected_forecast_period = np.array(3)
     expected_forecast_ref_time = np.array([402292.])
     expected_realizations = np.array([0, 1, 2, 6, 7, 8])
     self.assertArrayAlmostEqual(
         result.coord("forecast_period").points, expected_forecast_period)
     self.assertArrayAlmostEqual(
         result.coord("forecast_reference_time").points,
         expected_forecast_ref_time)
     self.assertArrayAlmostEqual(
         result.coord("realization").points, expected_realizations)
 def test_attributes(self):
     """Test what happens if input cubes have different attributes"""
     self.input_cube.attributes = {'institution': 'Met Office',
                                   'history': 'Process 1'}
     self.input_cube2.attributes = {'institution': 'Met Office',
                                    'history': 'Process 2'}
     result = GenerateTimeLaggedEnsemble().process(
         self.input_cubelist)
     expected_attributes = {'institution': 'Met Office'}
     self.assertEqual(result.attributes, expected_attributes)
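# The expectation above: attributes common to every input survive the merge,
# while mismatched ones ('history' here) are dropped. A minimal sketch of that
# outcome with plain dict operations (an illustration of the result, not the
# plugin's metadata-handling code):
attrs1 = {'institution': 'Met Office', 'history': 'Process 1'}
attrs2 = {'institution': 'Met Office', 'history': 'Process 2'}
common = {key: value for key, value in attrs1.items()
          if attrs2.get(key) == value}
assert common == {'institution': 'Met Office'}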
 def test_single_cube(self, warning_list=None):
     """Test only one input cube returns cube unchanged.
        Also test the warning that is raised."""
     input_cubelist = iris.cube.CubeList([self.input_cube])
     expected_cube = self.input_cube.copy()
     result = GenerateTimeLaggedEnsemble().process(input_cubelist)
     self.assertEqual(result, expected_cube)
     warning_msg = "Only a single cube so no differences will be found"
     self.assertTrue(any(item.category == UserWarning
                         for item in warning_list))
     self.assertTrue(any(warning_msg in str(item)
                         for item in warning_list))
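# The warning_list argument above is presumably populated by a
# warning-capturing test decorator that is not shown in this snippet. The same
# check can be expressed with the standard library; lag_single_cube below is a
# hypothetical stand-in that mirrors the single-cube branch of the plugin.
import warnings

def lag_single_cube(cube_list):
    # Hypothetical stand-in: warn and return the cube unchanged.
    if len(cube_list) == 1:
        warnings.warn("Only a single cube so no differences will be found")
        return cube_list[0]

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    lag_single_cube(["only_cube"])
assert any(issubclass(w.category, UserWarning) for w in caught)
assert any("Only a single cube" in str(w.message) for w in caught)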
 def test_attributes(self):
     """Test what happens if input cubes have different attributes"""
     self.input_cube.attributes = {
         "institution": "Met Office",
         "history": "Process 1",
     }
     self.input_cube2.attributes = {
         "institution": "Met Office",
         "history": "Process 2",
     }
     result = GenerateTimeLaggedEnsemble().process(self.input_cubelist)
     expected_attributes = {"institution": "Met Office"}
     self.assertEqual(result.attributes, expected_attributes)
 def test_basic(self):
     """Test that the expected metadata is correct after a simple test"""
     result = GenerateTimeLaggedEnsemble().process(self.input_cubelist)
     expected_realizations = [0, 1, 2, 3, 4, 5]
     self.assertArrayAlmostEqual(
         result.coord("forecast_period").points, self.expected_fp)
     self.assertArrayAlmostEqual(
         result.coord("forecast_reference_time").points, self.expected_frt)
     self.assertArrayAlmostEqual(
         result.coord("realization").points, expected_realizations)
     self.assertEqual(result.coord("realization").dtype, np.int32)
 def test_duplicate_realizations(self):
     """Test that the expected metadata is correct with different
        realizations and that realizations are renumbered if a
        duplicate is found"""
     self.input_cube2.coord("realization").points = np.array([0, 7, 8])
     result = GenerateTimeLaggedEnsemble().process(self.input_cubelist)
     expected_realizations = [0, 1, 2, 3, 4, 5]
     self.assertArrayAlmostEqual(
         result.coord("forecast_period").points, self.expected_fp)
     self.assertArrayAlmostEqual(
         result.coord("forecast_reference_time").points, self.expected_frt)
     self.assertArrayAlmostEqual(
         result.coord("realization").points, expected_realizations)
     self.assertEqual(result.coord("realization").dtype, np.int32)
def main(argv=None):
    """Load in the arguments and ensure they are set correctly. Then run
    the time-lagged ensembles on the input cubes."""
    parser = ArgParser(
        description='This combines the realizations from different forecast '
                    'cycles into one cube. It does this by taking an input '
                    'CubeList containing forecasts from different cycles '
                    'and merging them into a single cube, removing any '
                    'metadata that does not match.')
    parser.add_argument('input_filenames', metavar='INPUT_FILENAMES',
                        nargs="+", type=str,
                        help='Paths to the input NetCDF files whose '
                        'realizations are to be combined into a '
                        'time-lagged ensemble.')
    parser.add_argument('output_file', metavar='OUTPUT_FILE',
                        help='The output file for the processed NetCDF.')
    args = parser.parse_args(args=argv)

    # Load the cubes
    cubes = iris.cube.CubeList([])
    for filename in args.input_filenames:
        new_cube = load_cube(filename)
        cubes.append(new_cube)

    # Warns if a single file is input
    if len(cubes) == 1:
        warnings.warn('Only a single cube input, so time lagging will have '
                      'no effect.')
        save_netcdf(cubes[0], args.output_file)
    # Raises an error if the validity times do not match
    else:
        for i, this_cube in enumerate(cubes):
            for later_cube in cubes[i+1:]:
                if this_cube.coord('time') != later_cube.coord('time'):
                    msg = ("Cubes with mismatched validity times are not "
                           "compatible.")
                    raise ValueError(msg)
        result = GenerateTimeLaggedEnsemble().process(cubes)
        save_netcdf(result, args.output_file)
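# A hedged usage sketch for main() above. The NetCDF file names are
# hypothetical; all inputs must share a validity time, otherwise the
# ValueError branch is raised before anything is written.
if __name__ == '__main__':
    main(argv=['forecast_cycle_03Z.nc', 'forecast_cycle_04Z.nc',
               'lagged_ensemble.nc'])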
def process(cubes):
    """Module to run time-lagged ensembles.

    This combines the realization from different forecast cycles into one cube.
    It does this by taking an input Cubelist containing forecasts from
    different cycles and merges them into a single cube, removing any
    metadata that does not match.

    Args:
        cubes (iris.cube.CubeList):
            CubeList for the time-lagged ensemble to combine the realizations.

    Returns:
        iris.cube.Cube:
            Merged Cube.

    Raises:
        ValueError:
            If cubes have mismatched validity times.
    """

    # Warn if only a single cube is input
    if len(cubes) == 1:
        warnings.warn('Only a single cube input, so time lagging will have '
                      'no effect.')
        return cubes[0]
    # Raises an error if the validity times do not match
    else:
        for i, this_cube in enumerate(cubes):
            for later_cube in cubes[i + 1:]:
                if this_cube.coord('time') != later_cube.coord('time'):
                    msg = ("Cubes with mismatched validity times are not "
                           "compatible.")
                    raise ValueError(msg)
        return GenerateTimeLaggedEnsemble().process(cubes)
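# A minimal usage sketch for process() above, assuming the same load_cube and
# save_netcdf helpers used in main(); the file names are hypothetical.
def run_time_lagging(paths, output_path):
    cubes = iris.cube.CubeList([load_cube(path) for path in paths])
    lagged = process(cubes)
    save_netcdf(lagged, output_path)

# run_time_lagging(['forecast_cycle_03Z.nc', 'forecast_cycle_04Z.nc'],
#                  'lagged_ensemble.nc')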