Example #1
    def test_multiple_cubes_same_validity_time():
        """Basic test using several input cubes with same validity times
        but with different forecast_period."""
        # Set up expected dataframe.
        validity_date = dt.utcfromtimestamp(1487311200).date()
        data = [[
            validity_date, 600, 1000, "air_temperature", "IMPRO", 280., 281.,
            282.
        ]]

        columns = [
            "validity_date", "validity_time", "station_id", "cf_name",
            "exp_id", "fcr_tplus000", "fcr_tplus001", "fcr_tplus002"
        ]
        expected_df = pd.DataFrame(data, columns=columns)
        expected_df = expected_df.set_index([
            "validity_date", "validity_time", "station_id", "cf_name", "exp_id"
        ])
        expected_df.columns.name = "forecast_period"
        # Call the plugin.
        cubes = [
            set_up_spot_cube(280 + i, forecast_period=i, number_of_sites=1)
            for i in range(3)
        ]
        cubes = iris.cube.CubeList(cubes)
        plugin = VerificationTable("output", "csv", "improver", "IMPRO", 0)
        plugin.to_dataframe(cubes)
        result = plugin.df
        assert_frame_equal(expected_df, result)
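
Note: the snippets in these examples rely on the imports below; this is a minimal sketch, assuming `set_up_spot_cube` and `VerificationTable` are provided by the project's own test helpers and plugin module, whose import paths are not shown in the examples.

# Imports assumed by the example tests (sketch only).
from datetime import datetime as dt

import iris
import numpy as np
import pandas as pd
from pandas.testing import assert_frame_equal  # pandas.util.testing in older pandas

# set_up_spot_cube and VerificationTable come from the project itself; their
# import paths are not shown in the examples, so none is guessed here.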
Example #2
    def test_single_cube():
        """Basic test using one input cube."""

        plugin = VerificationTable("csv", "output", "improver", "nbhood", 3600)
        test_dataframe = pd.DataFrame(data=np.array([280.0, 280.0, 280.0]),
                                      columns=["fcr_tplus000"])
        plugin.ensure_all_forecast_columns(test_dataframe)
        result = test_dataframe
        expected_dataframe = pd.DataFrame(
            data=np.array([[280.0, np.nan], [280.0, np.nan], [280.0, np.nan]]),
            columns=["fcr_tplus000", "fcr_tplus001"])
        assert_frame_equal(expected_dataframe, result)
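
The test above exercises `ensure_all_forecast_columns`, which, judging by the expected output, pads the frame so that every forecast-period column up to the configured maximum lead time is present, filling new columns with NaN. A minimal sketch of that behaviour follows; it is an assumption for illustration, not the project's implementation, and the `max_lead_time_hours` parameter is hypothetical.

import numpy as np

def ensure_all_forecast_columns(dataframe, max_lead_time_hours):
    # Sketch only: add any missing fcr_tplusNNN columns, filled with NaN,
    # so columns 000..max_lead_time_hours are all present.
    for hour in range(max_lead_time_hours + 1):
        column = "fcr_tplus{:03d}".format(hour)
        if column not in dataframe.columns:
            dataframe[column] = np.nan

With the input frame from the test and max_lead_time_hours=1, this would add an all-NaN "fcr_tplus001" column alongside the existing "fcr_tplus000" column, matching the expected result.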
Example #3
    def test_single_cube_extra_data(self):
        """Basic test using one input cube with an extra point in the
        percentile dimension."""
        # Set up cubes
        cube = set_up_spot_cube(280)
        second_cube = cube.copy()
        second_cube.coord("percentile").points = np.array([60.0])
        cubelist = iris.cube.CubeList([cube, second_cube])
        cubes = cubelist.concatenate()
        plugin = VerificationTable("csv", "output", "improver", "IMPRO", 0)
        message = "Dimensions that are not described by the pivot_dim or "\
                  "coord_to_slice_over must only have one point in. "\
                  "Dimension '1' has length '2' and is associated with the "\
                  "'percentile' coordinate."
        with self.assertRaisesRegexp(ValueError, message):
            plugin.to_dataframe(cubes)
Example #4
    def test_basic_repr(self):
        """Basic test of string representation"""
        expected_result = ("<VerificationTable: csv, output, improver, "
                           "nbhood, 54>")
        result = str(
            VerificationTable("csv", "output", "improver", "nbhood", 54))
        self.assertEqual(expected_result, result)
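
Example #4 fixes the plugin's string representation. A `__repr__` consistent with that expected string could look like the sketch below; the class and attribute names are hypothetical stand-ins, since only the constructor argument order and the output format are visible in the test.

class VerificationTableSketch:
    # Hypothetical stand-in for VerificationTable, showing only a __repr__
    # consistent with Example #4; attribute names are guesses.
    def __init__(self, output_format, output_path, model, exp_id, max_lead_time):
        self.output_format = output_format
        self.output_path = output_path
        self.model = model
        self.exp_id = exp_id
        self.max_lead_time = max_lead_time

    def __repr__(self):
        return "<VerificationTable: {}, {}, {}, {}, {}>".format(
            self.output_format, self.output_path, self.model,
            self.exp_id, self.max_lead_time)

str(VerificationTableSketch("csv", "output", "improver", "nbhood", 54)) then yields the expected "<VerificationTable: csv, output, improver, nbhood, 54>".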
Example #5
    def test_single_cube_single_site():
        """Basic test using one input cube with a single site in it."""
        # Set up expected dataframe.
        validity_date = dt.utcfromtimestamp(1487311200).date()
        data = [[validity_date, 600, 1000, "air_temperature", "IMPRO", 280.]]
        columns = [
            "validity_date", "validity_time", "station_id", "cf_name",
            "exp_id", "fcr_tplus000"
        ]
        expected_df = pd.DataFrame(data, columns=columns)
        expected_df = expected_df.set_index([
            "validity_date", "validity_time", "station_id", "cf_name", "exp_id"
        ])
        expected_df.columns.name = "forecast_period"
        # Call the plugin.
        cubes = iris.cube.CubeList([set_up_spot_cube(280, number_of_sites=1)])
        plugin = VerificationTable("output", "csv", "improver", "IMPRO", 0)
        plugin.to_dataframe(cubes)
        result = plugin.df
        assert_frame_equal(expected_df, result)