def test_partial_aggregation_over_more_than_one_dim_on_multidimensional_coord(self):
    from cis.data_io.gridded_data import GriddedDataList, make_from_cube
    data1 = make_from_cube(make_mock_cube(time_dim_length=7, hybrid_pr_len=5))
    data2 = make_from_cube(make_mock_cube(time_dim_length=7, hybrid_pr_len=5, data_offset=1))
    datalist = GriddedDataList([data1, data2])
    cube_out = datalist.collapsed(['t', 'x'], how=self.kernel)

    result_data = numpy.array([[51.0, 52.0, 53.0, 54.0, 55.0],
                               [156.0, 157.0, 158.0, 159.0, 160.0],
                               [261.0, 262.0, 263.0, 264.0, 265.0],
                               [366.0, 367.0, 368.0, 369.0, 370.0],
                               [471.0, 472.0, 473.0, 474.0, 475.0]], dtype=np.float64)

    multidim_coord_points = numpy.array([1000000., 3100000., 5200000., 7300000., 9400000.],
                                        dtype=np.float64)

    assert_arrays_almost_equal(cube_out[0].data, result_data)
    assert_arrays_almost_equal(cube_out[1].data, result_data + 1)
    assert_arrays_almost_equal(cube_out[0].coord('surface_air_pressure').points, multidim_coord_points)
    assert_arrays_almost_equal(cube_out[1].coord('surface_air_pressure').points, multidim_coord_points)
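# The 'surface_air_pressure' auxiliary coordinate is multidimensional and spans
# the collapsed dimensions, so its points are reduced alongside the data. A
# minimal sketch of that expectation (illustrative only; the exact axis order
# depends on how make_mock_cube lays out the coordinate):
#
#     sap = data1.coord('surface_air_pressure').points
#     # Averaging over the collapsed axes should leave one value per remaining
#     # point, matching multidim_coord_points above.
#     sap.mean(axis=(1, 2))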
def test_complete_collapse_one_dim_using_moments_kernel(self):
    self.kernel = aggregation_kernels['moments']
    data1 = make_from_cube(make_5x3_lon_lat_2d_cube_with_missing_data())
    data1.var_name = 'var1'
    data2 = make_from_cube(make_5x3_lon_lat_2d_cube_with_missing_data())
    data2.var_name = 'var2'
    data2.data += 10
    data = GriddedDataList([data1, data2])
    output = data.collapsed(['x'], how=self.kernel)

    expect_mean = numpy.array([[5.5, 8.75, 9]])
    expect_stddev = numpy.array([numpy.sqrt(15), numpy.sqrt(26.25), numpy.sqrt(30)])
    expect_count = numpy.array([[4, 4, 4]])

    assert isinstance(output, GriddedDataList)
    assert len(output) == 6
    mean_1, stddev_1, count_1, mean_2, stddev_2, count_2 = output
    assert mean_1.var_name == 'var1'
    assert stddev_1.var_name == 'var1_std_dev'
    assert count_1.var_name == 'var1_num_points'
    assert mean_2.var_name == 'var2'
    assert stddev_2.var_name == 'var2_std_dev'
    assert count_2.var_name == 'var2_num_points'
    assert_arrays_almost_equal(mean_1.data, expect_mean)
    assert_arrays_almost_equal(mean_2.data, expect_mean + 10)
    assert_arrays_almost_equal(stddev_1.data, expect_stddev)
    assert_arrays_almost_equal(stddev_2.data, expect_stddev)
    assert_arrays_almost_equal(count_1.data, expect_count)
    assert_arrays_almost_equal(count_2.data, expect_count)
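# Sanity sketch of the expected numbers above (illustrative; assumes one of
# the five longitude points is masked at each latitude, leaving four valid
# points per column, and that longitude is the first data axis):
#
#     masked = numpy.ma.masked_invalid(data1.data)
#     masked.mean(axis=0)           # -> [5.5, 8.75, 9]
#     masked.std(axis=0, ddof=1)    # -> [sqrt(15), sqrt(26.25), sqrt(30)]
#     masked.count(axis=0)          # -> [4, 4, 4]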
def test_complete_collapse_two_dims_using_moments_kernel(self):
    self.kernel = aggregation_kernels['moments']
    data1 = make_from_cube(make_5x3_lon_lat_2d_cube_with_missing_data())
    data1.var_name = 'var1'
    data2 = make_from_cube(make_5x3_lon_lat_2d_cube_with_missing_data())
    data2.var_name = 'var2'
    data2.data += 10
    data = GriddedDataList([data1, data2])
    output = data.collapsed(['x', 'y'], how=self.kernel)

    expect_mean = numpy.array(7.75)
    expect_stddev = numpy.array(numpy.sqrt(244.25 / 11))
    expect_count = numpy.array(12)

    assert isinstance(output, GriddedDataList)
    assert len(output) == 6
    mean_1, stddev_1, count_1, mean_2, stddev_2, count_2 = output
    assert mean_1.var_name == 'var1'
    assert stddev_1.var_name == 'var1_std_dev'
    assert count_1.var_name == 'var1_num_points'
    assert mean_2.var_name == 'var2'
    assert stddev_2.var_name == 'var2_std_dev'
    assert count_2.var_name == 'var2_num_points'
    # Latitude area weighting means the means aren't quite right, so increase the rtol.
    assert numpy.allclose(mean_1.data, expect_mean, rtol=1e-3)
    assert numpy.allclose(mean_2.data, expect_mean + 10, rtol=1e-3)
    assert numpy.allclose(stddev_1.data, expect_stddev)
    assert numpy.allclose(stddev_2.data, expect_stddev)
    assert numpy.allclose(count_1.data, expect_count)
    assert numpy.allclose(count_2.data, expect_count)
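# The expected scalars follow from plain masked-array statistics over the
# twelve unmasked points (a minimal sketch, assuming three of the fifteen
# values are masked):
#
#     flat = numpy.ma.masked_invalid(data1.data)
#     flat.mean()          # ~7.75, before latitude weighting shifts it slightly
#     flat.std(ddof=1)     # sqrt(244.25 / 11), the sample standard deviation
#     flat.count()         # 12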
def test_aggregate_mean(self):
    from cis.data_io.gridded_data import GriddedDataList, make_from_cube
    data1 = make_from_cube(make_mock_cube())
    data2 = make_from_cube(make_mock_cube(data_offset=1))
    datalist = GriddedDataList([data1, data2])
    cube_out = datalist.collapsed(['y'], how=self.kernel)

    result1 = numpy.array([7, 8, 9])
    result2 = result1 + 1

    assert isinstance(cube_out, GriddedDataList)
    # There is a small deviation due to the weighting correction applied by Iris when completely collapsing.
    assert_arrays_almost_equal(result1, cube_out[0].data)
    assert_arrays_almost_equal(result2, cube_out[1].data)
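# Collapsing over 'y' (latitude) is area weighted by Iris, which is why the
# comparison above is "almost equal" rather than exact. An unweighted
# reference would be (illustrative; assumes latitude is the first data axis):
#
#     unweighted = data1.data.mean(axis=0)   # close to [7, 8, 9] but not identical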
def test_collapse_vertical_coordinate(self):
    from cis.data_io.gridded_data import GriddedDataList, make_from_cube
    data1 = make_from_cube(make_mock_cube(alt_dim_length=6))
    data2 = make_from_cube(make_mock_cube(alt_dim_length=6, data_offset=1))
    datalist = GriddedDataList([data1, data2])
    cube_out = datalist.collapsed(['z'], how=self.kernel)

    result1 = data1.data.mean(axis=2)
    result2 = result1 + 1

    assert isinstance(cube_out, GriddedDataList)
    # There is a small deviation due to the weighting correction applied by Iris when completely collapsing.
    assert_arrays_almost_equal(result1, cube_out[0].data)
    assert_arrays_almost_equal(result2, cube_out[1].data)
    assert numpy.array_equal(data1.coords('latitude')[0].points,
                             cube_out.coords('latitude')[0].points)
def test_collapse_vertical_coordinate_weighted_aggregator(self):
    """
    Use a weighted aggregator; no weights should be applied since we're only
    summing over the vertical dimension.
    """
    from cis.data_io.gridded_data import GriddedDataList, make_from_cube
    data1 = make_from_cube(make_mock_cube(alt_dim_length=6))
    data2 = make_from_cube(make_mock_cube(alt_dim_length=6, data_offset=1))
    datalist = GriddedDataList([data1, data2])
    cube_out = datalist.collapsed(['z'], how=iris.analysis.SUM)

    result1 = np.sum(data1.data, axis=2)
    result2 = np.sum(data2.data, axis=2)

    assert isinstance(cube_out, GriddedDataList)
    assert_arrays_almost_equal(result1, cube_out[0].data)
    assert_arrays_almost_equal(result2, cube_out[1].data)
    assert numpy.array_equal(data1.coords('latitude')[0].points,
                             cube_out.coords('latitude')[0].points)
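# iris.analysis.SUM is a weighted aggregator, but with no weights passed the
# collapse should reduce to a plain numpy sum over the vertical axis; an
# illustrative cross-check:
#
#     numpy.testing.assert_allclose(cube_out[0].data, data1.data.sum(axis=2))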