def test_pp_append_singles(self):
    # Test pp append saving - single cubes.
    cube = stock.simple_pp()
    # Appending a second identical cube must match the two-cube reference.
    ref_path = tests.get_result_path(('cube_to_pp', 'append_single.txt'))
    with self.cube_save_test(ref_path,
                             reference_cubes=[cube, cube]) as pp_path:
        iris.save(cube, pp_path)               # Create file
        iris.save(cube, pp_path, append=True)  # Append to file
    # Without append=True, the second save must replace the file.
    ref_path = tests.get_result_path(('cube_to_pp', 'replace_single.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cube) as pp_path:
        iris.save(cube, pp_path)  # Create file
        iris.save(cube, pp_path)  # Replace file
def test_rotated_latlon(self):
    # Round-trip a rotated-pole grib2 file and compare against reference.
    grib_path = tests.get_data_path(
        ("GRIB", "rotated_nae_t", "sensible_pole.grib2"))
    compare_path = tests.get_result_path(
        ("grib_save", "rotated_latlon.grib_compare.txt"))
    # TODO: Investigate small change in test result:
    #   long [iDirectionIncrement]: [109994] != [109993]
    # Consider the change in dx_dy() to "InDegrees" too.
    self.save_and_compare(grib_path, compare_path)
def test_simple(self):
    # A 1-D cube with a plain dimension coordinate converts to a Series.
    cube = Cube(np.array([0, 1, 2, 3, 4.4]), long_name="foo")
    cube.add_dim_coord(DimCoord([5, 6, 7, 8, 9], long_name="bar"), 0)
    result = iris.pandas.as_series(cube)
    self.assertArrayEqual(result, cube.data)
    self.assertString(
        str(result),
        tests.get_result_path(("pandas", "as_series", "simple.txt")))
def test_name2_field(self):
    """Round-trip each NAMEII field through grib2, skipping cubes whose
    data is constant when the gribapi version is known to be broken."""
    filepath = tests.get_data_path(('NAME', 'NAMEII_field.txt'))
    name_cubes = iris.load(filepath)
    # Check gribapi version, because we currently have a known load/save
    # problem with gribapi 1v14 (at least).
    gribapi_ver = gribapi.grib_get_api_version()
    gribapi_fully_supported_version = \
        (StrictVersion(gribapi.grib_get_api_version()) <
         StrictVersion('1.13'))
    for i, name_cube in enumerate(name_cubes):
        if not gribapi_fully_supported_version:
            data = name_cube.data
            # Constant-valued ("empty") fields trip the gribapi bug.
            if np.min(data) == np.max(data):
                msg = ('NAMEII cube #{}, "{}" has empty data : '
                       'SKIPPING test for this cube, as save/load will '
                       'not currently work with gribabi > 1v12.')
                warnings.warn(msg.format(i, name_cube.name()))
                continue
        with self.temp_filename('.grib2') as temp_filename:
            iris.save(name_cube, temp_filename)
            grib_cube = iris.load_cube(temp_filename, callback=name_cb)
            self.check_common(name_cube, grib_cube)
            self.assertCML(
                grib_cube,
                tests.get_result_path(
                    ('integration', 'name_grib', 'NAMEII',
                     '{}_{}.cml'.format(i, name_cube.name()))))
def test_pp_append_lists(self):
    # Test PP append saving - lists of cubes.
    # For each of the first four time-steps in the 4D cube,
    # pull out the bottom two levels.
    cube_4d = stock.realistic_4d()
    cubes = [cube_4d[i, :2, :, :] for i in range(4)]
    # Appending the last two cubes must match the four-cube reference.
    ref_path = tests.get_result_path(('cube_to_pp', 'append_multi.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cubes) as pp_path:
        iris.save(cubes[:2], pp_path)
        iris.save(cubes[2:], pp_path, append=True)
    # Without append=True the second save must replace the file.
    ref_path = tests.get_result_path(('cube_to_pp', 'replace_multi.txt'))
    with self.cube_save_test(ref_path,
                             reference_cubes=cubes[2:]) as pp_path:
        iris.save(cubes[:2], pp_path)
        iris.save(cubes[2:], pp_path)
def test_time_gregorian(self):
    # A (default-calendar) time coordinate becomes the Series index.
    cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts")
    cube.add_dim_coord(
        DimCoord([0, 100.1, 200.2, 300.3, 400.4], long_name="time",
                 units="days since 2000-01-01 00:00"),
        0)
    result = iris.pandas.as_series(cube)
    self.assertArrayEqual(result, cube.data)
    self.assertString(
        str(result),
        tests.get_result_path(
            ("pandas", "as_series", "time_gregorian.txt")))
def test_no_y_coord(self):
    # Only the x dimension carries a coordinate; the y index defaults.
    cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]),
                long_name="foo")
    cube.add_dim_coord(DimCoord([10, 11, 12, 13, 14], long_name="bar"), 1)
    frame = iris.pandas.as_data_frame(cube)
    self.assertArrayEqual(frame, cube.data)
    self.assertString(
        str(frame),
        tests.get_result_path(
            ("pandas", "as_dataframe", "no_y_coord.txt")))
def test_default_coord_system(self):
    # Saving a cube with no coord system must apply the default GeogCS.
    GeogCS = iris.coord_systems.GeogCS
    cube = iris.tests.stock.lat_lon_cube()
    ref_path = tests.get_result_path(
        ('cube_to_pp', 'default_coord_system.txt'))
    # Strip every coordinate system ...
    for coord in cube.coords():
        coord.coord_system = None
    # ... and confirm none remain.
    self.assertIsNone(cube.coord_system(GeogCS))
    self.assertIsNone(cube.coord_system(None))
    with self.cube_save_test(ref_path, reference_cubes=cube) as \
            pp_path:
        # Save cube to PP with no coordinate system.
        iris.save(cube, pp_path)
        pp_cube = iris.load_cube(pp_path)
        # The re-loaded cube must have gained the default GeogCS.
        self.assertIsInstance(pp_cube.coord_system(GeogCS),
                              iris.coord_systems.GeogCS)
        self.assertIsNotNone(pp_cube.coord_system(None))
        self.assertIsInstance(pp_cube.coord_system(None),
                              iris.coord_systems.GeogCS)
        self.assertIsNotNone(pp_cube.coord_system())
        self.assertIsInstance(pp_cube.coord_system(),
                              iris.coord_systems.GeogCS)
def test_series_object(self):
    # Arbitrary objects in the index are handled via their repr.
    class Thing(object):
        def __repr__(self):
            return "A Thing"
    index = [Thing() for _ in range(5)]
    series = pandas.Series([0, 1, 2, 3, 4], index=index)
    self.assertCML(
        iris.pandas.as_cube(series),
        tests.get_result_path(("pandas", "as_cube", "series_object.cml")))
def test_data_frame_masked(self):
    # NaN values in the frame should become masked points in the cube.
    frame = pandas.DataFrame(
        [[0, float("nan"), 2, 3, 4], [5, 6, 7, np.nan, 9]],
        index=[10, 11],
        columns=[12, 13, 14, 15, 16])
    self.assertCML(
        iris.pandas.as_cube(frame),
        tests.get_result_path(
            ("pandas", "as_cube", "data_frame_masked.cml")))
def test_masked(self):
    # Masked points are exported to the Series as NaN.
    masked = np.ma.MaskedArray([0, 1, 2, 3, 4.4], mask=[0, 1, 0, 1, 0])
    cube = Cube(masked, long_name="foo")
    result = iris.pandas.as_series(cube)
    self.assertArrayEqual(result, cube.data.astype('f').filled(np.nan))
    self.assertString(
        str(result),
        tests.get_result_path(('pandas', 'as_series', 'masked.txt')))
def test_time_360(self):
    # A 360-day-calendar time coordinate becomes the DataFrame index.
    cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]),
                long_name="ts")
    calendar_unit = cf_units.Unit("days since 2000-01-01 00:00",
                                  calendar=cf_units.CALENDAR_360_DAY)
    cube.add_dim_coord(
        DimCoord([100.1, 200.2], long_name="time", units=calendar_unit),
        0)
    frame = iris.pandas.as_data_frame(cube)
    self.assertArrayEqual(frame, cube.data)
    self.assertString(
        str(frame),
        tests.get_result_path(("pandas", "as_dataframe", "time_360.txt")))
def test_no_dim_coord(self):
    # A cube with no dimension coordinate still converts; the Series
    # index is defaulted.
    cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo")
    result = iris.pandas.as_series(cube)
    self.assertArrayEqual(result, cube.data)
    self.assertString(
        str(result),
        tests.get_result_path(
            ('pandas', 'as_series', 'no_dim_coord.txt')))
def test_time_360(self):
    # A 360-day-calendar time coordinate becomes the Series index.
    cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts")
    calendar_unit = cf_units.Unit("days since 2000-01-01 00:00",
                                  calendar=cf_units.CALENDAR_360_DAY)
    cube.add_dim_coord(
        DimCoord([0, 100.1, 200.2, 300.3, 400.4], long_name="time",
                 units=calendar_unit),
        0)
    result = iris.pandas.as_series(cube)
    self.assertArrayEqual(result, cube.data)
    self.assertString(
        str(result),
        tests.get_result_path(("pandas", "as_series", "time_360.txt")))
def test_latlon_forecast_plev(self):
    source_grib = tests.get_data_path(("GRIB", "uk_t", "uk_t.grib2"))
    # The grib-compare reference differs either side of gribapi 1.12.0.
    if self.problem_gribapi_ver:
        result_file = "latlon_forecast_plev.grib_compare.pre1-12-0.txt"
    else:
        result_file = "latlon_forecast_plev.grib_compare.post1-12-0.txt"
    self.save_and_compare(
        source_grib, tests.get_result_path(("grib_save", result_file)))
def test_no_forecast_time(self):
    cube = stock.lat_lon_cube()
    # Attach a scalar time coord but no forecast period/reference time.
    time_coord = iris.coords.DimCoord(np.array([24], dtype=np.int64),
                                      standard_name="time",
                                      units="hours since epoch")
    cube.add_aux_coord(time_coord)
    self.assertCML(cube, ["cube_to_pp", "no_forecast_time.cml"])
    ref_path = tests.get_result_path(
        ("cube_to_pp", "no_forecast_time.txt"))
    with self.cube_save_test(ref_path, reference_cubes=cube) as pp_path:
        iris.save(cube, pp_path)
def check_dot(self, cube, reference_filename):
    """
    Compare the cube's DOT text against the reference file, creating
    the reference file if it does not yet exist.

    """
    test_string = iris.fileformats.dot.cube_text(cube)
    reference_path = tests.get_result_path(reference_filename)
    if os.path.isfile(reference_path):
        # Use a context manager so the handle is closed promptly
        # (the original leaked both file handles).
        with open(reference_path, 'r') as reference_file:
            reference = reference_file.read()
        self._assert_str_same(reference, test_string, reference_filename,
                              type_comparison_name='DOT files')
    else:
        tests.logger.warning('Creating result file: %s', reference_path)
        with open(reference_path, 'w') as reference_file:
            reference_file.writelines(test_string)
def test_no_dim_coords(self):
    # A 2-D cube with no dimension coordinates still converts; both
    # the index and columns are defaulted.
    cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]),
                long_name="foo")
    frame = iris.pandas.as_data_frame(cube)
    self.assertArrayEqual(frame, cube.data)
    self.assertString(
        str(frame),
        tests.get_result_path(
            ('pandas', 'as_dataframe', 'no_dim_coords.txt')))
def test_time_mean(self):
    # This test for time-mean fields also tests negative forecast time.
    source_grib = tests.get_data_path(
        ("GRIB", "time_processed", "time_bound.grib2"))
    # The grib-compare reference differs either side of gribapi 1.12.0.
    if self.problem_gribapi_ver:
        result_file = "time_mean.grib_compare.pre1-12-0.txt"
    else:
        result_file = "time_mean.grib_compare.post1-12-0.txt"
    self.save_and_compare(
        source_grib, tests.get_result_path(("grib_save", result_file)))
def test_data_frame_nonotonic(self):
    # Duplicate (non-monotonic) index and column values must convert.
    frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
                             index=[10, 10],
                             columns=[12, 12, 14, 15, 16])
    self.assertCML(
        iris.pandas.as_cube(frame),
        tests.get_result_path(
            ('pandas', 'as_cube', 'data_frame_nonotonic.cml')))
def test_time_mean(self):
    # This test for time-mean fields also tests negative forecast time.
    # Because the results depend on the presence of our api patch,
    # we currently have results for both a patched and unpatched api.
    # If the api ever allows -ve ft, we should revert to a single result.
    source_grib = tests.get_data_path(
        ("GRIB", "time_processed", "time_bound.grib2"))
    expected_path = tests.get_result_path(
        ("grib_save", "time_mean.grib_compare.txt"))
    # TODO: It's not ideal to have grib patch awareness here...
    import unittest
    try:
        self.save_and_compare(source_grib, expected_path)
    except unittest.TestCase.failureException:
        # Unpatched api: retry against the FT_PATCH reference.
        expected_path = tests.get_result_path((
            "grib_save", "time_mean.grib_compare.FT_PATCH.txt"))
        self.save_and_compare(source_grib, expected_path)
def test_data_frame_datetime_gregorian(self):
    # datetime index values become a time coordinate on the cube.
    timestamps = [datetime.datetime(2001, 1, 1, 1, 1, 1),
                  datetime.datetime(2002, 2, 2, 2, 2, 2)]
    frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
                             index=timestamps,
                             columns=[10, 11, 12, 13, 14])
    self.assertCML(
        iris.pandas.as_cube(frame),
        tests.get_result_path(
            ("pandas", "as_cube", "data_frame_datetime_gregorian.cml")))
def test_data_frame_netcdftime_360(self):
    # netcdftime index values plus an explicit 360-day calendar.
    timestamps = [netcdftime.datetime(2001, 1, 1, 1, 1, 1),
                  netcdftime.datetime(2002, 2, 2, 2, 2, 2)]
    frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
                             index=timestamps,
                             columns=[10, 11, 12, 13, 14])
    self.assertCML(
        iris.pandas.as_cube(frame,
                            calendars={0: cf_units.CALENDAR_360_DAY}),
        tests.get_result_path(
            ("pandas", "as_cube", "data_frame_netcdftime_360.cml")))
def test_masked(self):
    # Masked points are exported to the DataFrame as NaN.
    masked = np.ma.MaskedArray([[0, 1, 2, 3, 4.4], [5, 6, 7, 8, 9]],
                               mask=[[0, 1, 0, 1, 0], [1, 0, 1, 0, 1]])
    cube = Cube(masked, long_name="foo")
    frame = iris.pandas.as_data_frame(cube)
    self.assertArrayEqual(frame, cube.data.astype('f').filled(np.nan))
    self.assertString(
        str(frame),
        tests.get_result_path(('pandas', 'as_dataframe', 'masked.txt')))
def test_pp_save_rules(self):
    # Test pp save rules without user rules.
    in_filename = tests.get_data_path(('PP', 'simple_pp', 'global.pp'))
    cubes = iris.load(in_filename, callback=itab_callback)
    ref_path = tests.get_result_path(('cube_to_pp', 'simple.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cubes) as pp_path:
        iris.save(cubes, pp_path)
def check_dot(self, cube, reference_filename):
    """
    Compare the cube's DOT text against the reference file, creating
    the reference file if it does not yet exist.

    """
    test_string = iris.fileformats.dot.cube_text(cube)
    reference_path = tests.get_result_path(reference_filename)
    if os.path.isfile(reference_path):
        with open(reference_path, 'r') as reference_file:
            reference = reference_file.read()
        self._assert_str_same(reference, test_string, reference_filename,
                              type_comparison_name='DOT files')
    else:
        tests.logger.warning('Creating result file: %s', reference_path)
        with open(reference_path, 'w') as reference_file:
            reference_file.writelines(test_string)
def test_no_y_coord(self):
    # With only an x coordinate, the DataFrame index is defaulted.
    cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]),
                long_name="foo")
    x_coord = DimCoord([10, 11, 12, 13, 14], long_name="bar")
    cube.add_dim_coord(x_coord, 1)
    result = iris.pandas.as_data_frame(cube)
    self.assertArrayEqual(result, cube.data)
    self.assertString(
        str(result),
        tests.get_result_path(
            ('pandas', 'as_dataframe', 'no_y_coord.txt')))
def test_data_frame_nonotonic(self):
    # Duplicate (non-monotonic) index and column values must convert.
    frame = pandas.DataFrame(
        [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
        index=[10, 10],
        columns=[12, 12, 14, 15, 16])
    self.assertCML(
        iris.pandas.as_cube(frame),
        tests.get_result_path(
            ("pandas", "as_cube", "data_frame_nonotonic.cml")))
def test_no_forecast_time(self):
    cube = stock.lat_lon_cube()
    # Attach a scalar time coord but no forecast period/reference time.
    time_coord = iris.coords.DimCoord(np.array([24], dtype=np.int64),
                                      standard_name='time',
                                      units='hours since epoch')
    cube.add_aux_coord(time_coord)
    self.assertCML(cube, ['cube_to_pp', 'no_forecast_time.cml'])
    ref_path = tests.get_result_path(
        ('cube_to_pp', 'no_forecast_time.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cube) as pp_path:
        iris.save(cube, pp_path)
def test_data_frame_masked(self):
    # NaN values in the frame should become masked points in the cube.
    frame = pandas.DataFrame([[0, float("nan"), 2, 3, 4],
                              [5, 6, 7, np.nan, 9]],
                             index=[10, 11],
                             columns=[12, 13, 14, 15, 16])
    self.assertCML(
        iris.pandas.as_cube(frame),
        tests.get_result_path(
            ("pandas", "as_cube", "data_frame_masked.cml")))
def test_no_forecast_time(self):
    cube = stock.lat_lon_cube()
    # Attach a scalar time coord but no forecast period/reference time.
    cube.add_aux_coord(iris.coords.DimCoord(
        24, standard_name='time', units='hours since epoch'))
    self.assertCML(cube, ['cube_to_pp', 'no_forecast_time.cml'])
    ref_path = tests.get_result_path(
        ('cube_to_pp', 'no_forecast_time.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cube) as pp_path:
        iris.save(cube, pp_path)
def test_no_x_coord(self):
    # With only a y coordinate, the DataFrame columns are defaulted.
    cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]),
                long_name="foo")
    cube.add_dim_coord(DimCoord([10, 11], long_name="bar"), 0)
    frame = iris.pandas.as_data_frame(cube)
    self.assertArrayEqual(frame, cube.data)
    self.assertString(
        str(frame),
        tests.get_result_path(
            ('pandas', 'as_dataframe', 'no_x_coord.txt')))
def test_data_frame_datetime_gregorian(self):
    # datetime index values become a time coordinate on the cube.
    # NOTE: the original literals had leading zeros (01, 02, ...),
    # which is a syntax error in Python 3; plain decimals are the
    # same values.
    data_frame = pandas.DataFrame(
        [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
        index=[datetime.datetime(2001, 1, 1, 1, 1, 1),
               datetime.datetime(2002, 2, 2, 2, 2, 2)],
        columns=[10, 11, 12, 13, 14])
    self.assertCML(
        iris.pandas.as_cube(data_frame),
        tests.get_result_path(
            ('pandas', 'as_cube', 'data_frame_datetime_gregorian.cml')))
def test_data_frame_multidim(self):
    # String column labels become an auxiliary coordinate.
    frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
                             index=[0, 1],
                             columns=["col_1", "col_2", "col_3",
                                      "col_4", "col_5"])
    self.assertCML(
        iris.pandas.as_cube(frame),
        tests.get_result_path(
            ("pandas", "as_cube", "data_frame_multidim.cml")))
def test_pp_append_lists(self):
    # Test PP append saving - lists of cubes.
    # For each of the first four time-steps in the 4D cube,
    # pull out the bottom two levels.
    source = stock.realistic_4d()
    cubes = [source[step, :2, :, :] for step in range(4)]
    # Appending the last two cubes must match the four-cube reference.
    ref_path = tests.get_result_path(
        ('cube_to_pp', 'append_multi.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cubes) as pp_path:
        iris.save(cubes[:2], pp_path)
        iris.save(cubes[2:], pp_path, append=True)
    # Without append=True the second save must replace the file.
    ref_path = tests.get_result_path(
        ('cube_to_pp', 'replace_multi.txt'))
    with self.cube_save_test(ref_path,
                             reference_cubes=cubes[2:]) as pp_path:
        iris.save(cubes[:2], pp_path)
        iris.save(cubes[2:], pp_path)
def test_data_frame_cftime_360(self):
    # cftime index values plus an explicit 360-day calendar.
    timestamps = [cftime.datetime(2001, 1, 1, 1, 1, 1),
                  cftime.datetime(2002, 2, 2, 2, 2, 2)]
    frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
                             index=timestamps,
                             columns=[10, 11, 12, 13, 14])
    # NOTE(review): the reference file name still says "netcdftime" —
    # presumably kept for continuity with older results; verify on disk.
    self.assertCML(
        iris.pandas.as_cube(frame,
                            calendars={0: cf_units.CALENDAR_360_DAY}),
        tests.get_result_path(
            ('pandas', 'as_cube', 'data_frame_netcdftime_360.cml')))
def test_pp_save_rules(self):
    # Test pp save rules without user rules.
    source_path = tests.get_data_path(('PP', 'simple_pp', 'global.pp'))
    cubes = iris.load(source_path, callback=itab_callback)
    ref_path = tests.get_result_path(
        ('cube_to_pp', 'simple.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cubes) as pp_path:
        iris.save(cubes, pp_path)
def test_pp_append_lists(self):
    # Test pp append saving - lists of cubes.
    # Locate the first 4 files from the analysis dataset.
    names = ['2008120%d1200__qwqu12ff.initanl.pp' % i
             for i in range(1, 5)]
    prefix = ['PP', 'trui', 'air_temp_init']
    paths = [tests.get_data_path(prefix + [name]) for name in names]
    # Grab the first two levels from each file.
    cubes = [iris.load_strict(path, callback=itab_callback)
             for path in paths]
    cubes = [cube[:2] for cube in cubes]
    # Appending the last two cubes must match the four-cube reference.
    ref_path = tests.get_result_path(('cube_to_pp', 'append_multi.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cubes) as pp_path:
        iris.save(cubes[:2], pp_path)
        iris.save(cubes[2:], pp_path, append=True)
    # Without append=True the second save must replace the file.
    ref_path = tests.get_result_path(('cube_to_pp', 'replace_multi.txt'))
    with self.cube_save_test(ref_path,
                             reference_cubes=cubes[2:]) as pp_path:
        iris.save(cubes[:2], pp_path)
        iris.save(cubes[2:], pp_path)
def check_tiff_header(self, geotiff_fh, reference_filename):
    """
    Checks the given tiff file handle's metadata matches the
    reference file contents.

    """
    im = PIL.Image.open(geotiff_fh)
    # NOTE: the original special-cased ``unicode``, which does not
    # exist on Python 3 (NameError); ``str`` gives the same
    # "(tag, 'value')" formatting for text tags.
    tiff_header = '\n'.join(
        str((tag, val)) if not isinstance(val, str)
        else "(%s, '%s')" % (tag, val)
        for tag, val in sorted(im.tag.items()))
    reference_path = tests.get_result_path(reference_filename)
    self.assertString(tiff_header, reference_path)
def test_name2_field(self):
    # Every NAMEII field must survive a grib2 save/load round trip.
    filepath = tests.get_data_path(('NAME', 'NAMEII_field.txt'))
    for i, name_cube in enumerate(iris.load(filepath)):
        with self.temp_filename('.grib2') as temp_filename:
            iris.save(name_cube, temp_filename)
            grib_cube = iris.load_cube(temp_filename, callback=name_cb)
            self.check_common(name_cube, grib_cube)
            self.assertCML(
                grib_cube,
                tests.get_result_path(
                    ('integration', 'name_grib', 'NAMEII',
                     '{}_{}.cml'.format(i, name_cube.name()))))
def test_series_datetime_gregorian(self):
    # Index datetimes follow the pattern (2000+i, i, i, i, i, i).
    timestamps = [datetime.datetime(2000 + i, i, i, i, i, i)
                  for i in range(1, 6)]
    series = pandas.Series([0, 1, 2, 3, 4], index=timestamps)
    self.assertCML(
        iris.pandas.as_cube(series),
        tests.get_result_path(
            ('pandas', 'as_cube', 'series_datetime_gregorian.cml')))
def test_rotated_latlon(self):
    source_grib = tests.get_data_path(
        ("GRIB", "rotated_nae_t", "sensible_pole.grib2"))
    # The grib-compare reference differs either side of gribapi 1.12.0.
    if self.problem_gribapi_ver:
        result_file = "rotated_latlon.grib_compare.pre1-12-0.txt"
    else:
        result_file = "rotated_latlon.grib_compare.post1-12-0.txt"
    # TODO: Investigate small change in test result:
    #   long [iDirectionIncrement]: [109994] != [109993]
    # Consider the change in dx_dy() to "InDegrees" too.
    self.save_and_compare(
        source_grib, tests.get_result_path(("grib_save", result_file)))
def test_time_gregorian(self):
    # A (default-calendar) time coordinate becomes the Series index.
    cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts")
    time_coord = DimCoord([0, 100.1, 200.2, 300.3, 400.4],
                          long_name="time",
                          units="days since 2000-01-01 00:00")
    cube.add_dim_coord(time_coord, 0)
    result = iris.pandas.as_series(cube)
    self.assertArrayEqual(result, cube.data)
    self.assertString(
        str(result),
        tests.get_result_path(
            ('pandas', 'as_series', 'time_gregorian.txt')))
def test_name3_field(self):
    # Every NAMEIII field must survive a grib2 save/load round trip.
    filepath = tests.get_data_path(('NAME', 'NAMEIII_field.txt'))
    for i, name_cube in enumerate(iris.load(filepath)):
        with self.temp_filename('.grib2') as temp_filename:
            iris.save(name_cube, temp_filename)
            grib_cube = iris.load_cube(temp_filename, callback=name_cb)
            self.check_common(name_cube, grib_cube)
            self.assertCML(
                grib_cube,
                tests.get_result_path(
                    ('integration', 'name_grib', 'NAMEIII',
                     '{}_{}.cml'.format(i, name_cube.name()))))
def test_series_netcdftime_360(self):
    # Index datetimes follow the pattern (2000+i, i, i, i, i, i).
    timestamps = [netcdftime.datetime(2000 + i, i, i, i, i, i)
                  for i in range(1, 6)]
    series = pandas.Series([0, 1, 2, 3, 4], index=timestamps)
    # NOTE(review): "netcdfimte" in the result filename looks like a
    # typo, but it must match the reference file that exists on disk.
    self.assertCML(
        iris.pandas.as_cube(series,
                            calendars={0: cf_units.CALENDAR_360_DAY}),
        tests.get_result_path(
            ('pandas', 'as_cube', 'series_netcdfimte_360.cml')))
def test_time_360(self):
    # A 360-day-calendar time coordinate becomes the Series index.
    cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts")
    calendar_unit = iris.unit.Unit("days since 2000-01-01 00:00",
                                   calendar=iris.unit.CALENDAR_360_DAY)
    cube.add_dim_coord(
        DimCoord([0, 100.1, 200.2, 300.3, 400.4], long_name="time",
                 units=calendar_unit),
        0)
    result = iris.pandas.as_series(cube)
    self.assertArrayEqual(result, cube.data)
    self.assertString(
        str(result),
        tests.get_result_path(('pandas', 'as_series', 'time_360.txt')))
def test_series_object(self):
    # Arbitrary objects in the index are handled via their repr.
    class Thing(object):
        def __repr__(self):
            return "A Thing"
    series = pandas.Series([0, 1, 2, 3, 4],
                           index=[Thing() for _ in range(5)])
    self.assertCML(
        iris.pandas.as_cube(series),
        tests.get_result_path(
            ('pandas', 'as_cube', 'series_object.cml')))
def check_tiff_header(self, geotiff_fh, reference_filename):
    """
    Checks the given tiff file handle's metadata matches the
    reference file contents.

    """
    im = PIL.Image.open(geotiff_fh)
    header_lines = [str(item) for item in sorted(im.tag.items())]
    tiff_header = '\n'.join(header_lines)
    reference_path = tests.get_result_path(reference_filename)
    self._check_same(tiff_header, reference_path, reference_filename,
                     type_comparison_name='Tiff header')
def test_time_360(self):
    # A 360-day-calendar time coordinate becomes the DataFrame index.
    cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]),
                long_name="ts")
    calendar_unit = iris.unit.Unit("days since 2000-01-01 00:00",
                                   calendar=iris.unit.CALENDAR_360_DAY)
    cube.add_dim_coord(
        DimCoord([100.1, 200.2], long_name="time", units=calendar_unit),
        0)
    frame = iris.pandas.as_data_frame(cube)
    self.assertArrayEqual(frame, cube.data)
    self.assertString(
        str(frame),
        tests.get_result_path(
            ('pandas', 'as_dataframe', 'time_360.txt')))
def test_data_frame_datetime_gregorian(self):
    # datetime index values become a time coordinate on the cube.
    timestamps = [datetime.datetime(2001, 1, 1, 1, 1, 1),
                  datetime.datetime(2002, 2, 2, 2, 2, 2)]
    frame = pandas.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
                             index=timestamps,
                             columns=[10, 11, 12, 13, 14])
    self.assertCML(
        iris.pandas.as_cube(frame),
        tests.get_result_path(
            ("pandas", "as_cube", "data_frame_datetime_gregorian.cml")))
def test_series_object(self):
    # Arbitrary objects in the index are handled via their repr.
    class Thing:
        def __repr__(self):
            return "A Thing"
    series = pandas.Series([0, 1, 2, 3, 4],
                           index=[Thing() for _ in range(5)])
    self.assertCML(
        iris.pandas.as_cube(series),
        tests.get_result_path(("pandas", "as_cube", "series_object.cml")))
def test_series_datetime_gregorian(self):
    # Index datetimes follow the pattern (2000+i, i, i, i, i, i).
    timestamps = [datetime.datetime(2000 + i, i, i, i, i, i)
                  for i in range(1, 6)]
    series = pandas.Series([0, 1, 2, 3, 4], index=timestamps)
    self.assertCML(
        iris.pandas.as_cube(series),
        tests.get_result_path(
            ("pandas", "as_cube", "series_datetime_gregorian.cml")))
def test_series_cftime_360(self):
    # Index datetimes follow the pattern (2000+i, i, i, i, i, i).
    timestamps = [cftime.datetime(2000 + i, i, i, i, i, i)
                  for i in range(1, 6)]
    series = pandas.Series([0, 1, 2, 3, 4], index=timestamps)
    # NOTE(review): "netcdfimte" in the result filename looks like a
    # typo, but it must match the reference file that exists on disk.
    self.assertCML(
        iris.pandas.as_cube(series,
                            calendars={0: cf_units.CALENDAR_360_DAY}),
        tests.get_result_path(
            ("pandas", "as_cube", "series_netcdfimte_360.cml")))
def test_name3_field(self):
    # Every NAMEIII field must survive a grib2 save/load round trip.
    filepath = tests.get_data_path(("NAME", "NAMEIII_field.txt"))
    for i, name_cube in enumerate(iris.load(filepath)):
        with self.temp_filename(".grib2") as temp_filename:
            iris.save(name_cube, temp_filename)
            grib_cube = iris.load_cube(temp_filename, callback=name_cb)
            self.check_common(name_cube, grib_cube)
            self.assertCML(
                grib_cube,
                tests.get_result_path((
                    "integration", "name_grib", "NAMEIII",
                    "{}_{}.cml".format(i, name_cube.name()))))
def test_no_forecast_period(self):
    cube = stock.lat_lon_cube()
    # Add a bounded scalar time coord and a forecast_reference_time.
    cube.add_aux_coord(iris.coords.DimCoord(
        10.958333, standard_name='time',
        units='days since 2013-05-10 12:00',
        bounds=[10.916667, 11.0]))
    cube.add_aux_coord(iris.coords.DimCoord(
        2.0, standard_name='forecast_reference_time',
        units='weeks since 2013-05-07'))
    self.assertCML(cube, ['cube_to_pp', 'no_forecast_period.cml'])
    ref_path = tests.get_result_path(
        ('cube_to_pp', 'no_forecast_period.txt'))
    with self.cube_save_test(ref_path, reference_cubes=cube) as \
            pp_path:
        iris.save(cube, pp_path)
def test_save_and_merge(self):
    cube = self._load_3d_cube()
    # Extract the 2d field that has SOME missing values.
    masked_slice = cube[0]
    masked_slice.data.fill_value = 123456
    # Test saving masked data.
    ref_path = tests.get_result_path(('cdm', 'masked_save_pp.txt'))
    with self.cube_save_test(ref_path,
                             reference_cubes=masked_slice) as pp_path:
        iris.save(masked_slice, pp_path)
        # Test merge keeps the mdi we just saved.
        cube1 = iris.load_cube(pp_path)
        cube2 = cube1.copy()
        # Make cube1 and cube2 differ on a scalar coord, to make them
        # mergeable into a 3d cube.
        cube2.coord("pressure").points[0] = 1001.0
        merged_cubes = iris.cube.CubeList([cube1, cube2]).merge()
        self.assertEqual(len(merged_cubes), 1,
                         "expected a single merged cube")
        merged_cube = merged_cubes[0]
        self.assertEqual(merged_cube.data.fill_value, 123456)
def check_pp(self, pp_fields, reference_filename):
    """
    Checks the given iterable of PPField objects matches the reference
    file, or creates the reference file if it doesn't exist.

    """
    # Turn the generator into a list.
    pp_fields = list(pp_fields)
    # Load deferred data for all of the fields (but don't do anything
    # with it).
    for pp_field in pp_fields:
        pp_field.data
    test_string = str(pp_fields)
    reference_path = tests.get_result_path(reference_filename)
    if os.path.isfile(reference_path):
        with open(reference_path, 'r') as reference_fh:
            reference = reference_fh.read()
        self._assert_str_same(reference + '\n', test_string + '\n',
                              reference_filename,
                              type_comparison_name='PP files')
    else:
        with open(reference_path, 'w') as reference_fh:
            reference_fh.writelines(test_string)
def test_user_pp_save_rules(self):
    # Test pp save rules with user rules.
    # Create a user rules file.
    user_rules_filename = iris.util.create_temp_filename(suffix='.txt')
    try:
        with open(user_rules_filename, "wt") as user_rules_file:
            user_rules_file.write(
                "IF\ncm.standard_name == 'air_temperature'"
                "\nTHEN\npp.lbuser[3] = 9222")
        iris.fileformats.pp.add_save_rules(user_rules_filename)
        try:
            # Read the source pp data.
            in_filename = tests.get_data_path(('PP', 'simple_pp',
                                               'global.pp'))
            cubes = iris.load(in_filename, callback=itab_callback)
            ref_path = tests.get_result_path(('cube_to_pp',
                                              'user_rules.txt'))
            with self.cube_save_test(ref_path,
                                     reference_cubes=cubes) as pp_path:
                iris.save(cubes, pp_path)
        finally:
            # Always deregister the user rules again.
            iris.fileformats.pp.reset_save_rules()
    finally:
        os.remove(user_rules_filename)
def test_no_forecast_period(self):
    cube = stock.lat_lon_cube()
    # Add a bounded scalar time coord and a forecast_reference_time.
    time_coord = iris.coords.DimCoord(
        10.958333, standard_name="time",
        units="days since 2013-05-10 12:00", bounds=[10.916667, 11.0])
    cube.add_aux_coord(time_coord)
    frt_coord = iris.coords.DimCoord(
        2.0, standard_name="forecast_reference_time",
        units="weeks since 2013-05-07")
    cube.add_aux_coord(frt_coord)
    self.assertCML(cube, ["cube_to_pp", "no_forecast_period.cml"])
    ref_path = tests.get_result_path(
        ("cube_to_pp", "no_forecast_period.txt"))
    with self.cube_save_test(ref_path, reference_cubes=cube) as pp_path:
        iris.save(cube, pp_path)