def test_wgdos_mo_pack(self):
    """Save WGDOS-packed fields to a fresh file and compare field data."""
    filepath = tests.get_data_path(
        ('PP', 'wgdos_packed', 'nae.20100104-06_0001.pp'))
    source_fields = pp.load(filepath)
    with self.temp_filename('.pp') as temp_filename:
        # Write every source field into one temporary PP file.
        with open(temp_filename, 'wb') as fh:
            for source_field in source_fields:
                source_field.save(fh)
        # Re-load and compare the data payload field-by-field.
        reloaded_fields = pp.load(temp_filename)
        for source_field, reloaded_field in zip(source_fields,
                                                reloaded_fields):
            assert_array_equal(source_field.data, reloaded_field.data)
def setUp(self):
    """Load the test file twice: deferred-data and eager (read_data)."""
    self.original_pp_filepath = tests.get_data_path(
        ('PP', 'globClim1', 'dec_subset.pp'))
    self.r = pp.load(self.original_pp_filepath)
    self.r_loaded_data = pp.load(self.original_pp_filepath,
                                 read_data=True)
    # pp.load yields fields lazily; confirm each result really is a
    # generator, then realise it as a list so tests can index fields.
    self.assertEqual(type(self.r), GeneratorType)
    self.r = list(self.r)
    self.assertEqual(type(self.r_loaded_data), GeneratorType)
    self.r_loaded_data = list(self.r_loaded_data)
def test_call_structure(self):
    # Check that the load function calls the two necessary utility
    # functions: _field_gen produces the raw fields, and its result
    # is handed straight to _interpret_fields.
    fields_iter = mock.Mock()
    patched_interpret = mock.patch('iris.fileformats.pp._interpret_fields',
                                   autospec=True, return_value=iter([]))
    patched_field_gen = mock.patch('iris.fileformats.pp._field_gen',
                                   autospec=True, return_value=fields_iter)
    with patched_interpret as interpret, patched_field_gen as field_gen:
        pp.load('mock', read_data=True)
    interpret.assert_called_once_with(fields_iter)
    field_gen.assert_called_once_with('mock', read_data_bytes=True)
def test_save_single(self):
    """Save a single field and check it round-trips byte-for-byte."""
    filepath = tests.get_data_path(('PP', 'model_comp',
                                    'dec_first_field.pp'))
    f = next(pp.load(filepath))
    temp_filename = iris.util.create_temp_filename(".pp")
    # Use a context manager so the handle is closed (and flushed) before
    # the checksum comparison below; the bare open() leaked the handle.
    with open(temp_filename, 'wb') as temp_fh:
        f.save(temp_fh)
    s = next(pp.load(temp_filename))
    # force the data to be loaded (this was done for f when save was run)
    s.data
    self._assert_str_same(str(s) + '\n', str(f) + '\n', '',
                          type_comparison_name='PP files')
    self.assertEqual(self.file_checksum(temp_filename),
                     self.file_checksum(filepath))
    os.remove(temp_filename)
def test_copy_field_deferred(self):
    """Copying a deferred-data field keeps the lazy proxy and equality."""
    # Use the next() builtin, not the Python-2-only .next() method,
    # matching the other tests in this file.
    field = next(pp.load(self.filename))
    clone = field.copy()
    # The copy must still defer its data (a biggus proxy, not ndarray).
    self.assertIsInstance(clone._data, biggus.Array)
    self.assertEqual(field, clone)
    # Mutating the clone must not affect the original.
    clone.lbyr = 666
    self.assertNotEqual(field, clone)
def test_deepcopy_field_deferred(self):
    """A deepcopy of a deferred field keeps its lazy data proxy."""
    original = next(pp.load(self.filename))
    duplicate = deepcopy(original)
    # The deepcopy must still defer its data via a biggus proxy.
    self.assertIsInstance(duplicate._data, biggus.Array)
    self.assertEqual(original, duplicate)
    # Diverge the duplicate and check inequality.
    duplicate.lbyr = 666
    self.assertNotEqual(original, duplicate)
def test_save_single(self):
    """A saved single field must reload identically to the original."""
    filepath = tests.get_data_path(('PP', 'model_comp',
                                    'dec_first_field.pp'))
    original = next(pp.load(filepath))
    temp_filename = iris.util.create_temp_filename(".pp")
    with open(temp_filename, 'wb') as temp_fh:
        original.save(temp_fh)
    reloaded = next(pp.load(temp_filename))
    # force the data to be loaded (this was done for f when save was run)
    reloaded.data
    self._assert_str_same(str(reloaded) + '\n', str(original) + '\n', '',
                          type_comparison_name='PP files')
    self.assertEqual(self.file_checksum(temp_filename),
                     self.file_checksum(filepath))
    os.remove(temp_filename)
def test_lots_of_extra_data(self):
    """Fields carrying extra-data sections load with the expected lbcode."""
    fields = list(pp.load(tests.get_data_path(
        ('PP', 'cf_processing', 'HadCM2_ts_SAT_ann_18602100.b.pp'))))
    first = fields[0]
    self.assertEqual(first.lbcode.ix, 13)
    self.assertEqual(first.lbcode.iy, 23)
    self.assertEqual(len(first.lbcode), 5)
    self.check_pp(fields, ('PP', 'extra_data_time_series.pp.txt'))
def test_save_api(self):
    """Saving via PPField.save must reproduce the source file exactly."""
    filepath = self.original_pp_filepath
    f = next(pp.load(filepath))
    temp_filename = iris.util.create_temp_filename(".pp")
    # Use a context manager so the handle is closed (and flushed) before
    # the checksum comparison; the bare open() leaked the handle.
    with open(temp_filename, 'wb') as temp_fh:
        f.save(temp_fh)
    self.assertEqual(self.file_checksum(temp_filename),
                     self.file_checksum(filepath))
    os.remove(temp_filename)
def test_save_single(self):
    """Round-trip the first ukV field and compare text plus checksum."""
    filepath = tests.get_data_path(
        ("PP", "ukV1", "ukVpmslont_first_field.pp"))
    original = next(pp.load(filepath))
    temp_filename = iris.util.create_temp_filename(".pp")
    with open(temp_filename, "wb") as temp_fh:
        original.save(temp_fh)
    reloaded = next(pp.load(temp_filename))
    # force the data to be loaded (this was done for f when save was run)
    reloaded.data
    self._assert_str_same(str(reloaded) + "\n", str(original) + "\n", "",
                          type_comparison_name="PP files")
    self.assertEqual(self.file_checksum(temp_filename),
                     self.file_checksum(filepath))
    os.remove(temp_filename)
def test_save_api(self):
    """A field saved through the API checksums equal to its source file."""
    source_path = self.original_pp_filepath
    field = next(pp.load(source_path))
    temp_filename = iris.util.create_temp_filename(".pp")
    with open(temp_filename, 'wb') as temp_fh:
        field.save(temp_fh)
    self.assertEqual(self.file_checksum(temp_filename),
                     self.file_checksum(source_path))
    os.remove(temp_filename)
def test_wgdos(self):
    """WGDOS-packed fields load correctly but cannot be re-saved."""
    r = pp.load(tests.get_data_path(('PP', 'wgdos_packed',
                                     'nae.20100104-06_0001.pp')))
    # Check that the result is a generator and convert to a list so that
    # we can index and get the first one
    self.assertEqual(type(r), GeneratorType)
    r = list(r)
    self.check_pp(r, ('PP', 'nae_unpacked.pp.txt'))
    # check that trying to save this field again raises an error (we
    # cannot currently write WGDOS packed fields)
    temp_filename = iris.util.create_temp_filename(".pp")
    # Context-manage the handle so it is closed even though save()
    # raises; the bare open() leaked the handle.
    with open(temp_filename, 'wb') as temp_fh:
        self.assertRaises(NotImplementedError, r[0].save, temp_fh)
    os.remove(temp_filename)
def test_rle(self):
    """RLE-packed fields load correctly but cannot be re-saved."""
    r = pp.load(tests.get_data_path(('PP', 'ocean_rle', 'ocean_rle.pp')))
    # Check that the result is a generator and convert to a list so that
    # we can index and get the first one
    self.assertEqual(type(r), GeneratorType)
    r = list(r)
    self.check_pp(r, ('PP', 'rle_unpacked.pp.txt'))
    # check that trying to save this field again raises an error
    # (we cannot currently write RLE packed fields)
    with self.temp_filename('.pp') as temp_filename:
        with self.assertRaises(NotImplementedError):
            # Context-manage the handle so it is closed even though
            # save() raises; the bare open() leaked the handle.
            with open(temp_filename, 'wb') as temp_fh:
                r[0].save(temp_fh)
def test_rle(self):
    """RLE-packed fields load correctly but cannot be re-saved."""
    r = pp.load(tests.get_data_path(('PP', 'ocean_rle', 'ocean_rle.pp')))
    # Check that the result is a generator and convert to a list so that
    # we can index and get the first one
    self.assertEqual(type(r), GeneratorType)
    r = list(r)
    self.check_pp(r, ('PP', 'rle_unpacked.pp.txt'))
    # check that trying to save this field again raises an error (we
    # cannot currently write RLE packed fields)
    temp_filename = iris.util.create_temp_filename(".pp")
    # Context-manage the handle so it is closed even though save()
    # raises; the bare open() leaked the handle.
    with open(temp_filename, 'wb') as temp_fh:
        self.assertRaises(NotImplementedError, r[0].save, temp_fh)
    os.remove(temp_filename)
def test_rle(self):
    """Loading an RLE-packed file works; attempting to re-save raises."""
    fields = pp.load(tests.get_data_path(
        ("PP", "ocean_rle", "ocean_rle.pp")))
    # pp.load is lazy: assert it is a generator, then realise it so the
    # first field can be indexed.
    self.assertEqual(type(fields), GeneratorType)
    fields = list(fields)
    self.check_pp(fields, ("PP", "rle_unpacked.pp.txt"))
    # Writing RLE-packed fields is not currently supported, so save()
    # must raise NotImplementedError.
    with self.temp_filename(".pp") as temp_filename:
        with self.assertRaises(NotImplementedError):
            with open(temp_filename, "wb") as temp_fh:
                fields[0].save(temp_fh)
def fields_from_cube(cubes):
    """
    Yield the PP fields obtained by saving the given cube(s) to a
    temporary file and then immediately loading that file back in.

    """
    with tempfile.NamedTemporaryFile('w+b', suffix='.pp') as tmp_file:
        fh = tmp_file.file
        iris.save(cubes, fh, saver='pp')
        # Make sure everything is physically on disk, then rewind so the
        # loader reads from the start of the file.
        fh.flush()
        os.fsync(fh)
        fh.seek(0)
        # Re-load the freshly written PP fields one at a time.
        for field in ff_pp.load(tmp_file.name):
            yield field
def test_mean_save(self):
    """Collapse a time series to its mean and round-trip it via PP."""
    timestamps = ['200812011200', '200812021200', '200812031200',
                  '200812041200', '200812051200', '200812061200',
                  '200812071200', '200812081200']
    files = [tests.get_data_path(('PP', 'trui', 'air_temp_T24',
                                  stamp + '__qwqg12ff.T24.pp'))
             for stamp in timestamps]
    air_temp_cube = iris.load_strict(files)
    self.assertCML(air_temp_cube, ['trui', 'air_temp_T24_subset.cml'])
    mean = air_temp_cube.collapsed("time", iris.analysis.MEAN)
    self.assertCML(mean, ['trui', 'air_temp_T24_subset_mean.cml'])
    # Save the mean and compare only the first field against reference.
    temp_filename = iris.util.create_temp_filename(".pp")
    iris.io.save(mean, temp_filename)
    saved_fields = list(pp.load(temp_filename))
    self.check_pp(saved_fields[0:1],
                  ('trui', 'air_temp_T24_subset_mean.pp.txt'))
    os.remove(temp_filename)
def test_wgdos(self):
    """Without mo_pack, WGDOS fields load but cannot be re-saved."""
    filepath = tests.get_data_path(
        ('PP', 'wgdos_packed', 'nae.20100104-06_0001.pp'))
    r = pp.load(filepath)
    # Check that the result is a generator and convert to a list so that we
    # can index and get the first one
    self.assertEqual(type(r), GeneratorType)
    r = list(r)
    self.check_pp(r, ('PP', 'nae_unpacked.pp.txt'))
    # check that trying to save this field again raises an error
    # (we cannot currently write WGDOS packed fields without mo_pack)
    temp_filename = iris.util.create_temp_filename(".pp")
    with mock.patch('iris.fileformats.pp.mo_pack', None):
        with self.assertRaises(NotImplementedError):
            # Context-manage the handle so it is closed even though
            # save() raises; the bare open() leaked the handle.
            with open(temp_filename, 'wb') as temp_fh:
                r[0].save(temp_fh)
    os.remove(temp_filename)
def test_wgdos(self):
    """With mo_pack disabled, saving a WGDOS field must raise."""
    filepath = tests.get_data_path(('PP', 'wgdos_packed',
                                    'nae.20100104-06_0001.pp'))
    fields = pp.load(filepath)
    # pp.load is lazy: assert it is a generator, then realise it so the
    # first field can be indexed.
    self.assertEqual(type(fields), GeneratorType)
    fields = list(fields)
    self.check_pp(fields, ('PP', 'nae_unpacked.pp.txt'))
    # With mo_pack patched away, WGDOS packing cannot be written, so
    # save() must raise NotImplementedError.
    temp_filename = iris.util.create_temp_filename(".pp")
    with mock.patch('iris.fileformats.pp.mo_pack', None):
        with self.assertRaises(NotImplementedError):
            with open(temp_filename, 'wb') as temp_fh:
                fields[0].save(temp_fh)
    os.remove(temp_filename)
def test_copy_field_deferred(self):
    """Copying a deferred-data field gives an equal, independent field."""
    # Use the next() builtin, not the Python-2-only .next() method,
    # matching the other tests in this file.
    field = next(pp.load(self.filename))
    clone = field.copy()
    self.assertEqual(field, clone)
    # Mutating the clone must not affect the original.
    clone.lbyr = 666
    self.assertNotEqual(field, clone)
def setUp(self):
    """Load the first five ukV fields for use by the tests."""
    self.original_pp_filepath = tests.get_data_path(('PP', 'ukV1',
                                                     'ukVpmslont.pp'))
    # Realise the lazy loader and keep only the leading five fields.
    self.r = list(pp.load(self.original_pp_filepath))[0:5]
def test_copy_field_non_deferred(self):
    """Copying an eagerly loaded field must deep-copy its data array."""
    # Use the next() builtin, not the Python-2-only .next() method,
    # matching the other tests in this file.
    field = next(pp.load(self.filename, True))
    clone = field.copy()
    self.assertEqual(field, clone)
    # Changing the clone's data must leave the original untouched.
    clone.data[0][0] = 666
    self.assertNotEqual(field, clone)
def test_deepcopy_field_deferred(self):
    """A deepcopy of a deferred field equals, then diverges from, it."""
    original = next(pp.load(self.filename))
    duplicate = deepcopy(original)
    self.assertEqual(original, duplicate)
    # Mutating the duplicate must not affect the original.
    duplicate.lbyr = 666
    self.assertNotEqual(original, duplicate)
def test_copy_field_deferred(self):
    """A copy of a deferred field equals, then diverges from, it."""
    original = next(pp.load(self.filename))
    duplicate = original.copy()
    self.assertEqual(original, duplicate)
    # Mutating the duplicate must not affect the original.
    duplicate.lbyr = 666
    self.assertNotEqual(original, duplicate)
def test_deepcopy_field_deferred(self):
    """Deep-copying a deferred field gives an equal, independent field."""
    # Use the next() builtin, not the Python-2-only .next() method,
    # matching the other tests in this file.
    field = next(pp.load(self.filename))
    clone = deepcopy(field)
    self.assertEqual(field, clone)
    # Mutating the clone must not affect the original.
    clone.lbyr = 666
    self.assertNotEqual(field, clone)
def setUp(self):
    """Load every field from the global test file."""
    self.original_pp_filepath = tests.get_data_path(('PP', 'aPPglob1',
                                                     'global.pp'))
    # Realise the lazy loader so tests can index individual fields.
    self.r = list(pp.load(self.original_pp_filepath))
def setUp(self):
    """Load all fields from the global test data file."""
    self.original_pp_filepath = tests.get_data_path(
        ('PP', 'aPPglob1', 'global.pp'))
    # Materialise the lazy loader into a list for indexed access.
    self.r = list(pp.load(self.original_pp_filepath))
def test_deepcopy_field_non_deferred(self):
    """A deepcopy of an eagerly loaded field owns its own data array."""
    original = next(pp.load(self.filename, True))
    duplicate = deepcopy(original)
    self.assertEqual(original, duplicate)
    # Changing the duplicate's data must leave the original untouched.
    duplicate.data[0][0] = 666
    self.assertNotEqual(original, duplicate)
def setUp(self):
    """Load all fields from the global test file for the tests."""
    self.original_pp_filepath = tests.get_data_path(
        ("PP", "aPPglob1", "global.pp"))
    # Realise the lazy loader so tests can index individual fields.
    self.r = list(pp.load(self.original_pp_filepath))
def setUp(self):
    """Load the first five ukV fields for the tests."""
    self.original_pp_filepath = tests.get_data_path(
        ('PP', 'ukV1', 'ukVpmslont.pp'))
    # Keep just the leading five fields from the realised loader.
    self.r = list(pp.load(self.original_pp_filepath))[:5]
def setUp(self):
    """Load the leading five ukV fields for use by the tests."""
    self.original_pp_filepath = tests.get_data_path(
        ("PP", "ukV1", "ukVpmslont.pp"))
    # Realise the lazy loader, then retain only the first five fields.
    self.r = list(pp.load(self.original_pp_filepath))[:5]