コード例 #1
0
 def test_masked_data__insitu(self):
     """Writing masked data must fail inside the generic array-var helper."""
     with self.temp_filename(".nc") as nc_path:
         saver = Saver(nc_path, "NETCDF4")
         with self.assertRaisesRegex(ValueError, self.exp_emsg):
             saver._create_generic_cf_array_var(
                 self.cube, self.names_map, self.cm)
コード例 #2
0
 def test_transverse_mercator_no_ellipsoid(self):
     # A transverse-Mercator cube without an ellipsoid should save to
     # the reference CDL for this test.
     tm_cube = self._transverse_mercator_cube()
     with self.temp_filename(".nc") as nc_path:
         with Saver(nc_path, "NETCDF4") as writer:
             writer.write(tm_cube)
         self.assertCDL(nc_path)
コード例 #3
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_stereographic_no_ellipsoid(self):
     # A stereographic cube without an ellipsoid should save to the
     # reference CDL for this test.
     stereo_cube = self._stereo_cube()
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(stereo_cube)
         self.assertCDL(nc_path)
コード例 #4
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_masked_data__insitu(self):
     """The masked-data error must be raised by the cell-measure helper."""
     with self.temp_filename('.nc') as nc_path:
         saver = Saver(nc_path, 'NETCDF4')
         # assertRaisesRegexp is a deprecated alias that was removed in
         # Python 3.12; use the assertRaisesRegex spelling.
         with self.assertRaisesRegex(ValueError, self.exp_emsg):
             saver._create_cf_cell_measure_variable(self.cube,
                                                    self.names_map, self.cm)
コード例 #5
0
ファイル: test_Saver.py プロジェクト: payton1004/iris
 def test_big_endian(self):
     # Big-endian data should save cleanly and match the shared
     # 'endian' reference CDL.
     be_cube = self._simple_cube('>f4')
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(be_cube)
         self.assertCDL(nc_path, basename='endian', flags='')
コード例 #6
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_mercator_no_ellipsoid(self):
     # A Mercator cube without an ellipsoid should save to the
     # reference CDL for this test.
     merc_cube = self._mercator_cube()
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(merc_cube)
         self.assertCDL(nc_path)
コード例 #7
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_little_endian(self):
     # Little-endian data should save cleanly and match the shared
     # 'endian' reference CDL.
     le_cube = self._simple_cube('<f4')
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(le_cube)
         reference = self.result_path('endian', 'cdl')
         self.assertCDL(nc_path, reference, flags='')
コード例 #8
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_transverse_mercator(self):
     # A transverse-Mercator cube with an explicit ellipsoid should
     # save to the reference CDL for this test.
     datum = GeogCS(6377563.396, 6356256.909)
     tm_cube = self._transverse_mercator_cube(datum)
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(tm_cube)
         self.assertCDL(nc_path)
コード例 #9
0
 def test_big_endian(self):
     """Big-endian data saves and matches the 'endian' reference CDL."""
     cube = self._simple_cube('>f4')
     # Use '.nc' (with the dot) so the temporary file gets a proper
     # extension, consistent with the other tests in this file.
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(cube)
         self.assertCDL(nc_path, ('unit', 'fileformats', 'netcdf', 'Saver',
                                  'write', 'endian.cdl'), flags='')
コード例 #10
0
 def test_big_endian(self):
     # Big-endian data should save cleanly and match the shared
     # 'endian' reference CDL.
     be_cube = self._simple_cube(">f4")
     with self.temp_filename(".nc") as nc_path:
         with Saver(nc_path, "NETCDF4") as writer:
             writer.write(be_cube)
         reference = self.result_path("endian", "cdl")
         self.assertCDL(nc_path, reference, flags="")
コード例 #11
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_stereographic(self):
     # A stereographic cube with an explicit ellipsoid should save to
     # the reference CDL for this test.
     datum = GeogCS(6377563.396, 6356256.909)
     stereo_cube = self._stereo_cube(datum)
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(stereo_cube)
         self.assertCDL(nc_path)
コード例 #12
0
ファイル: test_Saver.py プロジェクト: juanmcloaiza/iris_fork
 def test_default_unlimited_dimensions(self):
     """By default (this iris version) the leading dim saves unlimited."""
     cube = self._simple_cube('>f4')
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(cube)
         ds = nc.Dataset(nc_path)
         try:
             self.assertTrue(ds.dimensions['dim0'].isunlimited())
             self.assertFalse(ds.dimensions['dim1'].isunlimited())
         finally:
             # Close even when an assertion fails, so the temporary
             # file can always be removed.
             ds.close()
コード例 #13
0
 def test_no_unlimited_dimensions(self):
     """With unlimited_dimensions=None no dimension is saved unlimited."""
     cube = self._simple_cube(">f4")
     with self.temp_filename(".nc") as nc_path:
         with Saver(nc_path, "NETCDF4") as saver:
             saver.write(cube, unlimited_dimensions=None)
         ds = nc.Dataset(nc_path)
         try:
             for dim in ds.dimensions.values():
                 self.assertFalse(dim.isunlimited())
         finally:
             # Close even when an assertion fails, so the temporary
             # file can always be removed.
             ds.close()
コード例 #14
0
ファイル: test_netcdf.py プロジェクト: lengyanyanjing/iris
 def test_lazy_preserved_save(self):
     # Writing a cube must not realise (load) its lazy data.
     source_path = tests.get_data_path(
         ('NetCDF', 'label_and_climate', 'small_FC_167_mon_19601101.nc'))
     lazy_cube = iris.load_cube(source_path)
     self.assertTrue(lazy_cube.has_lazy_data())
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(lazy_cube)
     self.assertTrue(lazy_cube.has_lazy_data())
コード例 #15
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_valid_range_and_valid_min_valid_max_provided(self):
     """Conflicting 'valid_range' and 'valid_min' attributes must raise."""
     self.data = self.data.astype('int8')
     self.container.attributes['valid_range'] = [1, 2]
     self.container.attributes['valid_min'] = [1]
     msg = 'Both "valid_range" and "valid_min"'
     with Saver(mock.Mock(), 'NETCDF4') as saver:
         # assertRaisesRegexp is a deprecated alias that was removed in
         # Python 3.12; use the assertRaisesRegex spelling.
         with self.assertRaisesRegex(ValueError, msg):
             saver.check_attribute_compliance(self.container, self.data)
コード例 #16
0
 def test_lazy_preserved_save(self):
     # Writing a cube must not realise (load) its lazy data.
     source_path = tests.get_data_path(
         ("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc"))
     lazy_cube = iris.load_cube(source_path, "air_temperature")
     self.assertTrue(lazy_cube.has_lazy_data())
     with self.temp_filename(".nc") as nc_path:
         with Saver(nc_path, "NETCDF4") as writer:
             writer.write(lazy_cube)
     self.assertTrue(lazy_cube.has_lazy_data())
コード例 #17
0
 def test_transverse_mercator_no_ellipsoid(self):
     """A transverse-Mercator cube (no ellipsoid) matches its CDL."""
     cube = self._transverse_mercator_cube()
     # Use '.nc' (with the dot) so the temporary file gets a proper
     # extension, consistent with the other tests in this file.
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(cube)
         self.assertCDL(nc_path, ('unit', 'fileformats', 'netcdf', 'Saver',
                                  'write',
                                  'transverse_mercator_no_ellipsoid.cdl'))
コード例 #18
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_no_unlimited_dimensions(self):
     """With unlimited_dimensions=None no dimension is saved unlimited."""
     cube = self._simple_cube('>f4')
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(cube, unlimited_dimensions=None)
         ds = nc.Dataset(nc_path)
         try:
             # dict.values() works on both Python 2 and 3 here; no need
             # for six.itervalues.
             for dim in ds.dimensions.values():
                 self.assertFalse(dim.isunlimited())
         finally:
             # Close even when an assertion fails, so the temporary
             # file can always be removed.
             ds.close()
コード例 #19
0
ファイル: test_Saver.py プロジェクト: pdearnshaw/iris
 def test_valid_range_and_valid_min_valid_max_provided(self):
     # Supplying both "valid_range" and "valid_min" is contradictory
     # and must be rejected with a suitable error.
     self.data_dtype = np.dtype("int8")
     self.container.attributes["valid_range"] = [1, 2]
     self.container.attributes["valid_min"] = [1]
     expected_msg = 'Both "valid_range" and "valid_min"'
     with Saver(mock.Mock(), "NETCDF4") as saver:
         with self.assertRaisesRegex(ValueError, expected_msg):
             saver.check_attribute_compliance(self.container,
                                              self.data_dtype)
コード例 #20
0
 def test_default_unlimited_dimensions(self):
     """By default no dimension is saved as unlimited."""
     cube = self._simple_cube(">f4")
     with self.temp_filename(".nc") as nc_path:
         with Saver(nc_path, "NETCDF4") as saver:
             saver.write(cube)
         ds = nc.Dataset(nc_path)
         try:
             self.assertFalse(ds.dimensions["dim0"].isunlimited())
             self.assertFalse(ds.dimensions["dim1"].isunlimited())
         finally:
             # Close even when an assertion fails, so the temporary
             # file can always be removed.
             ds.close()
コード例 #21
0
 def test_reserved_attributes(self):
     # An attribute whose name ("dimensions") clashes with netCDF
     # terminology is still written and read back unchanged.
     cube = self._simple_cube(">f4")
     cube.attributes["dimensions"] = "something something_else"
     with self.temp_filename(".nc") as nc_path:
         with Saver(nc_path, "NETCDF4") as writer:
             writer.write(cube)
         ds = nc.Dataset(nc_path)
         saved_attr = ds.getncattr("dimensions")
         ds.close()
         self.assertEqual(saved_attr, "something something_else")
コード例 #22
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def test_reserved_attributes(self):
     # An attribute whose name ('dimensions') clashes with netCDF
     # terminology is still written and read back unchanged.
     cube = self._simple_cube('>f4')
     cube.attributes['dimensions'] = 'something something_else'
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as writer:
             writer.write(cube)
         ds = nc.Dataset(nc_path)
         saved_attr = ds.getncattr('dimensions')
         ds.close()
         self.assertEqual(saved_attr, 'something something_else')
コード例 #23
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
    def test_valid_max_saved(self):
        """A coordinate 'valid_max' attribute is written to its variable."""
        cube = tests.stock.lat_lon_cube()
        cube.data = cube.data.astype('int32')

        cube.coord(axis='x').attributes['valid_max'] = 2
        with self.temp_filename('.nc') as nc_path:
            with Saver(nc_path, 'NETCDF4') as saver:
                saver.write(cube, unlimited_dimensions=[])
            ds = nc.Dataset(nc_path)
            try:
                self.assertArrayEqual(ds.variables['longitude'].valid_max, 2)
            finally:
                # Close even when the assertion fails, so the temporary
                # file can always be removed.
                ds.close()
コード例 #24
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
    def test_valid_min_saved(self):
        """A cube 'valid_min' attribute is written as a global attribute."""
        cube = tests.stock.lat_lon_cube()
        cube.data = cube.data.astype('int32')

        cube.attributes['valid_min'] = 1
        with self.temp_filename('.nc') as nc_path:
            with Saver(nc_path, 'NETCDF4') as saver:
                saver.write(cube, unlimited_dimensions=[])
            ds = nc.Dataset(nc_path)
            try:
                self.assertArrayEqual(ds.valid_min, 1)
            finally:
                # Close even when the assertion fails, so the temporary
                # file can always be removed.
                ds.close()
コード例 #25
0
    def test_valid_max_saved(self):
        """A coordinate 'valid_max' attribute is written to its variable."""
        cube = tests.stock.lat_lon_cube()
        cube.data = cube.data.astype("int32")

        cube.coord(axis="x").attributes["valid_max"] = 2
        with self.temp_filename(".nc") as nc_path:
            with Saver(nc_path, "NETCDF4") as saver:
                saver.write(cube, unlimited_dimensions=[])
            ds = nc.Dataset(nc_path)
            try:
                self.assertArrayEqual(ds.variables["longitude"].valid_max, 2)
            finally:
                # Close even when the assertion fails, so the temporary
                # file can always be removed.
                ds.close()
コード例 #26
0
    def test_valid_range_saved(self):
        """A cube 'valid_range' attribute is written as a global attribute."""
        cube = tests.stock.lat_lon_cube()
        cube.data = cube.data.astype("int32")

        vrange = np.array([1, 2], dtype="int32")
        cube.attributes["valid_range"] = vrange
        with self.temp_filename(".nc") as nc_path:
            with Saver(nc_path, "NETCDF4") as saver:
                saver.write(cube, unlimited_dimensions=[])
            ds = nc.Dataset(nc_path)
            try:
                self.assertArrayEqual(ds.valid_range, vrange)
            finally:
                # Close even when the assertion fails, so the temporary
                # file can always be removed.
                ds.close()
コード例 #27
0
ファイル: test_Saver.py プロジェクト: znicholls/iris
 def _netCDF_var(self, cube, **kwargs):
     # Save `cube` to a temporary file and yield the netCDF4 Variable
     # whose standard_name matches the cube's.
     standard_name = cube.standard_name
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(cube, **kwargs)
         ds = nc.Dataset(nc_path)
         try:
             var, = [
                 var for var in ds.variables.values()
                 if var.standard_name == standard_name
             ]
             yield var
         finally:
             # The original never closed the dataset; close it once the
             # caller is done so the temporary file can be removed.
             ds.close()
コード例 #28
0
ファイル: test_Saver.py プロジェクト: juanmcloaiza/iris_fork
 def test_zlib(self):
     # zlib=True passed to write() must be forwarded to
     # netCDF4.Dataset.createVariable.
     cube = self._simple_cube('>f4')
     with mock.patch('iris.fileformats.netcdf.netCDF4') as api:
         with Saver('/dummy/path', 'NETCDF4') as saver:
             saver.write(cube, zlib=True)
     dataset = api.Dataset.return_value
     expected_calls = mock.call.createVariable(
         'air_pressure_anomaly', np.dtype('float32'), ['dim0', 'dim1'],
         fill_value=None, shuffle=True, least_significant_digit=None,
         contiguous=False, zlib=True, fletcher32=False,
         endian='native', complevel=4, chunksizes=None).call_list()
     dataset.assert_has_calls(expected_calls)
コード例 #29
0
ファイル: test_Saver.py プロジェクト: juanmcloaiza/iris_fork
 def test_least_significant_digit(self):
     # least_significant_digit=1 should be recorded as an attribute and
     # quantise the data: lossy, but within 0.1 of the original.
     src = Cube(np.array([1.23, 4.56, 7.89]),
                standard_name='surface_temperature', long_name=None,
                var_name='temp', units='K')
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(src, least_significant_digit=1)
         reloaded = iris.load_cube(nc_path)
         self.assertEqual(
             reloaded.attributes['least_significant_digit'], 1)
         self.assertFalse(np.all(src.data == reloaded.data))
         self.assertArrayAllClose(src.data, reloaded.data, 0.1)
コード例 #30
0
ファイル: test_Saver.py プロジェクト: juanmcloaiza/iris_fork
 def test_custom_unlimited_dimensions(self):
     """Dimensions named in unlimited_dimensions are saved unlimited.

     Checked twice: passing coordinate names, then coordinate objects.
     """
     cube = self._transverse_mercator_cube()
     unlimited_dimensions = ['projection_y_coordinate',
                             'projection_x_coordinate']
     # Test coordinates by name.
     with self.temp_filename('.nc') as nc_path:
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(cube, unlimited_dimensions=unlimited_dimensions)
         ds = nc.Dataset(nc_path)
         try:
             for dim in unlimited_dimensions:
                 self.assertTrue(ds.dimensions[dim].isunlimited())
         finally:
             # Close even when an assertion fails, so the temporary
             # file can always be removed.
             ds.close()
     # Test coordinate arguments (objects rather than names).
     with self.temp_filename('.nc') as nc_path:
         coords = [cube.coord(dim) for dim in unlimited_dimensions]
         with Saver(nc_path, 'NETCDF4') as saver:
             saver.write(cube, unlimited_dimensions=coords)
         ds = nc.Dataset(nc_path)
         try:
             for dim in unlimited_dimensions:
                 self.assertTrue(ds.dimensions[dim].isunlimited())
         finally:
             ds.close()