def test_relative_vorticity_distance(self):
    """Check relative vorticity on a Cartesian (distance) grid.

    u and v carry the same linear profile broadcast along opposite
    axes, so the finite-difference terms cancel and the relative
    vorticity must be identically zero everywhere.
    """
    # Bug fix: the guard's `return` was fused with the following
    # statement ("returncf x_min = 0.0"), which is a syntax error.
    if self.test_only and inspect.stack()[0][3] not in self.test_only:
        return

    x_min = 0.0
    x_max = 100.0
    dx = 1.0

    x_1d = numpy.arange(x_min, x_max, dx)
    size = x_1d.size

    data_1d = x_1d * 2.0 + 1.0
    data_2d = numpy.broadcast_to(data_1d[numpy.newaxis, :], (size, size))

    dim_x = cf.DimensionCoordinate(data=cf.Data(x_1d, 'm'),
                                   properties={'axis': 'X'})
    dim_y = cf.DimensionCoordinate(data=cf.Data(x_1d, 'm'),
                                   properties={'axis': 'Y'})

    u = cf.Field()
    X = u.set_construct(cf.DomainAxis(size=dim_x.data.size))
    Y = u.set_construct(cf.DomainAxis(size=dim_y.data.size))
    u.set_construct(dim_x, axes=[X])
    u.set_construct(dim_y, axes=[Y])
    u.set_data(cf.Data(data_2d, 'm/s'), axes=('Y', 'X'))

    v = cf.Field()
    v.set_construct(cf.DomainAxis(size=dim_x.data.size))
    v.set_construct(cf.DomainAxis(size=dim_y.data.size))
    v.set_construct(dim_x, axes=[X])
    v.set_construct(dim_y, axes=[Y])
    v.set_data(cf.Data(data_2d, 'm/s'), axes=('X', 'Y'))

    rv = cf.relative_vorticity(u, v, one_sided_at_boundary=True)
    self.assertTrue((rv.array == 0.0).all())
def save_datasets(self, datasets, filename, **kwargs):
    """Save all datasets to one or more files.

    Datasets with the same shape share a single cached domain so the
    line/pixel coordinates are only built once per shape.

    :param datasets: iterable of array-like datasets; each must have a
        ``shape`` attribute and an ``info`` mapping with ``'units'``
        and ``'standard_name'`` keys.
    :param filename: path of the netCDF file to write.
    :param kwargs: accepted for interface compatibility; unused here.
    """
    # Fixed docstring: previously ended with an unbalanced ')'.
    fields = []
    shapes = {}
    for dataset in datasets:
        if dataset.shape in shapes:
            domain = shapes[dataset.shape]
        else:
            lines, pixels = dataset.shape
            # Create line/pixel dimension coordinates for this shape
            line_coord = cf.DimensionCoordinate(
                data=cf.Data(np.arange(lines), '1'))
            pixel_coord = cf.DimensionCoordinate(
                data=cf.Data(np.arange(pixels), '1'))
            domain = cf.Domain(dim={
                'lines': line_coord,
                'pixels': pixel_coord
            })
            # Cache so later datasets of the same shape reuse it
            shapes[dataset.shape] = domain

        data = cf.Data(dataset, dataset.info['units'])
        properties = {'standard_name': dataset.info['standard_name']}
        fields.append(
            cf.Field(properties=properties, data=data,
                     axes=['lines', 'pixels'], domain=domain))

    cf.write(fields, filename, fmt='NETCDF4')
def test_relative_vorticity_distance(self):
    """Relative vorticity of cancelling fields on a distance grid is zero."""
    coords = numpy.arange(0.0, 100.0, 1.0)
    n = coords.size

    # Same linear profile broadcast into a square grid
    row = coords * 2.0 + 1.0
    grid = numpy.broadcast_to(row[numpy.newaxis, :], (n, n))

    x_coord = cf.DimensionCoordinate(
        data=cf.Data(coords, "m"), properties={"axis": "X"}
    )
    y_coord = cf.DimensionCoordinate(
        data=cf.Data(coords, "m"), properties={"axis": "Y"}
    )

    u = cf.Field()
    axis_x = u.set_construct(cf.DomainAxis(size=x_coord.data.size))
    axis_y = u.set_construct(cf.DomainAxis(size=y_coord.data.size))
    u.set_construct(x_coord, axes=[axis_x])
    u.set_construct(y_coord, axes=[axis_y])
    u.set_data(cf.Data(grid, "m/s"), axes=("Y", "X"))

    v = cf.Field()
    v.set_construct(cf.DomainAxis(size=x_coord.data.size))
    v.set_construct(cf.DomainAxis(size=y_coord.data.size))
    v.set_construct(x_coord, axes=[axis_x])
    v.set_construct(y_coord, axes=[axis_y])
    v.set_data(cf.Data(grid, "m/s"), axes=("X", "Y"))

    vorticity = cf.relative_vorticity(u, v, one_sided_at_boundary=True)
    self.assertTrue((vorticity.array == 0.0).all())
def test_DimensionCoordinate_convert_reference_time(self):
    """Check convert_reference_time for several calendars.

    For each calendar, converting 'months since' values with
    calendar_months=True must give the expected day offsets (both
    in-place and as a new construct) and must leave the source
    construct unchanged.  The three hand-unrolled, near-identical
    stanzas of the original are consolidated into one loop.
    """
    # (calendar, expected day values after conversion)
    cases = (
        ('gregorian', [31., 60., 91.]),
        ('360_day', [30., 60., 90.]),
        ('noleap', [31., 59., 90.]),
    )
    for calendar, expected in cases:
        d = cf.DimensionCoordinate()
        d.set_data(
            cf.Data([1, 2, 3], 'months since 2004-1-1', calendar=calendar))
        self.assertTrue((d.array == [1., 2, 3]).all())

        e = d.copy()
        self.assertIsNone(
            e.convert_reference_time(calendar_months=True, inplace=True))
        f = d.convert_reference_time(calendar_months=True)

        for x in (e, f):
            self.assertTrue((x.array == expected).all())
            self.assertTrue((x.datetime_array == [
                cf.dt('2004-02-01 00:00:00', calendar=calendar),
                cf.dt('2004-03-01 00:00:00', calendar=calendar),
                cf.dt('2004-04-01 00:00:00', calendar=calendar)
            ]).all())

        # Neither conversion may modify the source construct
        self.assertTrue((d.array == [1., 2, 3]).all())
def test_DimensionCoordinate_convert_reference_time(self):
    """Check convert_reference_time across calendars."""

    def _run(calendar, expected, check_initial):
        # One calendar's worth of checks: convert in-place and via a
        # new construct, then verify the source is untouched.
        d = cf.DimensionCoordinate()
        d.set_data(
            cf.Data([1, 2, 3], "months since 2004-1-1", calendar=calendar))
        if check_initial:
            self.assertTrue((d.array == [1.0, 2, 3]).all())

        e = d.copy()
        self.assertIsNone(
            e.convert_reference_time(calendar_months=True, inplace=True))
        f = d.convert_reference_time(calendar_months=True)

        for x in (e, f):
            self.assertTrue((x.array == expected).all())
            self.assertTrue((x.datetime_array == [
                cf.dt("2004-02-01 00:00:00", calendar=calendar),
                cf.dt("2004-03-01 00:00:00", calendar=calendar),
                cf.dt("2004-04-01 00:00:00", calendar=calendar),
            ]).all())

        self.assertTrue((d.array == [1.0, 2, 3]).all())

    _run("gregorian", [31.0, 60.0, 91.0], check_initial=True)
    _run("360_day", [30.0, 60.0, 90.0], check_initial=False)
    _run("noleap", [31.0, 59.0, 90.0], check_initial=False)
def test_keyword_deprecation(self):
    """The deprecated 'i' keyword must raise a DeprecationError."""
    # 'i' was the old spelling of the 'inplace' keyword
    current = cf.Data([list(range(100))])
    current.squeeze(inplace=True)  # the supported spelling still works

    legacy = cf.Data([list(range(100))])
    with self.assertRaises(cf.functions.DeprecationError):
        legacy.squeeze(i=True)
def test_DSG_create_contiguous(self):
    """Create an auxiliary coordinate from a contiguous ragged array."""
    # Uncompressed values: two timeseries, of lengths 2 and 3
    raw = numpy.array([1, 3, 4, 3, 6], dtype="float32")

    counts = cf.Count(data=cf.Data([2, 3]))
    counts.set_property(
        "long_name", "number of obs for this timeseries"
    )

    ragged = cf.RaggedContiguousArray(
        compressed_array=cf.Data(raw),
        shape=(2, 3),
        size=6,
        ndim=2,
        count_variable=counts,
    )

    z = cf.AuxiliaryCoordinate(
        data=cf.Data(ragged),
        properties={
            "standard_name": "height",
            "units": "km",
            "positive": "up",
        },
    )

    # Uncompressing pads the shorter series with a masked element
    expected = numpy.ma.masked_array(
        data=[[1.0, 3.0, 99], [4.0, 3.0, 6.0]],
        mask=[[False, False, True], [False, False, False]],
        fill_value=1e20,
        dtype="float32",
    )
    self.assertTrue((z.data.array == expected).all())

    self.assertEqual(z.data.get_compression_type(), "ragged contiguous")
    self.assertTrue(
        (z.data.compressed_array
         == numpy.array([1.0, 3.0, 4.0, 3.0, 6.0], dtype="float32")).all()
    )
    self.assertTrue(
        (z.data.get_count().data.array == numpy.array([2, 3])).all()
    )
def test_relative_vorticity_latlong(self):
    """Check relative vorticity on a spherical lat/long grid.

    u and v are constructed so the horizontal derivative terms cancel,
    leaving only the u*tan(lat)/radius metric term in the expected
    result.
    """
    if self.test_only and inspect.stack()[0][3] not in self.test_only:
        return

    lat_min = -90.0
    lat_max = 90.0
    dlat = 1.0
    lat_1d = numpy.arange(lat_min, lat_max, dlat)
    lat_size = lat_1d.size

    lon_min = 0.0
    lon_max = 359.0
    dlon = 1.0
    lon_1d = numpy.arange(lon_min, lon_max, dlon)
    lon_size = lon_1d.size

    # Bug fix: the u_1d/v_1d profiles were computed but never used -
    # the raw coordinate arrays were broadcast instead (compare the
    # sibling version of this test, which broadcasts u_1d and v_1d).
    u_1d = lat_1d * 2.0 + 1.0
    u_2d = numpy.broadcast_to(u_1d[numpy.newaxis, :], (lon_size, lat_size))

    v_1d = lon_1d * 2.0 + 1.0
    v_2d = numpy.broadcast_to(v_1d[:, numpy.newaxis], (lon_size, lat_size))
    v_2d = v_2d * numpy.cos(lat_1d * numpy.pi / 180.0)[numpy.newaxis, :]

    # Only the metric term u*tan(lat)/radius survives
    rv_array = (u_2d / cf.Data(6371229.0, 'meters')
                * numpy.tan(lat_1d * numpy.pi / 180.0)[numpy.newaxis, :])

    dim_x = cf.DimensionCoordinate(data=cf.Data(lon_1d, 'degrees_east'),
                                   properties={'axis': 'X'})
    dim_y = cf.DimensionCoordinate(data=cf.Data(lat_1d, 'degrees_north'),
                                   properties={'axis': 'Y'})

    u = cf.Field()
    u.set_construct(cf.DomainAxis(size=lon_1d.size))
    u.set_construct(cf.DomainAxis(size=lat_1d.size))
    u.set_construct(dim_x)
    u.set_construct(dim_y)
    u.set_data(cf.Data(u_2d, 'm/s'), axes=('X', 'Y'))
    u.cyclic('X', period=360.0)

    v = cf.Field()
    v.set_construct(cf.DomainAxis(size=lon_1d.size))
    v.set_construct(cf.DomainAxis(size=lat_1d.size))
    v.set_construct(dim_x)
    v.set_construct(dim_y)
    v.set_data(cf.Data(v_2d, 'm/s'), axes=('X', 'Y'))
    v.cyclic('X', period=360.0)

    rv = cf.relative_vorticity(u, v, wrap=True)
    self.assertTrue(numpy.allclose(rv.array, rv_array))
def test_relative_vorticity_latlong(self):
    """Relative vorticity on a lat/long grid reduces to the metric term."""
    lats = numpy.arange(-90.0, 90.0, 1.0)
    lons = numpy.arange(0.0, 359.0, 1.0)
    nlat = lats.size
    nlon = lons.size

    u_profile = lats * 2.0 + 1.0
    u_2d = numpy.broadcast_to(u_profile[numpy.newaxis, :], (nlon, nlat))

    v_profile = lons * 2.0 + 1.0
    v_2d = numpy.broadcast_to(v_profile[:, numpy.newaxis], (nlon, nlat))
    v_2d = v_2d * numpy.cos(lats * numpy.pi / 180.0)[numpy.newaxis, :]

    # Expected result: only the u*tan(lat)/radius term survives
    expected = (
        u_2d
        / cf.Data(6371229.0, "meters")
        * numpy.tan(lats * numpy.pi / 180.0)[numpy.newaxis, :]
    )

    dim_x = cf.DimensionCoordinate(
        data=cf.Data(lons, "degrees_east"), properties={"axis": "X"}
    )
    dim_y = cf.DimensionCoordinate(
        data=cf.Data(lats, "degrees_north"), properties={"axis": "Y"}
    )

    u = cf.Field()
    u.set_construct(cf.DomainAxis(size=nlon))
    u.set_construct(cf.DomainAxis(size=nlat))
    u.set_construct(dim_x)
    u.set_construct(dim_y)
    u.set_data(cf.Data(u_2d, "m/s"), axes=("X", "Y"))
    u.cyclic("X", period=360.0)

    v = cf.Field()
    v.set_construct(cf.DomainAxis(size=nlon))
    v.set_construct(cf.DomainAxis(size=nlat))
    v.set_construct(dim_x)
    v.set_construct(dim_y)
    v.set_data(cf.Data(v_2d, "m/s"), axes=("X", "Y"))
    v.cyclic("X", period=360.0)

    rv = cf.relative_vorticity(u, v, wrap=False)
    self.assertTrue(numpy.allclose(rv.array, expected))
def _parse_cmip6_properties(cim2_properties, global_attributes, time_coords):
    """Extend the cim2 property set with CMIP6-specific properties.

    :param cim2_properties: dict updated in place with the parent
        variant indices and branch times.
    :param global_attributes: dict of the file's global attributes.
    :param time_coords: the time coordinate construct; its Units are
        used when parent_time_units is absent from the file.
    """
    # parent_variant_label is e.g. 'r1i1p1f1' - extract the 4 indices.
    # Bug fix: regex patterns are now raw strings ('\d' is an invalid
    # escape sequence in an ordinary string on modern Python).
    cim2_properties.update(
        zip(['parent_realization_index',
             'parent_initialization_index',
             'parent_physics_index',
             'parent_forcing_index'],
            map(int, re.findall(r'\d+',
                                global_attributes.get(
                                    'parent_variant_label', 'none')))))

    # parent_time_units
    parent_time_units = global_attributes.get('parent_time_units')
    if parent_time_units in (None, 'no parent'):
        # parent_time_units has not been set in file, so they are
        # assumed to be the same as the child time units
        parent_time_units = time_coords.Units
    else:
        # parent_time_units have been set in file, possibly in the
        # form 'units (calendar)'
        m = re.match(r'(.*) *\((.*?)\)', parent_time_units)
        if m:
            parent_time_units = cf.Units(*m.groups())
        else:
            parent_time_units = cf.Units(parent_time_units,
                                         cim2_properties['calendar'])

    # ----------------------------------------------------------------
    # CIM2 branch_time_in_parent
    # ----------------------------------------------------------------
    branch_time_in_parent = global_attributes.get('branch_time_in_parent')
    if branch_time_in_parent is not None:
        # Bug fix: 'basestring' is Python 2 only (NameError on
        # Python 3); 'str' covers the same string case here.
        if isinstance(branch_time_in_parent, str):
            # Fix in case branch_time_in_parent is a string
            branch_time_in_parent = float(branch_time_in_parent)

        x = cf.Data([branch_time_in_parent],
                    units=parent_time_units).dtarray[0]
        cim2_properties['branch_time_in_parent'] = str(x)

    # ----------------------------------------------------------------
    # CIM2 branch_time_in_child
    # ----------------------------------------------------------------
    branch_time_in_child = global_attributes.get('branch_time_in_child')
    if branch_time_in_child is not None:
        if not isinstance(branch_time_in_child, float):
            # Fix in case branch_time_in_child is not a float
            branch_time_in_child = float(branch_time_in_child)

        x = cf.Data([branch_time_in_child],
                    units=time_coords.Units).dtarray[0]
        cim2_properties['branch_time_in_child'] = str(x)
def test_DSG_create_contiguous(self):
    """Build an auxiliary coordinate from contiguous ragged-array data."""
    if self.test_only and inspect.stack()[0][3] not in self.test_only:
        return

    # Uncompressed values: two timeseries, of lengths 2 and 3
    values = numpy.array([1, 3, 4, 3, 6], dtype='float32')

    counts = cf.Count(data=cf.Data([2, 3]))
    counts.set_property('long_name',
                        'number of obs for this timeseries')

    # Contiguous ragged representation of a (2, 3) array
    ragged = cf.RaggedContiguousArray(
        compressed_array=cf.Data(values),
        shape=(2, 3), size=6, ndim=2,
        count_variable=counts)

    z = cf.AuxiliaryCoordinate(
        data=cf.Data(ragged),
        properties={'standard_name': 'height',
                    'units': 'km',
                    'positive': 'up'})

    # The shorter series is padded with a masked element
    expected = numpy.ma.masked_array(
        data=[[1.0, 3.0, 99], [4.0, 3.0, 6.0]],
        mask=[[False, False, True], [False, False, False]],
        fill_value=1e+20,
        dtype='float32')
    self.assertTrue((z.data.array == expected).all())

    self.assertEqual(z.data.get_compression_type(), 'ragged contiguous')
    self.assertTrue(
        (z.data.compressed_array
         == numpy.array([1., 3., 4., 3., 6.], dtype='float32')).all())
    self.assertTrue(
        (z.data.get_count().data.array == numpy.array([2, 3])).all())
def test_write_reference_datetime(self):
    """Writing with reference_datetime must rebase the time units."""
    if self.test_only and inspect.stack()[0][3] not in self.test_only:
        return

    for reference_datetime in ("1751-2-3", "1492-12-30"):
        for chunksize in self.chunk_sizes:
            cf.chunksize(chunksize)

            f = cf.read(self.filename)[0]
            t = cf.DimensionCoordinate(
                data=cf.Data([123], "days since 1750-1-1"))
            t.standard_name = "time"
            axisT = f.set_construct(cf.DomainAxis(1))
            f.set_construct(t, axes=[axisT])

            cf.write(f, tmpfile, fmt="NETCDF4",
                     reference_datetime=reference_datetime)

            # Read back and check the rebased units
            g = cf.read(tmpfile)[0]
            t = g.dimension_coordinate("T")
            self.assertEqual(
                t.Units,
                cf.Units("days since " + reference_datetime),
                "Units written were " + repr(t.Units.reftime)
                + " not " + repr(reference_datetime),
            )

    cf.chunksize(self.original_chunksize)
def test_Query_as_where_condition(self):
    """Check queries work correctly as conditions in 'where' method."""
    # TODO: extend test; added as-is to capture a specific bug (now fixed)
    seconds = cf.Data([30, 60, 90], 'second')

    # Pairs of equivalent queries, in minutes and in seconds
    query_pairs = [
        (cf.lt(1, units="minute"), cf.lt(60, units="second")),
        (cf.ge(1, units="minute"), cf.ge(60, units="second")),
    ]
    for minute_query, second_query in query_pairs:
        same_units = seconds.data.where(second_query, 0)
        mixed_units = seconds.data.where(minute_query, 0)
        self.assertTrue((mixed_units.array == same_units.array).all())

        same_units_masked = seconds.data.where(second_query, cf.masked)
        mixed_units_masked = seconds.data.where(minute_query, cf.masked)
        self.assertEqual(
            mixed_units_masked.count(), same_units_masked.count())
def test_write_reference_datetime(self):
    '''Check that reference_datetime rebases the written time units.'''
    if self.test_only and inspect.stack()[0][3] not in self.test_only:
        return

    for reference_datetime in ('1751-2-3', '1492-12-30'):
        for chunksize in self.chunk_sizes:
            cf.chunksize(chunksize)

            f = cf.read(self.filename)[0]

            time_coord = cf.DimensionCoordinate(
                data=cf.Data([123], 'days since 1750-1-1'))
            time_coord.standard_name = 'time'
            time_axis = f.set_construct(cf.DomainAxis(1))
            f.set_construct(time_coord, axes=[time_axis])

            cf.write(f, tmpfile, fmt='NETCDF4',
                     reference_datetime=reference_datetime)

            # Read back and confirm the rebased reference date
            t = cf.read(tmpfile)[0].dimension_coordinate('T')
            self.assertEqual(
                t.Units,
                cf.Units('days since ' + reference_datetime),
                ('Units written were ' + repr(t.Units.reftime)
                 + ' not ' + repr(reference_datetime)))

    cf.chunksize(self.original_chunksize)
def setUp(self):
    """Create the test file path and a latitude auxiliary coordinate."""
    self.filename = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'test_file.nc')

    lat_values = numpy.array(
        [-30, -23.5, -17.8123, -11.3345, -0.7, -0.2,
         0, 0.2, 0.7, 11.30003, 17.8678678, 23.5, 30]
    )

    aux1 = cf.AuxiliaryCoordinate()
    aux1.standard_name = 'latitude'
    aux1.set_data(cf.Data(lat_values, 'degrees_north'))

    # Bounds are +/- 0.1 either side of each latitude point
    bounds_array = numpy.empty(lat_values.shape + (2,))
    bounds_array[:, 0] = lat_values - 0.1
    bounds_array[:, 1] = lat_values + 0.1
    bounds = cf.Bounds()
    bounds.set_data(cf.Data(bounds_array))
    aux1.set_bounds(bounds)

    self.aux1 = aux1
def test_Datetime_Data(self):
    """Datetime comparisons must respect the data's calendar."""
    gregorian = cf.Data([1, 2, 3], "days since 2004-02-28")
    self.assertTrue((gregorian < cf.dt(2005, 2, 28)).all())

    # Comparisons against invalid or mismatched dates must fail
    with self.assertRaises(Exception):
        gregorian < cf.dt(2005, 2, 29)
    with self.assertRaises(Exception):
        gregorian < cf.dt(2005, 2, 29, calendar="360_day")

    day360 = cf.Data([1, 2, 3], "days since 2004-02-28", calendar="360_day")
    # Days 28-30 of February all exist in a 360-day calendar
    self.assertTrue((day360 < cf.dt(2005, 2, 28)).all())
    self.assertTrue((day360 < cf.dt(2005, 2, 29)).all())
    self.assertTrue((day360 < cf.dt(2005, 2, 30)).all())

    with self.assertRaises(Exception):
        day360 < cf.dt(2005, 2, 31)
    with self.assertRaises(Exception):
        day360 < cf.dt(2005, 2, 29, calendar="noleap")
def test_TimeDuration_arithmetic(self):
    """Check TimeDuration arithmetic with datetimes, Data and scalars."""
    # TimeDuration with cf datetimes, and scaling
    self.assertEqual(cf.M() + cf.dt(2000, 1, 1), cf.dt(2000, 2, 1))
    self.assertEqual(cf.M() * 8, cf.M(8))
    self.assertEqual(cf.M() * 8.5, cf.M(8.5))
    self.assertEqual(cf.dt(2000, 1, 1) + cf.M(), cf.dt(2000, 2, 1))
    self.assertEqual(cf.dt(2000, 1, 1) - cf.M(), cf.dt(1999, 12, 1))

    # TimeDuration with builtin datetimes
    self.assertEqual(
        cf.M() + datetime.datetime(2000, 1, 1),
        cf.dt(2000, 2, 1, calendar="gregorian"))
    self.assertEqual(
        datetime.datetime(2000, 1, 1) + cf.M(),
        cf.dt(2000, 2, 1, calendar="gregorian"))
    self.assertEqual(
        datetime.datetime(2000, 1, 1) - cf.M(),
        cf.dt(1999, 12, 1, calendar="gregorian"))

    # Augmented assignment on a cf datetime
    d = cf.dt(2000, 1, 1)
    d += cf.M()
    self.assertEqual(d, cf.dt(2000, 2, 1))
    d -= cf.M()
    self.assertEqual(d, cf.dt(2000, 1, 1))

    # ... and on a builtin datetime
    d = datetime.datetime(2000, 1, 1)
    d += cf.M()
    self.assertEqual(d, cf.dt(2000, 2, 1, calendar="gregorian"))
    d -= cf.M()
    self.assertEqual(d, cf.dt(2000, 1, 1, calendar="gregorian"))

    # Multiplication and (floor) division
    self.assertEqual(cf.M() * 8, cf.M(8))
    self.assertEqual(cf.M() * 8.5, cf.M(8.5))
    self.assertEqual(cf.M() / 2.0, cf.M(0.5))
    self.assertEqual(cf.M(8) / 3, cf.M(8 / 3))
    self.assertEqual(cf.M(8) // 3, cf.M(2))

    # Test arithmetic involving Data as well as datetimes:
    da = cf.Data([2], units="days since 2000-01-01")
    dt = cf.TimeDuration(14, "day")

    forward = da + dt
    forward_reflected = dt + da
    self.assertEqual(forward, cf.dt(2000, 1, 17, calendar="gregorian"))
    self.assertEqual(forward, forward_reflected)

    backward = dt - da
    backward_reflected = da - dt
    self.assertEqual(backward, cf.dt(1999, 12, 20, calendar="gregorian"))
    self.assertEqual(backward, backward_reflected)
def _get_simulation_start_end_dates(dates, calendar):
    """Return the simulation start and end times as ISO8601-like strings.

    :param dates: sequence of date values (may be empty or None).
    :param calendar: calendar name used to interpret the dates.
    :returns: (start, end) strings, or (None, None) when there are no
        dates.
    """
    if dates:
        # Bug fix: the sorted, de-duplicated result was previously
        # assigned to an unused name ('date'), so duplicates were kept
        # and the units reference was taken from the unsorted input.
        dates = sorted(set(dates))
        units = cf.Units('days since ' + str(dates[0]), calendar)
        dates = cf.Data(dates, units, dt=True)
        return str(dates.min().dtarray[0]), str(dates.max().dtarray[0])

    return (None, None)
def test_DimensionCoordinate_set_data(self):
    """Check set_data's in-place and copy behaviour, and shape checks."""
    dim = cf.DimensionCoordinate()

    # Default call operates in-place and returns None
    self.assertIsNone(dim.set_data(cf.Data([1, 2, 3])))
    self.assertTrue(dim.has_data())

    # inplace=False returns a new construct, leaving the original bare
    dim.del_data()
    new = dim.set_data(cf.Data([1, 2, 3]), inplace=False)
    self.assertIsInstance(new, cf.DimensionCoordinate)
    self.assertFalse(dim.has_data())
    self.assertTrue(new.has_data())

    # 0-d and N-d (N >= 2) data must be rejected
    with self.assertRaises(Exception):
        new = dim.set_data(cf.Data([[1, 2, 3]]))
    with self.assertRaises(Exception):
        new = dim.set_data(cf.Data(1))
def test_Query_object_units(self):
    """Check units are processed correctly in and from queries."""
    # Conversion factor -> (smaller unit, larger unit); only exact
    # equivalents are used for this test
    equivalent_units = {
        1000: ("m", "km"),
        60: ("s", "minute"),
    }

    for factor, (small, large) in equivalent_units.items():
        # Case 1: only specify units to one component
        q1 = cf.Query("gt", cf.Data(1, small))
        q2 = cf.Query("ge", 1, units=small)
        # Case 2: specify *equal* units across the two components
        q3 = cf.Query("lt", cf.Data(1, small), units=small)
        # Case 3: specify *equivalent* units across the two components
        q4 = cf.Query("le", cf.Data(1, small), units=large)
        # See also final Case 4, below.

        # A) test processed correctly inside unit itself
        for q in (q1, q2, q3, q4):
            self.assertIsInstance(q, cf.query.Query)
            # TODO: should q4 return the smaller or larger unit? It is
            # currently the smaller one.
            self.assertEqual(q._value.Units._units, small)

        # Case 4: non-equivalent units make a nonsensical query
        with self.assertRaises(ValueError):
            cf.Query("le", cf.Data(1, "m"), units="s")

        # B) check units are processed correctly when a Query is
        # evaluated: values are converted before comparison
        q5 = cf.Query("eq", factor, units=small)
        self.assertTrue(
            q5.evaluate(cf.Data([1, 2], large)).equals(
                cf.Data([True, False])))
def test_AuxiliaryCoordinate_transpose(self):
    """Transposing an auxiliary coordinate must also transpose bounds."""
    lon = self.f.auxiliary_coordinate("longitude").copy()
    lon.set_bounds(cf.Bounds(
        data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4))))

    self.assertEqual(lon.shape, (9, 10))
    self.assertEqual(lon.bounds.shape, (9, 10, 4))

    # Default transpose returns a new construct
    flipped = lon.transpose()
    self.assertEqual(flipped.shape, (10, 9))
    self.assertEqual(flipped.bounds.shape, (10, 9, 4), flipped.bounds.shape)

    # Explicit axis order, applied in-place
    lon.transpose([1, 0], inplace=True)
    self.assertEqual(lon.shape, (10, 9))
    self.assertEqual(lon.bounds.shape, (10, 9, 4), lon.bounds.shape)
def test_AuxiliaryCoordinate_transpose(self):
    """Transposing an auxiliary coordinate transposes its bounds too."""
    field = cf.read(self.filename)[0]
    lon = field.auxiliary_coordinates('longitude').value()

    lon.set_bounds(cf.Bounds(
        data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4))))
    self.assertEqual(lon.shape, (9, 10))
    self.assertEqual(lon.bounds.shape, (9, 10, 4))

    # Default transpose returns a new construct
    transposed = lon.transpose()
    self.assertEqual(transposed.shape, (10, 9))
    self.assertEqual(
        transposed.bounds.shape, (10, 9, 4), transposed.bounds.shape)

    # Explicit axis order, applied in-place
    lon.transpose([1, 0], inplace=True)
    self.assertEqual(lon.shape, (10, 9))
    self.assertEqual(lon.bounds.shape, (10, 9, 4), lon.bounds.shape)
def test_AuxiliaryCoordinate_squeeze(self):
    """Squeezing an auxiliary coordinate must also squeeze its bounds."""
    lon = self.f.auxiliary_coordinate("longitude").copy()
    lon.set_bounds(cf.Bounds(
        data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4))))

    # Add two size-1 dimensions to squeeze away again
    lon.insert_dimension(1, inplace=True)
    lon.insert_dimension(0, inplace=True)
    self.assertEqual(lon.shape, (1, 9, 1, 10))
    self.assertEqual(lon.bounds.shape, (1, 9, 1, 10, 4))

    # Squeezing all size-1 axes returns a new construct
    squeezed = lon.squeeze()
    self.assertEqual(squeezed.shape, (9, 10))
    self.assertEqual(
        squeezed.bounds.shape, (9, 10, 4), squeezed.bounds.shape)

    # Squeeze a single axis in-place
    lon.squeeze(2, inplace=True)
    self.assertEqual(lon.shape, (1, 9, 10))
    self.assertEqual(lon.bounds.shape, (1, 9, 10, 4), lon.bounds.shape)
def test_AuxiliaryCoordinate_squeeze(self):
    """Squeezing an auxiliary coordinate squeezes its bounds too."""
    field = cf.read(self.filename)[0]
    lon = field.auxiliary_coordinates('longitude').value()

    lon.set_bounds(cf.Bounds(
        data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4))))

    # Insert two size-1 dimensions to be squeezed away again
    lon.insert_dimension(1, inplace=True)
    lon.insert_dimension(0, inplace=True)
    self.assertEqual(lon.shape, (1, 9, 1, 10))
    self.assertEqual(lon.bounds.shape, (1, 9, 1, 10, 4))

    # Squeezing all size-1 axes returns a new construct
    squeezed = lon.squeeze()
    self.assertEqual(squeezed.shape, (9, 10))
    self.assertEqual(
        squeezed.bounds.shape, (9, 10, 4), squeezed.bounds.shape)

    # Squeeze a single axis in-place
    lon.squeeze(2, inplace=True)
    self.assertEqual(lon.shape, (1, 9, 10))
    self.assertEqual(lon.bounds.shape, (1, 9, 10, 4), lon.bounds.shape)
def wrap_netcdf(year_o, yearpart_o, var_o, standard_name_o, units_o):
    """Wrap an array into a cf.Field with time/lat/lon coordinates.

    Relies on module-level ``count_time``, ``divt``, ``latitude`` and
    ``longitude``.  # NOTE(review): these globals are assumed to be
    defined by the enclosing script - confirm against the callers.

    :param year_o: year of the period start.
    :param yearpart_o: day offset added to 1 January of that year.
    :param var_o: array whose leading axis is time (size 1, 240 or 241).
    :param standard_name_o: CF standard name for the output field.
    :param units_o: units string for the data.
    :returns: the constructed cf.Field.
    :raises ValueError: if the time-axis size is unsupported.  (Bug
        fix: previously any other size left ``dim0`` unbound and the
        function died with an obscure NameError.)
    """
    var_shape = var_o.shape

    # Reference date for the time coordinate units
    start_date = (datetime.datetime(year_o, 1, 1)
                  + datetime.timedelta(yearpart_o)).strftime('%Y-%m-%d')

    if var_shape[0] == 1:
        dim0 = cf.DimensionCoordinate(
            properties={'standard_name': 'time'},
            data=cf.Data(0., cf.Units('days since ' + start_date,
                                      calendar='standard')))
    elif var_shape[0] == 240:
        nc_time = (86400.0 / count_time / divt) * np.arange(count_time * divt)
        dim0 = cf.DimensionCoordinate(
            properties={'standard_name': 'time'},
            data=cf.Data(nc_time,
                         cf.Units('seconds since ' + start_date + ' 0:3:0',
                                  calendar='standard')))
    elif var_shape[0] == 241:
        nc_time = (86400.0 / count_time / divt) * np.arange(
            count_time * divt + 1)
        dim0 = cf.DimensionCoordinate(
            properties={'standard_name': 'time'},
            data=cf.Data(nc_time,
                         cf.Units('seconds since ' + start_date + ' 0:0:0',
                                  calendar='standard')))
    else:
        raise ValueError(
            'Unsupported time axis size: {}'.format(var_shape[0]))

    dim1 = cf.DimensionCoordinate(data=cf.Data(latitude, 'degrees_north'),
                                  properties={'standard_name': 'latitude'})
    dim2 = cf.DimensionCoordinate(data=cf.Data(longitude, 'degrees_east'),
                                  properties={'standard_name': 'longitude'})

    # Define cf.Field then insert variable and coordinates
    f = cf.Field(properties={'standard_name': standard_name_o})
    f.insert_dim(dim0)
    f.insert_dim(dim1)
    f.insert_dim(dim2)

    data = cf.Data(var_o, units_o)
    f.insert_data(data)

    return f
# Tutorial-style walk-through of field data access and cf.Data
# date-time handling.
q, t = cf.read('file.nc')
t.data
print(t.array)
t.dtype
t.ndim
t.shape
t.size
print(t.domain_axes)
t
t.data.shape
t.get_data_axes()

# Remove, inspect and restore the field's data
data = t.del_data()
t.has_data()
t.set_data(data)
t.data

d = cf.Data([1, 2, 3], units='days since 2004-2-28')
print(d.array)
print(d.datetime_array)

e = cf.Data([1, 2, 3], units='days since 2004-2-28', calendar='360_day')
# Bug fix: the 360_day-calendar data 'e' was created but 'd' was
# printed again, so the calendar's effect was never shown.
print(e.array)
print(e.datetime_array)

date_time = cf.dt(2004, 2, 29)
date_time
d = cf.Data(date_time, calendar='gregorian')
print(d.array)
d.datetime_array

date_times = cf.dt_vector(['2004-02-29', '2004-02-30', '2004-03-01'],
                          calendar='360_day')
print(date_times)
e = cf.Data(date_times)
print(e.array)
def _formula_terms(standard_name):
    """Return test fixtures for one parametric vertical coordinate.

    Builds a minimal ``air_temperature`` field whose single Z axis
    carries the parametric vertical coordinate named by
    *standard_name*, with all the domain ancillaries required by that
    coordinate's formula terms, plus the auxiliary coordinate holding
    the expected computed non-parametric values.

    :Parameters:

        standard_name: `str`
            One of the parametric vertical coordinate standard names
            (e.g. ``'ocean_sigma_coordinate'``).

    :Returns:

        3-`tuple`
            The field construct, the auxiliary coordinate of expected
            computed values (with bounds where applicable), and the
            computed standard name (``'air_pressure'`` or
            ``'altitude'``).

    :Raises:

        ValueError
            If *standard_name* is not a recognised parametric vertical
            coordinate name.
    """
    # field: air_temperature
    field = cf.Field()
    field.set_properties({"standard_name": "air_temperature", "units": "K"})
    data = cf.Data([0, 1, 2], units="K", dtype="f8")

    # domain_axis: Z (size 3; every branch below attaches its
    # Z-dependent constructs to this axis)
    c = cf.DomainAxis()
    c.set_size(3)
    c.nc_set_dimension("z")
    axisZ = field.set_construct(c, key="domainaxis1", copy=False)
    field.set_data(data)

    # coordinate_reference: carries the formula-terms standard name
    coordref = cf.CoordinateReference()
    coordref.coordinate_conversion.set_parameter(
        "standard_name", standard_name
    )

    # Auxiliary coordinate that will hold the *expected* computed
    # non-parametric coordinate values for the chosen formula.
    aux = cf.AuxiliaryCoordinate()
    aux.long_name = "Computed from parametric {} vertical coordinates".format(
        standard_name
    )

    if standard_name == "atmosphere_ln_pressure_coordinate":
        computed_standard_name = "air_pressure"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([700, 500, 300], "hPa", dtype="f8")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[800, 600], [600, 400], [400, 200]], "hPa", dtype="f8")
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: p0
        p0 = cf.DomainAncillary()
        p0.standard_name = (
            "reference_air_pressure_for_atmosphere_vertical_coordinate"
        )
        data = cf.Data(1000.0, units="hPa", dtype="f8")
        p0.set_data(data)
        p0_key = field.set_construct(p0, axes=(), copy=False)

        # domain_ancillary: Z — lev is derived from the expected
        # pressures so the formula inverts exactly: lev = -ln(p/p0)
        lev = cf.DomainAncillary()
        lev.standard_name = standard_name
        data = -(aux.data / p0.data).log()
        lev.set_data(data)
        bounds = cf.Bounds()
        data = -(aux.bounds.data / p0.data).log()
        bounds.set_data(data)
        lev.set_bounds(bounds)
        lev_key = field.set_construct(lev, axes=axisZ, copy=False)

        # dimension_coordinate: Z
        levc = cf.DimensionCoordinate(source=lev)
        levc_key = field.set_construct(levc, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({levc_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {"p0": p0_key, "lev": lev_key}
        )
        field.set_construct(coordref)

    elif standard_name == "atmosphere_sigma_coordinate":
        computed_standard_name = "air_pressure"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([700, 500, 300], "hPa", dtype="f8")
        aux.set_data(data)
        b = cf.Bounds()
        data = cf.Data([[800, 600], [600, 400], [400, 200]], "hPa", dtype="f8")
        b.set_data(data)
        aux.set_bounds(b)

        # domain_ancillary: ps
        ps = cf.DomainAncillary()
        ps.standard_name = "surface_air_pressure"
        data = cf.Data(1000, units="hPa", dtype="f8")
        ps.set_data(data)
        ps_key = field.set_construct(ps, axes=(), copy=False)

        # domain_ancillary: ptop
        ptop = cf.DomainAncillary()
        ptop.standard_name = "air_pressure_at_top_of_atmosphere_model"
        data = cf.Data(10, units="hPa", dtype="f8")
        ptop.set_data(data)
        ptop_key = field.set_construct(ptop, axes=(), copy=False)

        # domain_ancillary: sigma
        sigma = cf.DomainAncillary()
        sigma.standard_name = standard_name
        data = cf.Data([0.6969697, 0.49494949, 0.29292929])
        sigma.set_data(data)
        b = cf.Bounds()
        data = cf.Data(
            [
                [0.7979798, 0.5959596],
                [0.5959596, 0.39393939],
                [0.39393939, 0.19191919],
            ]
        )
        b.set_data(data)
        sigma.set_bounds(b)
        sigma_key = field.set_construct(sigma, axes=axisZ, copy=False)

        # dimension_coordinate: sigma
        sigmac = cf.DimensionCoordinate(source=sigma)
        sigmac_key = field.set_construct(sigmac, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sigmac_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {"ptop": ptop_key, "ps": ps_key, "sigma": sigma_key}
        )
        field.set_construct(coordref)

    elif standard_name == "atmosphere_hybrid_sigma_pressure_coordinate":
        computed_standard_name = "air_pressure"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([700, 500, 300], "hPa", dtype="f8")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[800, 600], [600, 400], [400, 200]], "hPa", dtype="f8")
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: ps
        ps = cf.DomainAncillary()
        ps.standard_name = "surface_air_pressure"
        data = cf.Data(1000, units="hPa", dtype="f8")
        ps.set_data(data)
        ps_key = field.set_construct(ps, axes=(), copy=False)

        # domain_ancillary: p0 (no standard name — identified purely
        # by its formula-terms role)
        p0 = cf.DomainAncillary()
        data = cf.Data(1000, units="hPa", dtype="f8")
        p0.set_data(data)
        p0_key = field.set_construct(p0, axes=(), copy=False)

        # domain_ancillary: a
        a = cf.DomainAncillary()
        data = cf.Data([0.6, 0.3, 0], dtype="f8")
        a.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.75, 0.45], [0.45, 0.15], [0.15, 0]])
        bounds.set_data(data)
        a.set_bounds(bounds)
        a_key = field.set_construct(a, axes=axisZ, copy=False)

        # domain_ancillary: b
        b = cf.DomainAncillary()
        data = cf.Data([0.1, 0.2, 0.3], dtype="f8")
        b.set_data(data)
        bounds = cf.Bounds()
        # NOTE(review): the final bound 0.2 breaks the contiguous
        # [x, x+0.1] pattern of the other cells (0.35 expected?) —
        # confirm this is intentional.
        data = cf.Data([[0.05, 0.15], [0.15, 0.25], [0.25, 0.2]])
        bounds.set_data(data)
        b.set_bounds(bounds)
        b_key = field.set_construct(b, axes=axisZ, copy=False)

        # dimension_coordinate: sigma (built directly as a dimension
        # coordinate in this branch, unlike the others)
        sigma = cf.DimensionCoordinate()
        sigma.standard_name = standard_name
        data = cf.Data([0.6969697, 0.49494949, 0.29292929])
        sigma.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [
                [0.7979798, 0.5959596],
                [0.5959596, 0.39393939],
                [0.39393939, 0.19191919],
            ]
        )
        bounds.set_data(data)
        sigma.set_bounds(bounds)
        sigma_key = field.set_construct(sigma, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sigma_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {"p0": p0_key, "a": a_key, "b": b_key, "ps": ps_key}
        )
        field.set_construct(coordref)

    elif standard_name == "atmosphere_sleve_coordinate":
        computed_standard_name = "altitude"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([100, 200, 300], "m", dtype="f8")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[50, 150], [150, 250], [250, 350]], "m", dtype="f8")
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: ztop
        ztop = cf.DomainAncillary()
        ztop.standard_name = "altitude_at_top_of_atmosphere_model"
        data = cf.Data(1000, units="m", dtype="f8")
        ztop.set_data(data)
        ztop_key = field.set_construct(ztop, axes=(), copy=False)

        # domain_ancillary: zsurf1
        zsurf1 = cf.DomainAncillary()
        data = cf.Data(90, units="m", dtype="f8")
        zsurf1.set_data(data)
        zsurf1_key = field.set_construct(zsurf1, axes=(), copy=False)

        # domain_ancillary: zsurf2
        zsurf2 = cf.DomainAncillary()
        data = cf.Data(0.1, units="m", dtype="f8")
        zsurf2.set_data(data)
        zsurf2_key = field.set_construct(zsurf2, axes=(), copy=False)

        # domain_ancillary: b1
        b1 = cf.DomainAncillary()
        data = cf.Data([0.05, 0.04, 0.03], dtype="f8")
        b1.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.055, 0.045], [0.045, 0.035], [0.035, 0.025]])
        bounds.set_data(data)
        b1.set_bounds(bounds)
        b1_key = field.set_construct(b1, axes=axisZ, copy=False)

        # domain_ancillary: b2
        b2 = cf.DomainAncillary()
        data = cf.Data([0.5, 0.4, 0.3])
        b2.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.55, 0.45], [0.45, 0.35], [0.35, 0.25]])
        bounds.set_data(data)
        b2.set_bounds(bounds)
        b2_key = field.set_construct(b2, axes=axisZ, copy=False)

        # domain_ancillary: a
        a = cf.DomainAncillary()
        data = cf.Data([0.09545, 0.19636, 0.29727])
        a.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [[0.044995, 0.145905], [0.145905, 0.246815], [0.246815, 0.347725]]
        )
        bounds.set_data(data)
        a.set_bounds(bounds)
        a_key = field.set_construct(a, axes=axisZ, copy=False)

        # coordinate_reference:
        # NOTE(review): this branch never calls coordref.set_coordinates,
        # unlike every other branch — confirm intentional.
        coordref.coordinate_conversion.set_domain_ancillaries(
            {
                "zsurf1": zsurf1_key,
                "a": a_key,
                "b1": b1_key,
                "b2": b2_key,
                "zsurf2": zsurf2_key,
                "ztop": ztop_key,
            }
        )
        field.set_construct(coordref)

    elif standard_name == "ocean_sigma_coordinate":
        computed_standard_name = "altitude"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([10, 20, 30], "m", dtype="f8")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[5, 15], [15, 25], [25, 35]], "m", dtype="f8")
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: depth
        depth = cf.DomainAncillary()
        depth.standard_name = "sea_floor_depth_below_geoid"
        data = cf.Data(-1000.0, units="m")
        depth.set_data(data)
        depth_key = field.set_construct(depth, axes=(), copy=False)

        # domain_ancillary: eta
        eta = cf.DomainAncillary()
        eta.standard_name = "sea_surface_height_above_geoid"
        data = cf.Data(100.0, units="m")
        eta.set_data(data)
        eta_key = field.set_construct(eta, axes=(), copy=False)

        # domain_ancillary: sigma
        sigma = cf.DomainAncillary()
        sigma.standard_name = standard_name
        data = cf.Data([0.1, 0.08888888888888889, 0.07777777777777778])
        sigma.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [
                [0.10555556, 0.09444444],
                [0.09444444, 0.08333333],
                [0.08333333, 0.07222222],
            ]
        )
        bounds.set_data(data)
        sigma.set_bounds(bounds)
        sigma_key = field.set_construct(sigma, axes=axisZ, copy=False)

        # dimension_coordinate: sigma
        sigmac = cf.DimensionCoordinate(source=sigma)
        sigmac_key = field.set_construct(sigmac, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sigmac_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {"depth": depth_key, "eta": eta_key, "sigma": sigma_key}
        )
        field.set_construct(coordref)

    elif standard_name == "ocean_s_coordinate":
        computed_standard_name = "altitude"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([15.01701191, 31.86034296, 40.31150319], units="m")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [
                [15.01701191, 23.42877638],
                [23.42877638, 31.86034296],
                [31.86034296, 40.31150319],
            ],
            units="m",
        )
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: depth
        depth = cf.DomainAncillary()
        depth.standard_name = "sea_floor_depth_below_geoid"
        data = cf.Data(-1000.0, units="m")
        depth.set_data(data)
        depth_key = field.set_construct(depth, axes=(), copy=False)

        # domain_ancillary: eta
        eta = cf.DomainAncillary()
        eta.standard_name = "sea_surface_height_above_geoid"
        data = cf.Data(100.0, units="m")
        eta.set_data(data)
        eta_key = field.set_construct(eta, axes=(), copy=False)

        # domain_ancillary: depth_c
        depth_c = cf.DomainAncillary()
        data = cf.Data(10.0, units="m")
        depth_c.set_data(data)
        depth_c_key = field.set_construct(depth_c, axes=(), copy=False)

        # domain_ancillary: a
        a = cf.DomainAncillary()
        data = cf.Data(0.5)
        a.set_data(data)
        a_key = field.set_construct(a, axes=(), copy=False)

        # domain_ancillary: b
        b = cf.DomainAncillary()
        data = cf.Data(0.75)
        b.set_data(data)
        b_key = field.set_construct(b, axes=(), copy=False)

        # domain_ancillary: s
        s = cf.DomainAncillary()
        s.standard_name = standard_name
        data = cf.Data([0.1, 0.08, 0.07])
        s.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.10, 0.09], [0.09, 0.08], [0.08, 0.07]])
        bounds.set_data(data)
        s.set_bounds(bounds)
        s_key = field.set_construct(s, axes=axisZ, copy=False)

        # dimension_coordinate: s
        sc = cf.DimensionCoordinate(source=s)
        sc_key = field.set_construct(sc, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sc_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {
                "depth": depth_key,
                "eta": eta_key,
                "depth_c": depth_c_key,
                "a": a_key,
                "b": b_key,
                "s": s_key,
            }
        )
        field.set_construct(coordref)

    elif standard_name == "ocean_s_coordinate_g1":
        computed_standard_name = "altitude"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([555.4, 464.32, 373.33], units="m")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [[600.85, 509.86], [509.86, 418.87], [418.87, 327.88]], units="m"
        )
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: depth
        depth = cf.DomainAncillary()
        depth.standard_name = "sea_floor_depth_below_geoid"
        data = cf.Data(-1000.0, units="m")
        depth.set_data(data)
        depth_key = field.set_construct(depth, axes=(), copy=False)

        # domain_ancillary: eta
        eta = cf.DomainAncillary()
        eta.standard_name = "sea_surface_height_above_geoid"
        data = cf.Data(100.0, units="m")
        eta.set_data(data)
        eta_key = field.set_construct(eta, axes=(), copy=False)

        # domain_ancillary: depth_c
        depth_c = cf.DomainAncillary()
        data = cf.Data(10.0, units="m")
        depth_c.set_data(data)
        depth_c_key = field.set_construct(depth_c, axes=(), copy=False)

        # domain_ancillary: C
        C = cf.DomainAncillary()
        data = cf.Data([-0.5, -0.4, -0.3])
        C.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[-0.55, -0.45], [-0.45, -0.35], [-0.35, -0.25]])
        bounds.set_data(data)
        C.set_bounds(bounds)
        C_key = field.set_construct(C, axes=axisZ, copy=False)

        # domain_ancillary: s
        s = cf.DomainAncillary()
        s.standard_name = standard_name
        data = cf.Data([0.1, 0.08, 0.07])
        s.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.10, 0.09], [0.09, 0.08], [0.08, 0.07]])
        bounds.set_data(data)
        s.set_bounds(bounds)
        s_key = field.set_construct(s, axes=axisZ, copy=False)

        # dimension_coordinate: s
        sc = cf.DimensionCoordinate(source=s)
        sc_key = field.set_construct(sc, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sc_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {
                "depth": depth_key,
                "eta": eta_key,
                "depth_c": depth_c_key,
                "C": C_key,
                "s": s_key,
            }
        )
        field.set_construct(coordref)

    elif standard_name == "ocean_s_coordinate_g2":
        computed_standard_name = "altitude"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([555.45454545, 464.36363636, 373.36363636], units="m")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [
                [600.90909091, 509.90909091],
                [509.90909091, 418.90909091],
                [418.90909091, 327.90909091],
            ],
            units="m",
        )
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: depth
        depth = cf.DomainAncillary()
        depth.standard_name = "sea_floor_depth_below_geoid"
        data = cf.Data(-1000.0, units="m")
        depth.set_data(data)
        depth_key = field.set_construct(depth, axes=(), copy=False)

        # domain_ancillary: eta
        eta = cf.DomainAncillary()
        eta.standard_name = "sea_surface_height_above_geoid"
        data = cf.Data(100.0, units="m")
        eta.set_data(data)
        eta_key = field.set_construct(eta, axes=(), copy=False)

        # domain_ancillary: depth_c
        depth_c = cf.DomainAncillary()
        data = cf.Data(10.0, units="m")
        depth_c.set_data(data)
        depth_c_key = field.set_construct(depth_c, axes=(), copy=False)

        # domain_ancillary: C
        C = cf.DomainAncillary()
        data = cf.Data([-0.5, -0.4, -0.3])
        C.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[-0.55, -0.45], [-0.45, -0.35], [-0.35, -0.25]])
        bounds.set_data(data)
        C.set_bounds(bounds)
        C_key = field.set_construct(C, axes=axisZ, copy=False)

        # domain_ancillary: s
        s = cf.DomainAncillary()
        s.standard_name = standard_name
        data = cf.Data([0.1, 0.08, 0.07])
        s.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.10, 0.09], [0.09, 0.08], [0.08, 0.07]])
        bounds.set_data(data)
        s.set_bounds(bounds)
        s_key = field.set_construct(s, axes=axisZ, copy=False)

        # dimension_coordinate: s
        sc = cf.DimensionCoordinate(source=s)
        sc_key = field.set_construct(sc, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sc_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {
                "depth": depth_key,
                "eta": eta_key,
                "depth_c": depth_c_key,
                "C": C_key,
                "s": s_key,
            }
        )
        field.set_construct(coordref)

    elif standard_name == "ocean_sigma_z_coordinate":
        computed_standard_name = "altitude"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data([10.0, 30.0, 40.0], "m", dtype="f8")
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [[10.0, 19.0], [25.0, 35.0], [35.0, 45.0]], "m", dtype="f8"
        )
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: depth
        depth = cf.DomainAncillary()
        depth.standard_name = "sea_floor_depth_below_geoid"
        data = cf.Data(-1000.0, units="m")
        depth.set_data(data)
        depth_key = field.set_construct(depth, axes=(), copy=False)

        # domain_ancillary: eta
        eta = cf.DomainAncillary()
        eta.standard_name = "sea_surface_height_above_geoid"
        data = cf.Data(100.0, units="m")
        eta.set_data(data)
        eta_key = field.set_construct(eta, axes=(), copy=False)

        # domain_ancillary: depth_c
        depth_c = cf.DomainAncillary()
        data = cf.Data(10.0, units="m")
        depth_c.set_data(data)
        depth_c_key = field.set_construct(depth_c, axes=(), copy=False)

        # domain_ancillary: nsigma (number of sigma levels above the
        # fixed-z levels)
        nsigma = cf.DomainAncillary()
        data = cf.Data(1)
        nsigma.set_data(data)
        nsigma_key = field.set_construct(nsigma, axes=(), copy=False)

        # domain_ancillary: zlev
        zlev = cf.DomainAncillary()
        zlev.standard_name = "altitude"
        data = cf.Data([20, 30, 40], units="m", dtype="f8")
        zlev.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[15, 25], [25, 35], [35, 45]], units="m", dtype="f8")
        bounds.set_data(data)
        zlev.set_bounds(bounds)
        zlev_key = field.set_construct(zlev, axes=axisZ, copy=False)

        # domain_ancillary: sigma
        sigma = cf.DomainAncillary()
        sigma.standard_name = standard_name
        data = cf.Data([0.1, 0.08, 0.07])
        sigma.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.10, 0.09], [0.09, 0.08], [0.08, 0.07]])
        bounds.set_data(data)
        sigma.set_bounds(bounds)
        sigma_key = field.set_construct(sigma, axes=axisZ, copy=False)

        # dimension_coordinate: sigma
        sigmac = cf.DimensionCoordinate(source=sigma)
        sigmac_key = field.set_construct(sigmac, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sigmac_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {
                "depth": depth_key,
                "eta": eta_key,
                "depth_c": depth_c_key,
                "nsigma": nsigma_key,
                "zlev": zlev_key,
                "sigma": sigma_key,
            }
        )
        field.set_construct(coordref)

    elif standard_name == "ocean_double_sigma_coordinate":
        computed_standard_name = "altitude"

        # Computed vertical coordinates
        aux.standard_name = computed_standard_name
        data = cf.Data(
            [0.15000000000000002, 0.12, 932.895], units="m", dtype="f8"
        )
        aux.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data(
            [
                [1.50000e-01, 1.35000e-01],
                [1.35000e-01, 1.20000e-01],
                [9.22880e02, 9.32895e02],
            ],
            units="m",
            dtype="f8",
        )
        bounds.set_data(data)
        aux.set_bounds(bounds)

        # domain_ancillary: depth
        depth = cf.DomainAncillary()
        depth.standard_name = "sea_floor_depth_below_geoid"
        data = cf.Data(-1000.0, units="m")
        depth.set_data(data)
        depth_key = field.set_construct(depth, axes=(), copy=False)

        # domain_ancillary: z1
        z1 = cf.DomainAncillary()
        data = cf.Data(2, units="m")
        z1.set_data(data)
        z1_key = field.set_construct(z1, axes=(), copy=False)

        # domain_ancillary: z2
        z2 = cf.DomainAncillary()
        data = cf.Data(1.5, units="m")
        z2.set_data(data)
        z2_key = field.set_construct(z2, axes=(), copy=False)

        # domain_ancillary: a
        a = cf.DomainAncillary()
        data = cf.Data(2.5, units="m")
        a.set_data(data)
        a_key = field.set_construct(a, axes=(), copy=False)

        # domain_ancillary: href
        href = cf.DomainAncillary()
        data = cf.Data(10.5, units="m")
        href.set_data(data)
        href_key = field.set_construct(href, axes=(), copy=False)

        # domain_ancillary: k_c
        k_c = cf.DomainAncillary()
        data = cf.Data(1)
        k_c.set_data(data)
        k_c_key = field.set_construct(k_c, axes=(), copy=False)

        # domain_ancillary: sigma
        sigma = cf.DomainAncillary()
        sigma.standard_name = standard_name
        data = cf.Data([0.1, 0.08, 0.07])
        sigma.set_data(data)
        bounds = cf.Bounds()
        data = cf.Data([[0.10, 0.09], [0.09, 0.08], [0.08, 0.07]])
        bounds.set_data(data)
        sigma.set_bounds(bounds)
        sigma_key = field.set_construct(sigma, axes=axisZ, copy=False)

        # dimension_coordinate: sigma
        sigmac = cf.DimensionCoordinate(source=sigma)
        sigmac_key = field.set_construct(sigmac, axes=axisZ, copy=False)

        # coordinate_reference:
        coordref.set_coordinates({sigmac_key})
        coordref.coordinate_conversion.set_domain_ancillaries(
            {
                "depth": depth_key,
                "a": a_key,
                "k_c": k_c_key,
                "z1": z1_key,
                "z2": z2_key,
                "href": href_key,
                "sigma": sigma_key,
            }
        )
        field.set_construct(coordref)

    else:
        raise ValueError(
            "Bad standard name: {}, "
            "not an element of FormulaTerms.standard_names".format(
                standard_name
            )
        )

    return (field, aux, computed_standard_name)
def test_CoordinateReference_equals(self):
    """Check CoordinateReference.equals against itself and its copy."""
    # Vertical (parametric) coordinate reference
    vertical = cf.CoordinateReference(
        coordinates=("coord1",),
        coordinate_conversion=cf.CoordinateConversion(
            parameters={
                "standard_name": "atmosphere_hybrid_height_coordinate"
            },
            domain_ancillaries={"a": "aux0", "b": "aux1", "orog": "orog"},
        ),
    )
    self.assertTrue(vertical.equals(vertical, verbose=2))
    self.assertTrue(vertical.equals(vertical.copy(), verbose=2))

    # Rotated-pole horizontal grid mapping
    rotated = cf.CoordinateReference(
        coordinates=["coord1", "fred", "coord3"],
        coordinate_conversion=cf.CoordinateConversion(
            parameters={
                "grid_mapping_name": "rotated_latitude_longitude",
                "grid_north_pole_latitude": 38.0,
                "grid_north_pole_longitude": 190.0,
            }
        ),
    )
    self.assertTrue(rotated.equals(rotated, verbose=2))
    self.assertTrue(rotated.equals(rotated.copy(), verbose=2))

    # Rotated-pole grid mapping with an explicit datum
    datum = cf.Datum(parameters={"earth_radius": 6371007})
    conversion = cf.CoordinateConversion(
        parameters={
            "grid_mapping_name": "rotated_latitude_longitude",
            "grid_north_pole_latitude": 38.0,
            "grid_north_pole_longitude": 190.0,
        }
    )
    with_datum = cf.CoordinateReference(
        coordinate_conversion=conversion,
        datum=datum,
        coordinates=["x", "y", "lat", "lon"],
    )
    self.assertTrue(with_datum.equals(with_datum, verbose=2))
    self.assertTrue(with_datum.equals(with_datum.copy(), verbose=2))

    # Albers equal-area grid mapping: standard_parallel as a list
    albers = cf.CoordinateReference(
        coordinates=["coord1", "fred", "coord3"],
        coordinate_conversion=cf.CoordinateConversion(
            parameters={
                "grid_mapping_name": "albers_conical_equal_area",
                "standard_parallel": [-30, 10],
                "longitude_of_projection_origin": 34.8,
                "false_easting": -20000,
                "false_northing": -30000,
            }
        ),
    )
    self.assertTrue(albers.equals(albers, verbose=2))
    self.assertTrue(albers.equals(albers.copy(), verbose=2))

    # Albers equal-area grid mapping: standard_parallel as cf.Data
    albers_data = cf.CoordinateReference(
        coordinates=["coord1", "fred", "coord3"],
        coordinate_conversion=cf.CoordinateConversion(
            parameters={
                "grid_mapping_name": "albers_conical_equal_area",
                "standard_parallel": cf.Data([-30, 10]),
                "longitude_of_projection_origin": 34.8,
                "false_easting": -20000,
                "false_northing": -30000,
            }
        ),
    )
    self.assertTrue(albers_data.equals(albers_data, verbose=2))
    self.assertTrue(albers_data.equals(albers_data.copy(), verbose=2))
def test_compute_vertical_coordinates(self):
    """Test Field.compute_vertical_coordinates for all formula terms.

    Exercises the atmosphere_hybrid_height_coordinate case in depth
    (with and without 'a'/'b' terms and their bounds), then loops over
    every other parametric vertical coordinate using the fixtures from
    `_formula_terms`.
    """
    # ------------------------------------------------------------
    # atmosphere_hybrid_height_coordinate
    # ------------------------------------------------------------
    f = cf.example_field(1)
    # No non-parametric altitude coordinate exists yet
    self.assertIsNone(f.auxiliary_coordinate("altitude", default=None))

    g = f.compute_vertical_coordinates(verbose=None)

    altitude = g.auxiliary_coordinate("altitude")
    orog = f.domain_ancillary("surface_altitude")
    a = f.domain_ancillary("ncvar%a")
    b = f.domain_ancillary("ncvar%b")

    self.assertTrue(altitude)
    self.assertTrue(altitude.has_bounds())
    self.assertEqual(altitude.shape, (1,) + orog.shape)
    self.assertEqual(altitude.bounds.shape, altitude.shape + (2,))

    # Check array values against z = a + b * orog, with the Z axis
    # moved to the front to match the computed coordinate's order
    orog = orog.data.insert_dimension(-1)
    x = a.data + b.data * orog
    x.transpose([2, 0, 1], inplace=True)
    self.assertTrue(x.equals(altitude.data, verbose=3))

    # Check array bounds values
    orog = orog.insert_dimension(-1)
    bounds = a.bounds.data + b.bounds.data * orog
    bounds.transpose([2, 0, 1, 3], inplace=True)
    self.assertTrue(bounds.equals(altitude.bounds.data, verbose=3))

    # ------------------------------------------------------------
    # Missing 'a' bounds => computed coordinate has no bounds
    # ------------------------------------------------------------
    a.del_bounds()
    g = f.compute_vertical_coordinates(verbose=None)

    altitude = g.auxiliary_coordinate("altitude")
    orog = f.domain_ancillary("surface_altitude")

    self.assertTrue(altitude)
    self.assertEqual(altitude.shape, (1,) + orog.shape)
    self.assertFalse(altitude.has_bounds())

    # Check array values
    orog = orog.data.insert_dimension(-1)
    x = a.data + b.data * orog
    x.transpose([2, 0, 1], inplace=True)
    self.assertTrue(x.equals(altitude.data, verbose=3))

    # ------------------------------------------------------------
    # Missing 'a' => term treated as zero; bounds come from 'b'
    # ------------------------------------------------------------
    f.del_construct("ncvar%a")
    g = f.compute_vertical_coordinates(verbose=None)

    altitude = g.auxiliary_coordinate("altitude")
    orog = f.domain_ancillary("surface_altitude")

    self.assertTrue(altitude)
    self.assertTrue(altitude.has_bounds())
    self.assertEqual(altitude.shape, (1,) + orog.shape)
    self.assertEqual(altitude.bounds.shape, altitude.shape + (2,))

    # Check array values
    orog = orog.data.insert_dimension(-1)
    x = b.data * orog
    x.transpose([2, 0, 1], inplace=True)
    self.assertTrue(x.equals(altitude.data, verbose=3))

    # Check array bounds values
    orog = orog.insert_dimension(-1)
    bounds = b.bounds.data * orog
    bounds.transpose([2, 0, 1, 3], inplace=True)
    self.assertTrue(bounds.equals(altitude.bounds.data, verbose=3))

    # ------------------------------------------------------------
    # Missing 'a' and no 'b' bounds
    # ------------------------------------------------------------
    b.del_bounds()
    g = f.compute_vertical_coordinates(verbose=None)

    altitude = g.auxiliary_coordinate("altitude")
    orog = f.domain_ancillary("surface_altitude")

    self.assertTrue(altitude)
    self.assertFalse(altitude.has_bounds())
    self.assertEqual(altitude.shape, (1,) + orog.shape)

    # Check array values
    orog = orog.data.insert_dimension(-1)
    x = b.data * orog
    x.transpose([2, 0, 1], inplace=True)
    self.assertTrue(x.equals(altitude.data, verbose=3))

    # ------------------------------------------------------------
    # Missing 'a' and missing 'b' => computed altitude is all zero
    # ------------------------------------------------------------
    f.del_construct("ncvar%b")
    g = f.compute_vertical_coordinates(verbose=None)

    altitude = g.auxiliary_coordinate("altitude")
    orog = f.domain_ancillary("surface_altitude")

    self.assertTrue(altitude)
    self.assertTrue(altitude.has_bounds())
    self.assertEqual(altitude.shape, orog.shape)
    self.assertEqual(altitude.bounds.shape, altitude.shape + (2,))

    # Check array values
    x = 0 * orog.data
    self.assertTrue(x.equals(altitude.data), repr(x))

    # Check array bounds values
    orog = orog.insert_dimension(-1)
    bounds = cf.Data([0, 0]) * orog.data
    self.assertTrue(bounds.equals(altitude.bounds.data), repr(x))

    # ------------------------------------------------------------
    # Check in-place
    # ------------------------------------------------------------
    self.assertIsNone(f.compute_vertical_coordinates(inplace=True))

    # With the surface_altitude deleted the computation must fail
    f.del_construct("surface_altitude")
    with self.assertRaises(ValueError):
        g = f.compute_vertical_coordinates()

    # ------------------------------------------------------------
    # Check with no vertical coordinates: field returned unchanged
    # ------------------------------------------------------------
    f = cf.example_field(0)
    g = f.compute_vertical_coordinates()
    self.assertTrue(g.equals(f))

    # ------------------------------------------------------------
    # Check other types via the _formula_terms fixtures
    # ------------------------------------------------------------
    for standard_name in cf.formula_terms.FormulaTerms.standard_names:
        if standard_name == "atmosphere_hybrid_height_coordinate":
            # Already covered in depth above
            continue

        f, a, csn = _formula_terms(standard_name)

        g = f.compute_vertical_coordinates(verbose=None)
        x = g.auxiliary_coordinate(csn)

        self.assertTrue(
            x.equals(a, atol=1e-5, rtol=1e-05, verbose=-1),
            "{}, {}, {}\n{}\n{}".format(
                standard_name,
                x.array,
                a.array,
                x.bounds.array,
                a.bounds.array,
            ),
        )
def test_GATHERING_create(self):
    """Build a gathering-compressed field and verify (de)compression."""
    if self.test_only and inspect.stack()[0][3] not in self.test_only:
        return

    # The gathered (compressed) values
    compressed = numpy.array(
        [[280, 282.5, 281], [279, 278, 277.5]], dtype='float32'
    )

    # List variable: indices of the gathered points within the
    # flattened compressed dimensions
    list_variable = cf.List(data=cf.Data([1, 4, 5]))

    # Wrap the compressed values in a gathered-array object that
    # knows the uncompressed shape
    array = cf.GatheredArray(
        compressed_array=cf.Data(compressed),
        compressed_dimension=1,
        shape=(2, 3, 2),
        size=12,
        ndim=3,
        list_variable=list_variable,
    )

    # Field construct that will carry the gathered data
    tas = cf.Field(
        properties={'standard_name': 'air_temperature', 'units': 'K'}
    )

    # Domain axes of the uncompressed array
    T = tas.set_construct(cf.DomainAxis(2))
    Y = tas.set_construct(cf.DomainAxis(3))
    X = tas.set_construct(cf.DomainAxis(2))

    # Expected uncompressed view: unlisted points are masked
    expected = numpy.ma.masked_array(
        data=[
            [[1, 280.0], [1, 1], [282.5, 281.0]],
            [[1, 279.0], [1, 1], [278.0, 277.5]],
        ],
        mask=[
            [[True, False], [True, True], [False, False]],
            [[True, False], [True, True], [False, False]],
        ],
        fill_value=1e+20,
        dtype='float32',
    )

    for chunksize in (1000000,):
        cf.chunksize(chunksize)
        message = f'chunksize={chunksize}'

        # Attach the gathered data to the field
        tas.set_data(cf.Data(array), axes=[T, Y, X])

        # Uncompressed view matches the expected masked array
        self.assertTrue((tas.data.array == expected).all(), message)

        # Compression metadata and the underlying compressed values
        # are preserved
        self.assertEqual(
            tas.data.get_compression_type(), 'gathered', message
        )
        self.assertTrue(
            (tas.data.compressed_array == numpy.array(
                [[280., 282.5, 281.], [279., 278., 277.5]],
                dtype='float32')).all(),
            message,
        )
        self.assertTrue(
            (tas.data.get_list().data.array
             == numpy.array([1, 4, 5])).all(),
            message,
        )