Example No. 1
    def test_example_field(self):
        for n in range(8):
            f = cf.example_field(n)
            f.array
            f.dump(display=False)

        with self.assertRaises(Exception):
            cf.example_field(-999)
Example No. 2
    def test_example_field(self):
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        for n in range(8):
            f = cf.example_field(n)
            _ = f.array
            _ = f.dump(display=False)

        with self.assertRaises(Exception):
            _ = cf.example_field(-999)
Example No. 3
    def test_aggregate_verbosity(self):
        for chunksize in self.chunk_sizes:
            f0 = cf.example_field(0)
            f1 = cf.example_field(1)

            detail_header = "DETAIL:cf.aggregate:STRUCTURAL SIGNATURE:"
            debug_header = "DEBUG:cf.aggregate:COMPLETE AGGREGATION METADATA:"

            # 'DEBUG' (-1) verbosity should output both log message headers...
            with self.assertLogs(level="NOTSET") as catch:
                cf.aggregate([f0, f1], verbose=-1)
                for header in (detail_header, debug_header):
                    self.assertTrue(
                        any(
                            log_item.startswith(header)
                            for log_item in catch.output),
                        "No log entry begins with '{}'".format(header),
                    )

            # ...but with 'DETAIL' (3), should get only the detail-level one.
            with self.assertLogs(level="NOTSET") as catch:
                cf.aggregate([f0, f1], verbose=3)
                self.assertTrue(
                    any(
                        log_item.startswith(detail_header)
                        for log_item in catch.output),
                    "No log entry begins with '{}'".format(detail_header),
                )
                self.assertFalse(
                    any(
                        log_item.startswith(debug_header)
                        for log_item in catch.output),
                    "A log entry begins with '{}' but should not".format(
                        debug_header),
                )

            # and neither should emerge at the 'WARNING' (1) level.
            with self.assertLogs(level="NOTSET") as catch:
                logger.warning(
                    "Dummy message to log something at warning level so that "
                    "'assertLogs' does not error when no log messages emerge.")
                # Note: can use assertNoLogs in Python 3.10 to avoid this, see:
                # https://bugs.python.org/issue39385

                cf.aggregate([f0, f1], verbose=1)
                for header in (detail_header, debug_header):
                    self.assertFalse(
                        any(
                            log_item.startswith(header)
                            for log_item in catch.output),
                        "A log entry begins with '{}' but should not".format(
                            header),
                    )
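
A sketch of how the dummy-warning workaround above could be dropped on Python 3.10+, where unittest provides assertNoLogs (a hypothetical extra test, not part of the original suite):

    def test_aggregate_verbosity_silent(self):
        f0 = cf.example_field(0)
        f1 = cf.example_field(1)

        # At 'WARNING' (1) verbosity no aggregation log messages are
        # expected, so assertNoLogs passes only if nothing is logged
        with self.assertNoLogs(level="NOTSET"):
            cf.aggregate([f0, f1], verbose=1)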
Example No. 4
    def test_Domain_transpose(self):
        f = cf.example_field(1)
        d = f.domain

        axes = [re.compile("^atmos"), "grid_latitude", "X"]

        g = f.transpose(axes, constructs=True)
        e = d.transpose(axes + ["T"])
        self.assertTrue(e.equals(g.domain))

        self.assertIsNone(e.transpose(axes + ["T"], inplace=True))

        with self.assertRaises(ValueError):
            d.transpose(["X", "Y"])

        with self.assertRaises(ValueError):
            d.transpose(["X", "Y", 1])

        with self.assertRaises(ValueError):
            d.transpose([2, 1])

        with self.assertRaises(ValueError):
            d.transpose(["Y", "Z"])

        with self.assertRaises(ValueError):
            d.transpose(["Y", "Y", "Z"])

        with self.assertRaises(ValueError):
            d.transpose(["Y", "X", "Z", "Y"])

        with self.assertRaises(ValueError):
            d.transpose(["Y", "X", "Z", 1])
Example No. 5
class ConstructsTest(unittest.TestCase):
    """Unit test for the Constructs class."""

    f = cf.example_field(1)

    def setUp(self):
        """Preparations called immediately before each test method."""
        # Disable log messages to silence expected warnings
        cf.LOG_LEVEL("DISABLE")
        # Note: to enable all messages for a given method, line or
        # call (e.g. for debugging anything without a 'verbose' option
        # to do the same), wrap it (for methods, from start to end
        # internally) as follows:
        #
        # cf.log_level('DEBUG')
        # < ... test code ... >
        # cf.log_level('DISABLE')

    def test_Constructs__repr__(self):
        """Test Constructs inspection via `repr`."""
        f = self.f

        repr(f.constructs)

    def test_Constructs_filter_by_naxes(self):
        """Test the `filter_by_naxes` Constructs method."""
        c = self.f.constructs

        self.assertEqual(len(c.filter_by_naxes()), 12)
        self.assertEqual(len(c.filter_by_naxes(1)), 7)
        self.assertEqual(len(c.filter_by_naxes(cf.ge(2))), 5)
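        # Several conditions select constructs matching any of them:
        # 7 one-dimensional + 5 with two or more axes = all 12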
        self.assertEqual(len(c.filter_by_naxes(1, cf.ge(2))), 12)
Example No. 6
    def test_write_coordinates(self):
        f = cf.example_field(0)

        cf.write(f, tmpfile, coordinates=True)
        g = cf.read(tmpfile)

        self.assertEqual(len(g), 1)
        self.assertTrue(g[0].equals(f))
Example No. 7
    def test_aggregate_domain(self):
        f = cf.example_field(0)
        g = f[0:3].domain
        h = f[3:].domain

        x = cf.aggregate([g, h])

        self.assertEqual(len(x), 1, x)
Example No. 8
    def test_groups_compression(self):
        f = cf.example_field(4)

        ungrouped_file = ungrouped_file3
        grouped_file = grouped_file3

        f.compress('indexed_contiguous', inplace=True)
        f.data.get_count().nc_set_variable('count')
        f.data.get_index().nc_set_variable('index')

        cf.write(f, ungrouped_file, verbose=1)
        g = cf.read(ungrouped_file)[0]
        self.assertTrue(f.equals(g, verbose=2))

        # ------------------------------------------------------------
        # Move the field construct to the /forecast/model group
        # ------------------------------------------------------------
        g.nc_set_variable_groups(['forecast', 'model'])

        # ------------------------------------------------------------
        # Move the count variable to the /forecast group
        # ------------------------------------------------------------
        g.data.get_count().nc_set_variable_groups(['forecast'])

        # ------------------------------------------------------------
        # Move the index variable to the /forecast group
        # ------------------------------------------------------------
        g.data.get_index().nc_set_variable_groups(['forecast'])

        # ------------------------------------------------------------
        # Move the coordinates that span the element dimension to the
        # /forecast group
        # ------------------------------------------------------------
        name = 'altitude'
        g.construct(name).nc_set_variable_groups(['forecast'])

        # ------------------------------------------------------------
        # Move the sample dimension to the /forecast group
        # ------------------------------------------------------------
        g.data.get_count().nc_set_sample_dimension_groups(['forecast'])

        cf.write(g, grouped_file, verbose=1)

        nc = netCDF4.Dataset(grouped_file, 'r')
        self.assertIn(f.nc_get_variable(),
                      nc.groups['forecast'].groups['model'].variables)
        self.assertIn(f.data.get_count().nc_get_variable(),
                      nc.groups['forecast'].variables)
        self.assertIn(f.data.get_index().nc_get_variable(),
                      nc.groups['forecast'].variables)
        self.assertIn(
            f.construct('altitude').nc_get_variable(),
            nc.groups['forecast'].variables)
        nc.close()

        h = cf.read(grouped_file, verbose=1)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))
Example No. 9
    def test_unique_domains(self):
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        f = cf.example_field(0)
        g = cf.example_field(1)

        fl = cf.FieldList()
        self.assertFalse(fl.unique_domains())

        fl = cf.FieldList([f])
        self.assertEqual(len(fl.unique_domains()), 1)

        fl = cf.FieldList([f, f.copy()])
        self.assertEqual(len(fl.unique_domains()), 1)

        fl = cf.FieldList([f, f.copy(), g])
        self.assertEqual(len(fl.unique_domains()), 2)
Example No. 10
    def test_Field_collapse_WEIGHTS(self):
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        verbose = False

        f = cf.example_field(2)

        if verbose:
            print(f)

        g = f.collapse("area: mean")
        g = f.collapse("area: mean", weights="area")
        if verbose:
            print(g)

        # Check area/volume collapses on fields with a different setup:
        h = cf.example_field(3)
        h.collapse("volume: minimum")
        i = cf.example_field(4)
        i.collapse("area: maximum")
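
A related sketch (assuming cf.Field.weights accepts the same "area" shorthand that collapse does, which is an assumption here): the weights could also be built explicitly and passed to collapse as an object.

    def test_Field_collapse_explicit_weights(self):
        f = cf.example_field(2)

        # Hypothetical variant: construct the area weights up front so
        # that they can be inspected, then hand the object to collapse
        w = f.weights("area")
        g = f.collapse("area: mean", weights=w)

        # Collapsed axes are retained with size 1, so the rank is unchanged
        self.assertEqual(g.ndim, f.ndim)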
Example No. 11
    def test_aggregate_dimension(self):
        """Test the promotion of a property to an axis."""
        f = cf.example_field(0)
        g = f.copy()

        f.set_property("sim", "r1i1p1f1")
        g.set_property("sim", "r2i1p1f1")

        self.assertFalse(len(f.auxiliary_coordinates()))

        a = cf.aggregate([f, g], dimension="sim")
        self.assertEqual(len(a), 1)

        a = a[0]
        self.assertEqual(len(a.auxiliary_coordinates()), 1)
Example No. 12
    def test_Field_collapse_WEIGHTS(self):
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        verbose = False

        f = cf.example_field(2)

        if verbose:
            print(f)

        g = f.collapse('area: mean')
        g = f.collapse('area: mean', weights='area')
        if verbose:
            print(g)
Example No. 13
    def test_write_filename(self):
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        tmpfiles.append(tmpfile)

        f = cf.example_field(0)
        a = f.array

        cf.write(f, tmpfile)
        g = cf.read(tmpfile)

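        # Writing the just-read fields back over their own source file
        # is expected to fail (presumably to avoid clobbering data that
        # may still be read lazily from that file)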
        with self.assertRaises(Exception):
            cf.write(g, tmpfile)

        self.assertTrue((a == g[0].array).all())
Example No. 14
    def test_Domain_creation_commands(self):
        for f in cf.example_fields():
            _ = f.domain.creation_commands()

        f = cf.example_field(1).domain

        for rd in (False, True):
            _ = f.creation_commands(representative_data=rd)

        for indent in (0, 4):
            _ = f.creation_commands(indent=indent)

        for s in (False, True):
            _ = f.creation_commands(string=s)
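        # (string=False presumably returns the commands as a list of
        # strings rather than a single string)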

        for ns in ("cf", ""):
            _ = f.creation_commands(namespace=ns)
Example No. 15
    def test_Field_regrid_size1_dimensions(self):
        # Check that non-regridded size 1 axes are handled OK
        self.assertFalse(cf.regrid_logging())

        f = cf.example_field(0)
        shape = f.shape

        g = f.regrids(f, method="linear")
        self.assertEqual(g.shape, shape)
        g = f.regridc(f, method="linear", axes="X")
        self.assertEqual(g.shape, shape)

        f.insert_dimension("T", position=0, inplace=True)
        shape = f.shape
        g = f.regrids(f, method="linear")
        self.assertEqual(g.shape, shape)
        g = f.regridc(f, method="linear", axes="X")
        self.assertEqual(g.shape, shape)
Example No. 16
    def test_differential_operators(self):
        f = cf.example_field(0)

        radius = 2

        fx, fy = f.grad_xy(radius=radius, one_sided_at_boundary=True)
        c = cf.curl_xy(fx, fy, radius=radius, one_sided_at_boundary=True)

        # Divergence of curl is zero
        dc = cf.div_xy(c, c, radius=radius, one_sided_at_boundary=True)

        zeros = dc.copy()
        zeros[...] = 0
        self.assertTrue(dc.data.equals(zeros.data, rtol=0, atol=1e-15))

        # Curl of gradient is zero
        cg = cf.curl_xy(fx, fy, radius=radius, one_sided_at_boundary=True)

        zeros = cg.copy()
        zeros[...] = 0
        self.assertTrue(cg.data.equals(zeros.data, rtol=0, atol=1e-15))
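
For reference, the two standard vector-calculus identities exercised above are, for a sufficiently smooth scalar field $f$ and vector field $\mathbf{F}$:

    \nabla \cdot (\nabla \times \mathbf{F}) = 0, \qquad \nabla \times (\nabla f) = \mathbf{0}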
Example No. 17
class RegridOperatorTest(unittest.TestCase):
    f = cf.example_field(0)
    r = f.regrids(f, "conservative", return_operator=True)

    def test_RegridOperator__repr__(self):
        repr(self.r)

    def test_RegridOperator_name(self):
        self.assertEqual(self.r.name, "regrids")

    def test_RegridOperator_method(self):
        self.assertEqual(self.r.method, "conservative_1st")

    def test_RegridOperator_parameters(self):
        self.assertIsInstance(self.r.parameters, dict)

    def test_RegridOperator_check_method(self):
        self.assertTrue(self.r.check_method("conservative"))
        self.assertTrue(self.r.check_method("conservative_1st"))
        self.assertFalse(self.r.check_method("conservative_2nd"))

    def test_RegridOperator_destroy(self):
        self.r.destroy()

    def test_RegridOperator_get_parameter(self):
        self.r.get_parameter("dst")

        with self.assertRaises(ValueError):
            self.r.get_parameter(None)

    def test_RegridOperator_copy(self):
        self.r.copy()

    @unittest.skipUnless(cf._found_ESMF, "Requires ESMF package.")
    def test_RegridOperator_regrid(self):
        from ESMF import Regrid

        self.assertIsInstance(self.r.regrid, Regrid)
Example No. 18
    def test_read_mask(self):
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        f = cf.example_field(0)

        N = f.size

        f.data[1, 1] = cf.masked
        f.data[2, 2] = cf.masked

        f.del_property("_FillValue", None)
        f.del_property("missing_value", None)

        cf.write(f, tmpfile)

        g = cf.read(tmpfile)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

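        # With mask=False the missing/fill values are left unmasked on
        # read; apply_masking then applies the CF masking conventions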
        g = cf.read(tmpfile, mask=False)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N)

        g.apply_masking(inplace=True)
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        f.set_property("_FillValue", 999)
        f.set_property("missing_value", -111)
        cf.write(f, tmpfile)

        g = cf.read(tmpfile)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        g = cf.read(tmpfile, mask=False)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N)

        g.apply_masking(inplace=True)
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)
Example No. 19
    def test_compute_vertical_coordinates(self):
        # ------------------------------------------------------------
        # atmosphere_hybrid_height_coordinate
        # ------------------------------------------------------------
        f = cf.example_field(1)
        self.assertIsNone(f.auxiliary_coordinate("altitude", default=None))

        g = f.compute_vertical_coordinates(verbose=None)

        altitude = g.auxiliary_coordinate("altitude")
        orog = f.domain_ancillary("surface_altitude")
        a = f.domain_ancillary("ncvar%a")
        b = f.domain_ancillary("ncvar%b")

        self.assertTrue(altitude)
        self.assertTrue(altitude.has_bounds())
        self.assertEqual(altitude.shape, (1,) + orog.shape)
        self.assertEqual(altitude.bounds.shape, altitude.shape + (2,))

        # Check array values
        orog = orog.data.insert_dimension(-1)
        x = a.data + b.data * orog
        x.transpose([2, 0, 1], inplace=True)
        self.assertTrue(x.equals(altitude.data, verbose=3))

        # Check array bounds values
        orog = orog.insert_dimension(-1)
        bounds = a.bounds.data + b.bounds.data * orog
        bounds.transpose([2, 0, 1, 3], inplace=True)
        self.assertTrue(bounds.equals(altitude.bounds.data, verbose=3))

        # ------------------------------------------------------------
        # Missing 'a' bounds
        # ------------------------------------------------------------
        a.del_bounds()
        g = f.compute_vertical_coordinates(verbose=None)

        altitude = g.auxiliary_coordinate("altitude")
        orog = f.domain_ancillary("surface_altitude")
        self.assertTrue(altitude)
        self.assertEqual(altitude.shape, (1,) + orog.shape)
        self.assertFalse(altitude.has_bounds())

        # Check array values
        orog = orog.data.insert_dimension(-1)
        x = a.data + b.data * orog
        x.transpose([2, 0, 1], inplace=True)
        self.assertTrue(x.equals(altitude.data, verbose=3))

        # ------------------------------------------------------------
        # Missing 'a'
        # ------------------------------------------------------------
        f.del_construct("ncvar%a")
        g = f.compute_vertical_coordinates(verbose=None)

        altitude = g.auxiliary_coordinate("altitude")
        orog = f.domain_ancillary("surface_altitude")

        self.assertTrue(altitude)
        self.assertTrue(altitude.has_bounds())
        self.assertEqual(altitude.shape, (1,) + orog.shape)
        self.assertEqual(altitude.bounds.shape, altitude.shape + (2,))

        # Check array values
        orog = orog.data.insert_dimension(-1)
        x = b.data * orog
        x.transpose([2, 0, 1], inplace=True)
        self.assertTrue(x.equals(altitude.data, verbose=3))

        # Check array bounds values
        orog = orog.insert_dimension(-1)
        bounds = b.bounds.data * orog
        bounds.transpose([2, 0, 1, 3], inplace=True)
        self.assertTrue(bounds.equals(altitude.bounds.data, verbose=3))

        # ------------------------------------------------------------
        # Missing 'a' and no 'b' bounds
        # ------------------------------------------------------------
        b.del_bounds()
        g = f.compute_vertical_coordinates(verbose=None)

        altitude = g.auxiliary_coordinate("altitude")
        orog = f.domain_ancillary("surface_altitude")

        self.assertTrue(altitude)
        self.assertFalse(altitude.has_bounds())
        self.assertEqual(altitude.shape, (1,) + orog.shape)

        # Check array values
        orog = orog.data.insert_dimension(-1)
        x = b.data * orog
        x.transpose([2, 0, 1], inplace=True)
        self.assertTrue(x.equals(altitude.data, verbose=3))

        # ------------------------------------------------------------
        # Missing 'a' and missing 'b'
        # ------------------------------------------------------------
        f.del_construct("ncvar%b")

        g = f.compute_vertical_coordinates(verbose=None)

        altitude = g.auxiliary_coordinate("altitude")
        orog = f.domain_ancillary("surface_altitude")

        self.assertTrue(altitude)
        self.assertTrue(altitude.has_bounds())
        self.assertEqual(altitude.shape, orog.shape)
        self.assertEqual(altitude.bounds.shape, altitude.shape + (2,))

        # Check array values
        x = 0 * orog.data
        self.assertTrue(x.equals(altitude.data), repr(x))

        # Check array bounds values
        orog = orog.insert_dimension(-1)
        bounds = cf.Data([0, 0]) * orog.data
        self.assertTrue(bounds.equals(altitude.bounds.data), repr(bounds))

        # ------------------------------------------------------------
        # Check in-place
        # ------------------------------------------------------------
        self.assertIsNone(f.compute_vertical_coordinates(inplace=True))

        f.del_construct("surface_altitude")
        with self.assertRaises(ValueError):
            g = f.compute_vertical_coordinates()

        # ------------------------------------------------------------
        # Check with no vertical coordinates
        # ------------------------------------------------------------
        f = cf.example_field(0)
        g = f.compute_vertical_coordinates()
        self.assertTrue(g.equals(f))

        # ------------------------------------------------------------
        # Check other types
        # ------------------------------------------------------------
        for standard_name in cf.formula_terms.FormulaTerms.standard_names:
            if standard_name == "atmosphere_hybrid_height_coordinate":
                continue
            f, a, csn = _formula_terms(standard_name)

            g = f.compute_vertical_coordinates(verbose=None)

            x = g.auxiliary_coordinate(csn)

            self.assertTrue(
                x.equals(a, atol=1e-5, rtol=1e-05, verbose=-1),
                "{}, {}, {}\n{}\n{}".format(
                    standard_name,
                    x.array,
                    a.array,
                    x.bounds.array,
                    a.bounds.array,
                ),
            )
Example No. 20
class CoordinateReferenceTest(unittest.TestCase):
    f = cf.example_field(1)

    datum = cf.Datum(parameters={"earth_radius": 6371007})

    # Create a vertical grid mapping coordinate reference
    vconversion = cf.CoordinateConversion(
        parameters={"standard_name": "atmosphere_hybrid_height_coordinate"},
        domain_ancillaries={
            "a": "auxiliarycoordinate0",
            "b": "auxiliarycoordinate1",
            "orog": "domainancillary0",
        },
    )

    vcr = cf.CoordinateReference(coordinates=("coord1", ),
                                 datum=datum,
                                 coordinate_conversion=vconversion)

    # Create a horizontal grid mapping coordinate reference
    hconversion = cf.CoordinateConversion(
        parameters={
            "grid_mapping_name": "rotated_latitude_longitude",
            "grid_north_pole_latitude": 38.0,
            "grid_north_pole_longitude": 190.0,
        })

    hcr = cf.CoordinateReference(
        coordinate_conversion=hconversion,
        datum=datum,
        coordinates=["x", "y", "lat", "lon"],
    )

    def test_CoordinateReference__repr__str__dump(self):
        coordinate_conversion = cf.CoordinateConversion(
            parameters={
                "standard_name": "atmosphere_hybrid_height_coordinate"
            },
            domain_ancillaries={
                "a": "aux0",
                "b": "aux1",
                "orog": "orog"
            },
        )

        datum = cf.Datum(parameters={"earth_radius": 23423423423.34})

        # Create a vertical grid mapping coordinate reference
        t = cf.CoordinateReference(
            coordinates=("coord1", ),
            coordinate_conversion=coordinate_conversion,
            datum=datum,
        )

        repr(t)
        str(t)
        t.dump(display=False)

        self.assertFalse(t.has_bounds())

        repr(datum)
        str(datum)

        repr(coordinate_conversion)
        str(coordinate_conversion)

    def test_CoordinateReference_equals(self):
        # Create a vertical grid mapping coordinate reference
        t = cf.CoordinateReference(
            coordinates=("coord1", ),
            coordinate_conversion=cf.CoordinateConversion(
                parameters={
                    "standard_name": "atmosphere_hybrid_height_coordinate"
                },
                domain_ancillaries={
                    "a": "aux0",
                    "b": "aux1",
                    "orog": "orog"
                },
            ),
        )
        self.assertTrue(t.equals(t, verbose=2))
        self.assertTrue(t.equals(t.copy(), verbose=2))

        # Create a horizontal grid mapping coordinate reference
        t = cf.CoordinateReference(
            coordinates=["coord1", "fred", "coord3"],
            coordinate_conversion=cf.CoordinateConversion(
                parameters={
                    "grid_mapping_name": "rotated_latitude_longitude",
                    "grid_north_pole_latitude": 38.0,
                    "grid_north_pole_longitude": 190.0,
                }),
        )
        self.assertTrue(t.equals(t, verbose=2))
        self.assertTrue(t.equals(t.copy(), verbose=2))

        datum = cf.Datum(parameters={"earth_radius": 6371007})
        conversion = cf.CoordinateConversion(
            parameters={
                "grid_mapping_name": "rotated_latitude_longitude",
                "grid_north_pole_latitude": 38.0,
                "grid_north_pole_longitude": 190.0,
            })

        t = cf.CoordinateReference(
            coordinate_conversion=conversion,
            datum=datum,
            coordinates=["x", "y", "lat", "lon"],
        )

        self.assertTrue(t.equals(t, verbose=2))
        self.assertTrue(t.equals(t.copy(), verbose=2))

        # Create a horizontal grid mapping coordinate reference
        t = cf.CoordinateReference(
            coordinates=["coord1", "fred", "coord3"],
            coordinate_conversion=cf.CoordinateConversion(
                parameters={
                    "grid_mapping_name": "albers_conical_equal_area",
                    "standard_parallel": [-30, 10],
                    "longitude_of_projection_origin": 34.8,
                    "false_easting": -20000,
                    "false_northing": -30000,
                }),
        )
        self.assertTrue(t.equals(t, verbose=2))
        self.assertTrue(t.equals(t.copy(), verbose=2))

        # Create a horizontal grid mapping coordinate reference
        t = cf.CoordinateReference(
            coordinates=["coord1", "fred", "coord3"],
            coordinate_conversion=cf.CoordinateConversion(
                parameters={
                    "grid_mapping_name": "albers_conical_equal_area",
                    "standard_parallel": cf.Data([-30, 10]),
                    "longitude_of_projection_origin": 34.8,
                    "false_easting": -20000,
                    "false_northing": -30000,
                }),
        )
        self.assertTrue(t.equals(t, verbose=2))
        self.assertTrue(t.equals(t.copy(), verbose=2))

    def test_CoordinateReference_default_value(self):
        f = self.f.copy()

        self.assertEqual(cf.CoordinateReference.default_value("qwerty"), 0.0)
        self.assertEqual(cf.CoordinateReference.default_value("earth_depth"),
                         0.0)

        cr = f.construct("standard_name:atmosphere_hybrid_height_coordinate")
        self.assertEqual(cr.default_value("qwerty"), 0.0)
        self.assertEqual(cr.default_value("earth_depth"), 0.0)

    def test_CoordinateReference_canonical_units(self):
        f = self.f.copy()

        self.assertIsNone(cf.CoordinateReference.canonical_units("qwerty"))
        self.assertEqual(
            cf.CoordinateReference.canonical_units("earth_radius"),
            cf.Units("m"),
        )

        cr = f.construct("standard_name:atmosphere_hybrid_height_coordinate")
        self.assertIsNone(cr.canonical_units("qwerty"))
        self.assertEqual(cr.canonical_units("earth_radius"), cf.Units("m"))

    def test_CoordinateReference_match(self):
        self.assertTrue(self.vcr.match())
        self.assertTrue(
            self.vcr.match(
                "standard_name:atmosphere_hybrid_height_coordinate"))
        self.assertTrue(self.vcr.match("atmosphere_hybrid_height_coordinate"))
        self.assertTrue(
            self.vcr.match("atmosphere_hybrid_height_coordinate", "qwerty"))

        self.assertTrue(self.hcr.match())
        self.assertTrue(
            self.hcr.match("grid_mapping_name:rotated_latitude_longitude"))
        self.assertTrue(self.hcr.match("rotated_latitude_longitude"))
        self.assertTrue(
            self.hcr.match("grid_mapping_name:rotated_latitude_longitude",
                           "qwerty"))

    def test_CoordinateReference_get__getitem__(self):
        self.assertEqual(self.vcr["earth_radius"],
                         self.datum.get_parameter("earth_radius"))
        self.assertEqual(
            self.vcr["standard_name"],
            self.vconversion.get_parameter("standard_name"),
        )
        self.assertEqual(
            self.vcr.get("earth_radius"),
            self.datum.get_parameter("earth_radius"),
        )
        self.assertIsNone(self.vcr.get("orog"))
        self.assertEqual(self.vcr.get("orog", "qwerty"), "qwerty")
        self.assertIsNone(self.vcr.get("qwerty"))
        self.assertEqual(
            self.vcr["standard_name"],
            self.vconversion.get_parameter("standard_name"),
        )
        with self.assertRaises(Exception):
            self.vcr["orog"]

        self.assertEqual(self.hcr["earth_radius"],
                         self.datum.get_parameter("earth_radius"))
        self.assertEqual(
            self.hcr["grid_north_pole_latitude"],
            self.hconversion.get_parameter("grid_north_pole_latitude"),
        )
        self.assertEqual(
            self.hcr["grid_mapping_name"],
            self.hconversion.get_parameter("grid_mapping_name"),
        )
        self.assertEqual(
            self.hcr.get("earth_radius"),
            self.datum.get_parameter("earth_radius"),
        )
        self.assertEqual(
            self.hcr.get("grid_north_pole_latitude", "qwerty"),
            self.hconversion.get_parameter("grid_north_pole_latitude"),
        )
        self.assertIsNone(self.hcr.get("qwerty"))
        self.assertEqual(self.hcr.get("qwerty", 12), 12)
        with self.assertRaises(Exception):
            self.hcr["qwerty"]

    def test_CoordinateReference_structural_signature(self):
        c = self.hcr.copy()

        self.assertIsInstance(c.structural_signature(), tuple)

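        # Sequence-valued datum parameters are canonicalised to (nested)
        # tuples of floats in the signature, as the checks below show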
        c.datum.set_parameter("test", [23])
        s = c.structural_signature()
        self.assertEqual(s[2], ("datum:test", (23.0, ), None))

        c.datum.set_parameter("test", [23, 45])
        s = c.structural_signature()
        self.assertEqual(s[2], ("datum:test", (23.0, 45.0), None))

        c.datum.set_parameter("test", [[23, 45]])
        s = c.structural_signature()
        self.assertEqual(s[2], ("datum:test", ((23.0, 45.0), ), None))

        c.datum.set_parameter("test", np.array([[23, 45], [67, 89]]))
        s = c.structural_signature()
        self.assertEqual(s[2],
                         ("datum:test", ((23.0, 45.0), (67.0, 89.0)), None))
Example No. 21
class AuxiliaryCoordinateTest(unittest.TestCase):
    f = cf.example_field(1)

    aux1 = cf.AuxiliaryCoordinate()
    aux1.standard_name = "latitude"
    a = numpy.array([
        -30,
        -23.5,
        -17.8123,
        -11.3345,
        -0.7,
        -0.2,
        0,
        0.2,
        0.7,
        11.30003,
        17.8678678,
        23.5,
        30,
    ])
    aux1.set_data(cf.Data(a, "degrees_north"))
    bounds = cf.Bounds()
    b = numpy.empty(a.shape + (2, ))
    b[:, 0] = a - 0.1
    b[:, 1] = a + 0.1
    bounds.set_data(cf.Data(b))
    aux1.set_bounds(bounds)

    def test_AuxiliaryCoordinate_mask_invalid(self):
        a = self.aux1.copy()

        a.mask_invalid()
        self.assertIsNone(a.mask_invalid(inplace=True))

        a.del_bounds()
        a.mask_invalid()
        self.assertIsNone(a.mask_invalid(inplace=True))

    def test_AuxiliaryCoordinate_chunk(self):
        a = self.aux1.copy()
        a.chunk()

    def test_AuxiliaryCoordinate__repr__str__dump(self):
        x = self.f.auxiliary_coordinate("latitude")
        repr(x)
        str(x)
        x.dump(display=False)

    def test_AuxiliaryCoordinate_bounds(self):
        d = self.f.dimension_coordinate("X")
        x = cf.AuxiliaryCoordinate(source=d)

        x.upper_bounds
        x.lower_bounds

    def test_AuxiliaryCoordinate_properties(self):
        x = self.f.auxiliary_coordinate("latitude")

        x.positive = "up"
        self.assertEqual(x.positive, "up")
        del x.positive
        self.assertIsNone(getattr(x, "positive", None))

        x.axis = "Z"
        self.assertEqual(x.axis, "Z")
        del x.axis
        self.assertIsNone(getattr(x, "axis", None))

        d = self.f.dimension_coordinate("X")
        x = cf.AuxiliaryCoordinate(source=d)

    def test_AuxiliaryCoordinate_insert_dimension(self):
        d = self.f.dimension_coordinate("X")
        x = cf.AuxiliaryCoordinate(source=d)

        self.assertEqual(x.shape, (9, ))
        self.assertEqual(x.bounds.shape, (9, 2))

        y = x.insert_dimension(0)
        self.assertEqual(y.shape, (1, 9))
        self.assertEqual(y.bounds.shape, (1, 9, 2), y.bounds.shape)

        x.insert_dimension(-1, inplace=True)
        self.assertEqual(x.shape, (9, 1))
        self.assertEqual(x.bounds.shape, (9, 1, 2), x.bounds.shape)

    def test_AuxiliaryCoordinate_transpose(self):
        x = self.f.auxiliary_coordinate("longitude").copy()

        bounds = cf.Bounds(
            data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)))
        x.set_bounds(bounds)

        self.assertEqual(x.shape, (9, 10))
        self.assertEqual(x.bounds.shape, (9, 10, 4))

        y = x.transpose()
        self.assertEqual(y.shape, (10, 9))
        self.assertEqual(y.bounds.shape, (10, 9, 4), y.bounds.shape)

        x.transpose([1, 0], inplace=True)
        self.assertEqual(x.shape, (10, 9))
        self.assertEqual(x.bounds.shape, (10, 9, 4), x.bounds.shape)

    def test_AuxiliaryCoordinate_squeeze(self):
        x = self.f.auxiliary_coordinate("longitude").copy()

        bounds = cf.Bounds(
            data=cf.Data(numpy.arange(9 * 10 * 4).reshape(9, 10, 4)))
        x.set_bounds(bounds)
        x.insert_dimension(1, inplace=True)
        x.insert_dimension(0, inplace=True)

        self.assertEqual(x.shape, (1, 9, 1, 10))
        self.assertEqual(x.bounds.shape, (1, 9, 1, 10, 4))

        y = x.squeeze()
        self.assertEqual(y.shape, (9, 10))
        self.assertEqual(y.bounds.shape, (9, 10, 4), y.bounds.shape)

        x.squeeze(2, inplace=True)
        self.assertEqual(x.shape, (1, 9, 10))
        self.assertEqual(x.bounds.shape, (1, 9, 10, 4), x.bounds.shape)

    def test_AuxiliaryCoordinate_floor(self):
        aux = self.aux1.copy()

        a = aux.array
        b = aux.bounds.array

        self.assertTrue((aux.floor().array == numpy.floor(a)).all())
        self.assertTrue((aux.floor().bounds.array == numpy.floor(b)).all())
        self.assertTrue(
            (aux.floor(bounds=False).array == numpy.floor(a)).all())
        self.assertTrue((aux.floor(bounds=False).bounds.array == b).all())

        aux.del_bounds()
        self.assertTrue((aux.floor().array == numpy.floor(a)).all())
        self.assertTrue(
            (aux.floor(bounds=False).array == numpy.floor(a)).all())

        self.assertIsNone(aux.floor(inplace=True))
        self.assertTrue((aux.array == numpy.floor(a)).all())

    def test_AuxiliaryCoordinate_ceil(self):
        aux = self.aux1.copy()

        a = aux.array
        b = aux.bounds.array

        self.assertTrue((aux.ceil().array == numpy.ceil(a)).all())
        self.assertTrue((aux.ceil().bounds.array == numpy.ceil(b)).all())
        self.assertTrue((aux.ceil(bounds=False).array == numpy.ceil(a)).all())
        self.assertTrue((aux.ceil(bounds=False).bounds.array == b).all())

        aux.del_bounds()
        self.assertTrue((aux.ceil().array == numpy.ceil(a)).all())
        self.assertTrue((aux.ceil(bounds=False).array == numpy.ceil(a)).all())

        self.assertIsNone(aux.ceil(inplace=True))
        self.assertTrue((aux.array == numpy.ceil(a)).all())

    def test_AuxiliaryCoordinate_trunc(self):
        aux = self.aux1.copy()

        a = aux.array
        b = aux.bounds.array

        self.assertTrue((aux.trunc().array == numpy.trunc(a)).all())
        self.assertTrue((aux.trunc().bounds.array == numpy.trunc(b)).all())
        self.assertTrue(
            (aux.trunc(bounds=False).array == numpy.trunc(a)).all())
        self.assertTrue((aux.trunc(bounds=False).bounds.array == b).all())

        aux.del_bounds()
        self.assertTrue((aux.trunc().array == numpy.trunc(a)).all())
        self.assertTrue(
            (aux.trunc(bounds=False).array == numpy.trunc(a)).all())

        self.assertIsNone(aux.trunc(inplace=True))
        self.assertTrue((aux.array == numpy.trunc(a)).all())

    def test_AuxiliaryCoordinate_rint(self):
        aux = self.aux1.copy()

        a = aux.array
        b = aux.bounds.array

        x0 = aux.rint()
        x = x0.array

        self.assertTrue((x == numpy.rint(a)).all(), x)
        self.assertTrue((aux.rint().bounds.array == numpy.rint(b)).all())
        self.assertTrue((aux.rint(bounds=False).array == numpy.rint(a)).all())
        self.assertTrue((aux.rint(bounds=False).bounds.array == b).all())

        aux.del_bounds()
        self.assertTrue((aux.rint().array == numpy.rint(a)).all())
        self.assertTrue((aux.rint(bounds=False).array == numpy.rint(a)).all())

        self.assertIsNone(aux.rint(inplace=True))
        self.assertTrue((aux.array == numpy.rint(a)).all())

    def test_AuxiliaryCoordinate_close(self):
        aux = self.aux1.copy()
        aux.close()

    def test_AuxiliaryCoordinate_sin_cos_tan(self):
        aux = self.aux1.copy()

        aux.cos()
        self.assertIsNone(aux.cos(inplace=True))

        aux.sin()
        self.assertIsNone(aux.sin(inplace=True))

        aux.tan()
        self.assertIsNone(aux.tan(inplace=True))

    def test_AuxiliaryCoordinate_log_exp(self):
        aux = self.aux1.copy()

        aux.exp()
        self.assertIsNone(aux.exp(inplace=True))

        aux.log()
        self.assertIsNone(aux.log(inplace=True))

    def test_AuxiliaryCoordinate_count(self):
        aux = self.aux1.copy()

        aux.count()

        aux.del_data()
        with self.assertRaises(Exception):
            aux.count()

    def test_AuxiliaryCoordinate_cyclic(self):
        aux = self.aux1.copy()

        self.assertEqual(aux.cyclic(), set())
        self.assertEqual(aux.cyclic(0), set())
        self.assertEqual(aux.cyclic(), set([0]))

    def test_AuxiliaryCoordinate_roll(self):
        aux = self.aux1.copy()

        aux.roll(0, 3)
        self.assertIsNone(aux.roll(-1, 4, inplace=True))

    def test_AuxiliaryCoordinate_round(self):
        aux = self.aux1.copy()

        a = aux.array
        b = aux.bounds.array

        for decimals in (0, 1, 2, 3, 4, 5):
            aux = self.aux1.copy()

            self.assertTrue(
                (aux.round(decimals).array == numpy.round(a, decimals)).all())
            self.assertTrue((aux.round(decimals).bounds.array == numpy.round(
                b, decimals)).all())
            self.assertTrue(
                (aux.round(decimals,
                           bounds=False).array == numpy.round(a,
                                                              decimals)).all())
            self.assertTrue((aux.round(decimals,
                                       bounds=False).bounds.array == b).all())

            aux.del_bounds()
            self.assertTrue(
                (aux.round(decimals).array == numpy.round(a, decimals)).all())
            self.assertTrue(
                (aux.round(decimals,
                           bounds=False).array == numpy.round(a,
                                                              decimals)).all())

            self.assertIsNone(aux.round(decimals, inplace=True))
            self.assertTrue((aux.array == numpy.round(a, decimals)).all())

    def test_AuxiliaryCoordinate_clip(self):
        aux = self.aux1.copy()

        a = aux.array
        b = aux.bounds.array

        self.assertTrue((aux.clip(-15, 25).array == numpy.clip(a, -15,
                                                               25)).all())
        self.assertTrue(
            (aux.clip(-15, 25).bounds.array == numpy.clip(b, -15, 25)).all())
        self.assertTrue(
            (aux.clip(-15, 25, bounds=False).array == numpy.clip(a, -15,
                                                                 25)).all())
        self.assertTrue((aux.clip(-15, 25,
                                  bounds=False).bounds.array == b).all())

        aux.del_bounds()
        self.assertTrue((aux.clip(-15, 25).array == numpy.clip(a, -15,
                                                               25)).all())
        self.assertTrue(
            (aux.clip(-15, 25, bounds=False).array == numpy.clip(a, -15,
                                                                 25)).all())

        self.assertIsNone(aux.clip(-15, 25, inplace=True))
Example No. 22
    def test_example_field(self):
        for f in cf.example_fields():
            f.dump(display=False)

        with self.assertRaises(ValueError):
            cf.example_field(-999)
Example No. 23
    def test_groups(self):
        f = cf.example_field(1)

        ungrouped_file = ungrouped_file1
        grouped_file = grouped_file1

        # Add a second grid mapping
        datum = cf.Datum(parameters={"earth_radius": 7000000})
        conversion = cf.CoordinateConversion(
            parameters={"grid_mapping_name": "latitude_longitude"}
        )

        grid = cf.CoordinateReference(
            coordinate_conversion=conversion,
            datum=datum,
            coordinates=["auxiliarycoordinate0", "auxiliarycoordinate1"],
        )

        f.set_construct(grid)

        grid0 = f.construct("grid_mapping_name:rotated_latitude_longitude")
        grid0.del_coordinate("auxiliarycoordinate0")
        grid0.del_coordinate("auxiliarycoordinate1")

        cf.write(f, ungrouped_file)
        g = cf.read(ungrouped_file, verbose=1)
        self.assertEqual(len(g), 1)
        g = g[0]
        self.assertTrue(f.equals(g, verbose=2))

        # ------------------------------------------------------------
        # Move the field construct to the /forecast/model group
        # ------------------------------------------------------------
        g.nc_set_variable_groups(["forecast", "model"])
        cf.write(g, grouped_file)

        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(
            f.nc_get_variable(),
            nc.groups["forecast"].groups["model"].variables,
        )
        nc.close()

        h = cf.read(grouped_file, verbose=1)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))

        # ------------------------------------------------------------
        # Move constructs one by one to the /forecast group. The order
        # in which we do this matters!
        # ------------------------------------------------------------
        for name in (
            "longitude",  # Auxiliary coordinate
            "latitude",  # Auxiliary coordinate
            "long_name=Grid latitude name",  # Auxiliary coordinate
            "measure:area",  # Cell measure
            "surface_altitude",  # Domain ancillary
            "air_temperature standard_error",  # Field ancillary
            "grid_mapping_name:rotated_latitude_longitude",
            "time",  # Dimension coordinate
            "grid_latitude",  # Dimension coordinate
        ):
            g.construct(name).nc_set_variable_groups(["forecast"])
            cf.write(g, grouped_file, verbose=1)

            # Check that the variable is in the right group
            nc = netCDF4.Dataset(grouped_file, "r")
            self.assertIn(
                f.construct(name).nc_get_variable(),
                nc.groups["forecast"].variables,
            )
            nc.close()

            # Check that the field construct hasn't changed
            h = cf.read(grouped_file, verbose=1)
            self.assertEqual(len(h), 1, repr(h))
            self.assertTrue(f.equals(h[0], verbose=2), name)

        # ------------------------------------------------------------
        # Move bounds to the /forecast group
        # ------------------------------------------------------------
        name = "grid_latitude"
        g.construct(name).bounds.nc_set_variable_groups(["forecast"])
        cf.write(g, grouped_file)

        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(
            f.construct(name).bounds.nc_get_variable(),
            nc.groups["forecast"].variables,
        )
        nc.close()

        h = cf.read(grouped_file, verbose="WARNING")
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))
Example No. 24
    def test_groups_dimension(self):
        f = cf.example_field(0)

        ungrouped_file = ungrouped_file4
        grouped_file = grouped_file4

        cf.write(f, ungrouped_file)
        g = cf.read(ungrouped_file, verbose=1)
        self.assertEqual(len(g), 1)
        g = g[0]
        self.assertTrue(f.equals(g, verbose=3))

        # ------------------------------------------------------------
        # Move the field construct to the /forecast/model group
        # ------------------------------------------------------------
        g.nc_set_variable_groups(["forecast", "model"])

        # ------------------------------------------------------------
        # Move all data constructs to the /forecast group
        # ------------------------------------------------------------
        for construct in g.constructs.filter_by_data().values():
            construct.nc_set_variable_groups(["forecast"])

        # ------------------------------------------------------------
        # Move all coordinate bounds constructs to the /forecast group
        # ------------------------------------------------------------
        for construct in g.coordinates().values():
            try:
                construct.bounds.nc_set_variable_groups(["forecast"])
            except ValueError:
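                # Coordinate has no bounds to move; skip it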
                pass

        cf.write(g, grouped_file, verbose=1)

        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(
            f.nc_get_variable(),
            nc.groups["forecast"].groups["model"].variables,
        )

        for key, construct in g.constructs.filter_by_data().items():
            self.assertIn(
                f.constructs[key].nc_get_variable(),
                nc.groups["forecast"].variables,
            )

        nc.close()

        h = cf.read(grouped_file, verbose=1)
        self.assertEqual(len(h), 1)
        h = h[0]
        self.assertTrue(f.equals(h, verbose=3))

        # ------------------------------------------------------------
        # Move the latitude dimension to the /forecast group
        # ------------------------------------------------------------
        key = g.domain_axis_key("latitude")
        domain_axis = g.constructs[key]
        domain_axis.nc_set_dimension_groups(["forecast"])

        cf.write(g, grouped_file, verbose=1)

        h = cf.read(grouped_file, verbose=1)
        self.assertEqual(len(h), 1)
        h = h[0]
        self.assertTrue(f.equals(h, verbose=3))
Example No. 25
    def test_groups_geometry(self):
        f = cf.example_field(6)

        ungrouped_file = ungrouped_file2
        grouped_file = grouped_file2

        cf.write(f, ungrouped_file)
        g = cf.read(ungrouped_file, verbose=1)
        self.assertEqual(len(g), 1)
        g = g[0]

        self.assertTrue(f.equals(g, verbose=3))

        # ------------------------------------------------------------
        # Move the field construct to the /forecast/model group
        # ------------------------------------------------------------
        g.nc_set_variable_groups(["forecast", "model"])
        cf.write(g, grouped_file)

        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(
            f.nc_get_variable(),
            nc.groups["forecast"].groups["model"].variables,
        )
        nc.close()

        h = cf.read(grouped_file)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=3))

        # ------------------------------------------------------------
        # Move the geometry container to the /forecast group
        # ------------------------------------------------------------
        g.nc_set_geometry_variable_groups(["forecast"])
        cf.write(g, grouped_file)

        # Check that the variable is in the right group
        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(
            f.nc_get_geometry_variable(), nc.groups["forecast"].variables
        )
        nc.close()

        # Check that the field construct hasn't changed
        h = cf.read(grouped_file)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))

        # ------------------------------------------------------------
        # Move a node coordinate variable to the /forecast group
        # ------------------------------------------------------------
        g.construct("longitude").bounds.nc_set_variable_groups(["forecast"])
        cf.write(g, grouped_file)

        # Check that the variable is in the right group
        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(
            f.construct("longitude").bounds.nc_get_variable(),
            nc.groups["forecast"].variables,
        )
        nc.close()

        # Check that the field construct hasn't changed
        h = cf.read(grouped_file)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))

        # ------------------------------------------------------------
        # Move a node count variable to the /forecast group
        # ------------------------------------------------------------
        ncvar = g.construct("longitude").get_node_count().nc_get_variable()
        g.nc_set_component_variable_groups("node_count", ["forecast"])

        cf.write(g, grouped_file)

        # Check that the variable is in the right group
        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(ncvar, nc.groups["forecast"].variables)
        nc.close()

        # Check that the field construct hasn't changed
        h = cf.read(grouped_file, verbose=1)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))

        # ------------------------------------------------------------
        # Move a part node count variable to the /forecast group
        # ------------------------------------------------------------
        ncvar = (
            g.construct("longitude").get_part_node_count().nc_get_variable()
        )
        g.nc_set_component_variable_groups("part_node_count", ["forecast"])

        cf.write(g, grouped_file)

        # Check that the variable is in the right group
        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(ncvar, nc.groups["forecast"].variables)
        nc.close()

        # Check that the field construct hasn't changed
        h = cf.read(grouped_file)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))

        # ------------------------------------------------------------
        # Move interior ring variable to the /forecast group
        # ------------------------------------------------------------
        g.nc_set_component_variable("interior_ring", "interior_ring")
        g.nc_set_component_variable_groups("interior_ring", ["forecast"])

        cf.write(g, grouped_file)

        # Check that the variable is in the right group
        nc = netCDF4.Dataset(grouped_file, "r")
        self.assertIn(
            f.construct("longitude").get_interior_ring().nc_get_variable(),
            nc.groups["forecast"].variables,
        )
        nc.close()

        # Check that the field construct hasn't changed
        h = cf.read(grouped_file, verbose=1)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))
Example No. 26
class read_writeTest(unittest.TestCase):
    filename = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "test_file.nc"
    )

    broken_bounds = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "broken_bounds.cdl"
    )

    string_filename = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "string_char.nc"
    )

    chunk_sizes = (100000, 300)

    f0 = cf.example_field(0)
    f1 = cf.example_field(1)

    netcdf3_fmts = [
        "NETCDF3_CLASSIC",
        "NETCDF3_64BIT",
        "NETCDF3_64BIT_OFFSET",
        "NETCDF3_64BIT_DATA",
    ]
    netcdf4_fmts = [
        "NETCDF4",
        "NETCDF4_CLASSIC",
    ]
    netcdf_fmts = netcdf3_fmts + netcdf4_fmts

    def test_write_filename(self):
        f = self.f0
        a = f.array

        cf.write(f, tmpfile)
        g = cf.read(tmpfile)

        with self.assertRaises(Exception):
            cf.write(g, tmpfile)

        self.assertTrue((a == g[0].array).all())

    def test_read_mask(self):
        f = self.f0.copy()

        N = f.size

        f.data[1, 1] = cf.masked
        f.data[2, 2] = cf.masked

        f.del_property("_FillValue", None)
        f.del_property("missing_value", None)

        cf.write(f, tmpfile)

        g = cf.read(tmpfile)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        g = cf.read(tmpfile, mask=False)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N)

        g.apply_masking(inplace=True)
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        f.set_property("_FillValue", 999)
        f.set_property("missing_value", -111)
        cf.write(f, tmpfile)

        g = cf.read(tmpfile)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        g = cf.read(tmpfile, mask=False)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N)

        g.apply_masking(inplace=True)
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

    def test_read_directory(self):
        pwd = os.getcwd() + "/"

        dir = "dir_" + inspect.stack()[0][3]

        try:
            os.mkdir(dir)
        except FileExistsError:
            pass
        except Exception:
            raise ValueError(f"Can not mkdir {pwd}{dir}")

        f = "test_file2.nc"
        try:
            os.symlink(pwd + f, pwd + dir + "/" + f)
        except FileExistsError:
            pass

        subdir = dir + "/subdir"
        try:
            os.mkdir(subdir)
        except FileExistsError:
            pass
        except Exception:
            raise ValueError(f"Can not mkdir {pwd}{subdir}")

        for f in ("test_file3.nc", "test_file.nc"):
            try:
                os.symlink(pwd + f, pwd + subdir + "/" + f)
            except FileExistsError:
                pass

        f = cf.read(dir, aggregate=False)
        self.assertEqual(len(f), 1, f)

        f = cf.read(dir, recursive=True, aggregate=False)
        self.assertEqual(len(f), 3)

        f = cf.read([dir, subdir], aggregate=False)
        self.assertEqual(len(f), 3)

        f = cf.read([subdir, dir], aggregate=False)
        self.assertEqual(len(f), 3)

        f = cf.read([dir, subdir], recursive=True, aggregate=False)
        self.assertEqual(len(f), 5)

        f = cf.read(subdir, aggregate=False)
        self.assertEqual(len(f), 2)

        f = cf.read(subdir, recursive=True, aggregate=False)
        self.assertEqual(len(f), 2)

        shutil.rmtree(dir)

    def test_read_select(self):
        # select on field list
        f = cf.read(self.filename, select="eastward_wind")
        g = cf.read(self.filename)
        self.assertTrue(f.equals(g, verbose=2), "Bad read with select keyword")

    def test_read_squeeze(self):
        # select on field list
        cf.read(self.filename, squeeze=True)
        cf.read(self.filename, unsqueeze=True)
        with self.assertRaises(Exception):
            cf.read(self.filename, unsqueeze=True, squeeze=True)

    def test_read_aggregate(self):
        cf.read(self.filename, aggregate=True)
        cf.read(self.filename, aggregate=False)
        cf.read(self.filename, aggregate={})

    def test_read_extra(self):
        # Test field keyword of cf.read
        filename = self.filename

        f = cf.read(filename)
        self.assertEqual(len(f), 1, "\n" + str(f))

        f = cf.read(filename, extra=["auxiliary_coordinate"])
        self.assertEqual(len(f), 4, "\n" + str(f))

        f = cf.read(filename, extra="cell_measure")
        self.assertEqual(len(f), 2, "\n" + str(f))

        f = cf.read(filename, extra=["field_ancillary"])
        self.assertEqual(len(f), 5, "\n" + str(f))

        f = cf.read(filename, extra="domain_ancillary", verbose=0)
        self.assertEqual(len(f), 4, "\n" + str(f))

        f = cf.read(
            filename, extra=["field_ancillary", "auxiliary_coordinate"]
        )
        self.assertEqual(len(f), 8, "\n" + str(f))

        self.assertEqual(
            len(
                cf.read(
                    filename,
                    extra=["domain_ancillary", "auxiliary_coordinate"],
                )
            ),
            7,
        )
        f = cf.read(
            filename,
            extra=["domain_ancillary", "cell_measure", "auxiliary_coordinate"],
        )
        self.assertEqual(len(f), 8, "\n" + str(f))

        f = cf.read(
            filename,
            extra=(
                "field_ancillary",
                "dimension_coordinate",
                "cell_measure",
                "auxiliary_coordinate",
                "domain_ancillary",
            ),
        )
        self.assertEqual(len(f), 15, "\n" + str(f))

    def test_read_write_format(self):
        cf.write(self.f1, tmpfile)

        for chunksize in self.chunk_sizes:
            with cf.chunksize(chunksize):
                for fmt in self.netcdf3_fmts + ["CFA"]:
                    f = cf.read(tmpfile)[0]

                    cf.write(f, tmpfile2, fmt=fmt)
                    g = cf.read(tmpfile2, verbose=0)
                    self.assertEqual(len(g), 1)
                    g = g[0]

                    self.assertTrue(
                        f.equals(g, verbose=1),
                        f"Bad read/write of format {fmt!r}",
                    )

    def test_write_netcdf_mode(self):
        """Test the `mode` parameter to `write`, notably append mode."""
        g = cf.read(self.filename)  # note 'g' has one field

        # Test special case #1: attempt to append fields with groups
        # (other than 'root') which should be forbidden. Using fmt="NETCDF4"
        # since it is the only format where groups are allowed.
        #
        # Note: this is not the most natural test to do first, but putting
        # it before the rest reduces spurious seg faults for me, so...
        g[0].nc_set_variable_groups(["forecast", "model"])
        cf.write(g, tmpfile, fmt="NETCDF4", mode="w")  # 1. overwrite to wipe
        f = cf.read(tmpfile)
        with self.assertRaises(ValueError):
            cf.write(g[0], tmpfile, fmt="NETCDF4", mode="a")

        # Test special case #2: attempt to append fields whose featureType
        # contradicts that of the original file:
        g[0].nc_clear_variable_groups()
        g[0].nc_set_global_attribute("featureType", "profile")
        cf.write(
            g,
            tmpfile,
            fmt="NETCDF4",
            mode="w",
            global_attributes=("featureType", "profile"),
        )  # 1. overwrite to wipe
        h = cf.example_field(3)
        h.nc_set_global_attribute("featureType", "timeSeries")
        with self.assertRaises(ValueError):
            cf.write(h, tmpfile, fmt="NETCDF4", mode="a")
        # Now remove featureType attribute for subsequent tests:
        g_attrs = g[0].nc_clear_global_attributes()
        del g_attrs["featureType"]
        g[0].nc_set_global_attributes(g_attrs)

        # Set a non-trivial (i.e. not only 'Conventions') global attribute to
        # make the global attribute testing more robust:
        add_global_attr = ["remark", "A global comment."]
        original_global_attrs = g[0].nc_global_attributes()
        original_global_attrs[add_global_attr[0]] = None  # -> None on fields
        g[0].nc_set_global_attribute(*add_global_attr)

        # First test a bad mode value:
        with self.assertRaises(ValueError):
            cf.write(g[0], tmpfile, mode="g")

        g_copy = g.copy()

        for fmt in self.netcdf_fmts:  # test over all netCDF 3 and 4 formats
            # Other tests cover write as default mode (i.e. test with no mode
            # argument); here test explicit provision of 'w' as argument:
            cf.write(
                g,
                tmpfile,
                fmt=fmt,
                mode="w",
                global_attributes=add_global_attr,
            )
            f = cf.read(tmpfile)

            new_length = 1  # since 1 == len(g)
            self.assertEqual(len(f), new_length)
            # Ignore 'remark' since it should be None on the field, as tested below
            self.assertTrue(f[0].equals(g[0], ignore_properties=["remark"]))
            self.assertEqual(
                f[0].nc_global_attributes(), original_global_attrs
            )

            # Main aspect of this test: testing the append mode ('a'): now
            # append all other example fields, to check a diverse variety.
            for ex_field_n, ex_field in enumerate(cf.example_fields()):
                # Note: after Issue #141, this skip can be removed.
                if ex_field_n == 1:
                    continue

                # Skip since "RuntimeError: Can't create variable in
                # NETCDF4_CLASSIC file from (2)  (NetCDF: Attempting netcdf-4
                # operation on strict nc3 netcdf-4 file)" i.e. not possible.
                if fmt == "NETCDF4_CLASSIC" and ex_field_n in (6, 7):
                    continue

                # Skip since "Can't write int64 data from <Count: (2) > to a
                # NETCDF3_CLASSIC file" causes a ValueError i.e. not possible.
                # Note: can remove this when Issue #140 is closed.
                if fmt in self.netcdf3_fmts and ex_field_n == 6:
                    continue

                cf.write(ex_field, tmpfile, fmt=fmt, mode="a")
                f = cf.read(tmpfile)

                if ex_field_n == 5:  # another special case
                    # The n=2 and n=5 example fields for cf-python aggregate
                    # down to one field, e.g. for b as n=2 and c as n=5:
                    #   >>> c.equals(b, verbose=-1)
                    #   Data: Different shapes: (118, 5, 8) != (36, 5, 8)
                    #   Field: Different data
                    #   False
                    #   >>> a = cf.aggregate([b, c])
                    #   >>> a
                    #   [<CF Field: air_potential_temperature(
                    #    time(154), latitude(5), longitude(8)) K>]
                    #
                    # therefore need to check FL length hasn't changed and
                    # (further below) that n=2,5 aggregated field is present.
                    pass  # i.e. new_length should remain the same as before
                else:
                    new_length += 1  # should be exactly one more field now
                self.assertEqual(len(f), new_length)

                if ex_field_n == 5:
                    ex_n2_and_n5_aggregated = cf.aggregate(
                        [cf.example_field(2), cf.example_field(5)]
                    )[0]
                    self.assertTrue(
                        any(
                            [
                                ex_n2_and_n5_aggregated.equals(
                                    file_field,
                                    ignore_properties=[
                                        "comment",
                                        "featureType",
                                        "remark",
                                    ],
                                )
                                for file_field in f
                            ]
                        )
                    )
                else:
                    # Can't guarantee the order of fields created during the
                    # append operation, so check that the new field is
                    # *somewhere* in the read-in FieldList.
                    self.assertTrue(
                        any(
                            [
                                ex_field.equals(
                                    file_field,
                                    ignore_properties=[
                                        "comment",
                                        "featureType",
                                        "remark",
                                    ],
                                )
                                for file_field in f
                            ]
                        )
                    )
                for file_field in f:
                    self.assertEqual(
                        file_field.nc_global_attributes(),
                        original_global_attrs,
                    )

            # Now do the same test, but appending all of the example fields in
            # one operation rather than one at a time, to check that it works.
            cf.write(g, tmpfile, fmt=fmt, mode="w")  # 1. overwrite to wipe
            append_ex_fields = cf.example_fields()
            del append_ex_fields[1]  # note: can remove after Issue #141 closed
            # Note: can remove this del when Issue #140 is closed:
            if fmt in self.netcdf3_fmts:
                del append_ex_fields[5]  # n=6 ex_field, minus 1 for above del
            if fmt in "NETCDF4_CLASSIC":
                # Remove n=6 and =7 for reasons as given above (del => minus 1)
                append_ex_fields = append_ex_fields[:5]

            # The expected length is len(append_ex_fields) + 1 (for the
            # original 'g') - 1 (for field n=5, which aggregates with n=2),
            # i.e. len(append_ex_fields) unchanged:
            overall_length = len(append_ex_fields)
            cf.write(
                append_ex_fields, tmpfile, fmt=fmt, mode="a"
            )  # 2. now append
            f = cf.read(tmpfile)
            self.assertEqual(len(f), overall_length)

            # Also test the mode="r+" alias for mode="a".
            cf.write(g, tmpfile, fmt=fmt, mode="w")  # 1. overwrite to wipe
            cf.write(
                append_ex_fields, tmpfile, fmt=fmt, mode="r+"
            )  # 2. now append
            f = cf.read(tmpfile)
            self.assertEqual(len(f), overall_length)

            # The appended fields themselves are now known to be correct,
            # but we also need to check that any coordinates that are
            # equal across different fields have been shared in the
            # source netCDF, rather than written in separately.
            #
            # Note that the coordinates that are shared across the set of
            # all example fields plus the field 'g' from the contents of
            # the original file (self.filename) are as follows:
            #
            # 1. Example fields n=0 and n=1 share:
            #    <DimensionCoordinate: time(1) days since 2018-12-01 >
            # 2. Example fields n=0, n=2 and n=5 share:
            #    <DimensionCoordinate: latitude(5) degrees_north> and
            #    <DimensionCoordinate: longitude(8) degrees_east>
            # 3. Example fields n=2 and n=5 share:
            #    <DimensionCoordinate: air_pressure(1) hPa>
            # 4. The original file field ('g') and example field n=1 share:
            #    <AuxiliaryCoordinate: latitude(10, 9) degrees_N>,
            #    <AuxiliaryCoordinate: longitude(9, 10) degrees_E>,
            #    <Dimension...: atmosphere_hybrid_height_coordinate(1) >,
            #    <DimensionCoordinate: grid_latitude(10) degrees>,
            #    <DimensionCoordinate: grid_longitude(9) degrees> and
            #    <DimensionCoordinate: time(1) days since 2018-12-01 >
            #
            # Therefore we check all of those coordinates for singularity,
            # i.e. the same underlying netCDF variables, in turn.

            # But first, since the order of the fields appended isn't
            # guaranteed, we must find the mapping of the example fields to
            # their position in the read-in FieldList.
            f = cf.read(tmpfile)
            # Element at index N gives position of example field n=N in file
            file_field_order = []
            for ex_field in cf.example_fields():
                position = [
                    f.index(file_field)
                    for file_field in f
                    if ex_field.equals(
                        file_field,
                        ignore_properties=["comment", "featureType", "remark"],
                    )
                ]
                if not position:
                    position = [None]  # to record skipped example fields
                file_field_order.append(position[0])

            equal_coors = {
                ((0, "dimensioncoordinate2"), (1, "dimensioncoordinate3")),
                ((0, "dimensioncoordinate0"), (2, "dimensioncoordinate1")),
                ((0, "dimensioncoordinate1"), (2, "dimensioncoordinate2")),
                ((0, "dimensioncoordinate0"), (5, "dimensioncoordinate1")),
                ((0, "dimensioncoordinate1"), (5, "dimensioncoordinate2")),
                ((2, "dimensioncoordinate3"), (5, "dimensioncoordinate3")),
            }
            for coor_1, coor_2 in equal_coors:
                ex_field_1_position, c_1 = coor_1
                ex_field_2_position, c_2 = coor_2
                # Now map the appropriate example field to the file FieldList
                f_1 = file_field_order[ex_field_1_position]
                f_2 = file_field_order[ex_field_2_position]
                # None for fields skipped in test, distinguish from falsy 0
                if f_1 is None or f_2 is None:
                    continue
                self.assertEqual(
                    f[f_1]
                    .constructs()
                    .filter_by_identity(c_1)
                    .value()
                    .nc_get_variable(),
                    f[f_2]
                    .constructs()
                    .filter_by_identity(c_2)
                    .value()
                    .nc_get_variable(),
                )

            # Note: after Issue #141, the block below should be un-commented.
            #
            # The original file field 'g' must be at the remaining position:
            # rem_position = list(set(
            #     range(len(f))).difference(set(file_field_order)))[0]
            # # In the final cases, it is easier to remove the one differing
            # # coordinate to get the equal coordinates that should be shared:
            # original_field_coors = dict(f[rem_position].coordinates())
            # ex_field_1_coors = dict(f[file_field_order[1]].coordinates())
            # for orig_coor, ex_1_coor in zip(
            #         original_field_coors.values(), ex_field_1_coors.values()):
            #     # The 'auxiliarycoordinate2' construct differs for both, so
            #     # skip that but otherwise the two fields have the same coors:
            #     if orig_coor.identity == "auxiliarycoordinate2":
            #         continue
            #     self.assertEqual(
            #         orig_coor.nc_get_variable(),
            #         ex_1_coor.nc_get_variable(),
            #     )

            # Check behaviour when appending identical fields, as an edge case:
            cf.write(g, tmpfile, fmt=fmt, mode="w")  # 1. overwrite to wipe
            cf.write(g_copy, tmpfile, fmt=fmt, mode="a")  # 2. now append
            f = cf.read(tmpfile)
            self.assertEqual(len(f), 2 * len(g))
            self.assertTrue(
                any(
                    [
                        file_field.equals(g[0], ignore_properties=["remark"])
                        for file_field in f
                    ]
                )
            )
            self.assertEqual(
                f[0].nc_global_attributes(), original_global_attrs
            )

    def test_read_write_netCDF4_compress_shuffle(self):
        for chunksize in self.chunk_sizes:
            with cf.chunksize(chunksize):
                f = cf.read(self.filename)[0]
                for fmt in ("NETCDF4", "NETCDF4_CLASSIC", "CFA4"):
                    cf.write(
                        f,
                        tmpfile,
                        fmt=fmt,
                        compress=1,
                        shuffle=True,
                    )
                    g = cf.read(tmpfile)[0]
                    self.assertTrue(
                        f.equals(g, verbose=2),
                        f"Bad read/write with lossless compression: {fmt}",
                    )

    def test_write_datatype(self):
        for chunksize in self.chunk_sizes:
            with cf.chunksize(chunksize):
                f = cf.read(self.filename)[0]
                self.assertEqual(f.dtype, numpy.dtype(float))
                cf.write(
                    f,
                    tmpfile,
                    fmt="NETCDF4",
                    datatype={numpy.dtype(float): numpy.dtype("float32")},
                )
                g = cf.read(tmpfile)[0]
                self.assertEqual(
                    g.dtype,
                    numpy.dtype("float32"),
                    "datatype read in is " + str(g.dtype),
                )

        # Keyword single
        f = cf.read(self.filename)[0]
        self.assertEqual(f.dtype, numpy.dtype(float))
        cf.write(f, tmpfile, fmt="NETCDF4", single=True)
        g = cf.read(tmpfile)[0]
        self.assertEqual(
            g.dtype,
            numpy.dtype("float32"),
            "datatype read in is " + str(g.dtype),
        )

        # Keyword double
        f = g
        self.assertEqual(f.dtype, numpy.dtype("float32"))
        cf.write(f, tmpfile2, fmt="NETCDF4", double=True)
        g = cf.read(tmpfile2)[0]
        self.assertEqual(
            g.dtype, numpy.dtype(float), "datatype read in is " + str(g.dtype)
        )

        for single in (True, False):
            for double in (True, False):
                with self.assertRaises(Exception):
                    cf.write(g, double=double, single=single)

        datatype = {numpy.dtype(float): numpy.dtype("float32")}
        with self.assertRaises(Exception):
            cf.write(g, datatype=datatype, single=True)

        with self.assertRaises(Exception):
            cf.write(g, datatype=datatype, double=True)

    def test_write_reference_datetime(self):
        for reference_datetime in ("1751-2-3", "1492-12-30"):
            cf.write(self.f0, tmpfile, reference_datetime=reference_datetime)

            g = cf.read(tmpfile)[0]

            t = g.dimension_coordinate("T")
            self.assertEqual(
                t.Units,
                cf.Units("days since " + reference_datetime),
                f"Units written were {t.Units.reftime!r} not "
                f"{reference_datetime!r}",
            )

    def test_read_write_unlimited(self):
        for fmt in ("NETCDF4", "NETCDF3_CLASSIC"):
            f = self.f1.copy()
            domain_axes = f.domain_axes()

            domain_axes["domainaxis0"].nc_set_unlimited(True)
            cf.write(f, tmpfile, fmt=fmt)

            f = cf.read(tmpfile)[0]
            domain_axes = f.domain_axes()
            self.assertTrue(domain_axes["domainaxis0"].nc_is_unlimited())

        fmt = "NETCDF4"
        f = self.f1.copy()
        domain_axes = f.domain_axes()
        domain_axes["domainaxis0"].nc_set_unlimited(True)
        domain_axes["domainaxis2"].nc_set_unlimited(True)
        cf.write(f, tmpfile, fmt=fmt)

        f = cf.read(tmpfile)[0]
        domain_axes = f.domain_axes()
        self.assertTrue(domain_axes["domainaxis0"].nc_is_unlimited())
        self.assertTrue(domain_axes["domainaxis2"].nc_is_unlimited())

    def test_read_pp(self):
        p = cf.read("wgdos_packed.pp")[0]
        p0 = cf.read(
            "wgdos_packed.pp",
            um={
                "fmt": "PP",
                "endian": "big",
                "word_size": 4,
                "version": 4.5,
                "height_at_top_of_model": 23423.65,
            },
        )[0]

        self.assertTrue(p.equals(p0, verbose=2))

    def test_read_CDL(self):
        subprocess.run(
            " ".join(["ncdump", self.filename, ">", tmpfile]),
            shell=True,
            check=True,
        )
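        # The full CDL dump written above (including data) should read back
        # in as a field equal to the one read from the original netCDF file,
        # as checked further below.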

        # For the '-h' and '-c' cases (header information only, or header
        # plus coordinate data, i.e. no field data), test two scenarios each:
        # one where there is sufficient metadata to map to fields, and one
        # where there isn't:
        #     1. Sufficient metadata, so should be read in successfully
        subprocess.run(
            " ".join(["ncdump", "-h", self.filename, ">", tmpfileh]),
            shell=True,
            check=True,
        )
        subprocess.run(
            " ".join(["ncdump", "-c", self.filename, ">", tmpfilec]),
            shell=True,
            check=True,
        )

        #     2. Insufficient metadata, so should error with a message as such
        geometry_1_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "geometry_1.nc"
        )
        subprocess.run(
            " ".join(["ncdump", "-h", geometry_1_file, ">", tmpfileh2]),
            shell=True,
            check=True,
        )
        subprocess.run(
            " ".join(["ncdump", "-c", geometry_1_file, ">", tmpfilec2]),
            shell=True,
            check=True,
        )

        f0 = cf.read(self.filename)[0]

        # Case (1) as above, so read in and check the fields are as should be
        f = cf.read(tmpfile)[0]
        cf.read(tmpfileh)[0]
        c = cf.read(tmpfilec)[0]

        # Case (2) as above, so the right error should be raised on read
        with self.assertRaises(ValueError):
            cf.read(tmpfileh2)[0]

        with self.assertRaises(ValueError):
            cf.read(tmpfilec2)[0]

        self.assertTrue(f0.equals(f, verbose=2))

        self.assertTrue(
            f.construct("grid_latitude").equals(
                c.construct("grid_latitude"), verbose=2
            )
        )
        self.assertTrue(
            f0.construct("grid_latitude").equals(
                c.construct("grid_latitude"), verbose=2
            )
        )

        with self.assertRaises(Exception):
            cf.read("test_read_write.py")

    def test_read_write_string(self):
        f = cf.read(self.string_filename)

        n = len(f) // 2

        for i in range(n):
            j = i + n
            self.assertTrue(
                f[i].data.equals(f[j].data, verbose=1),
                "{!r} {!r}".format(f[i], f[j]),
            )
            self.assertTrue(
                f[j].data.equals(f[i].data, verbose=1),
                "{!r} {!r}".format(f[j], f[i]),
            )

        # Note: Don't loop round all netCDF formats for better
        #       performance. Just one netCDF3 and one netCDF4 format
        #       is sufficient to test the functionality

        for string0 in (True, False):
            for fmt0 in ("NETCDF4", "NETCDF3_CLASSIC"):
                cf.write(f, tmpfile0, fmt=fmt0, string=string0)

                for string1 in (True, False):
                    for fmt1 in ("NETCDF4", "NETCDF3_CLASSIC"):
                        cf.write(f, tmpfile1, fmt=fmt1, string=string1)

                        for i, j in zip(cf.read(tmpfile1), cf.read(tmpfile0)):
                            self.assertTrue(i.equals(j, verbose=1))

    def test_read_broken_bounds(self):
        f = cf.read(self.broken_bounds, verbose=0)
        self.assertEqual(len(f), 2)
Example No. 28
0
# Note: this interactive fragment assumes objects created earlier in the
# session, e.g. 'lon' (an auxiliary coordinate whose cells are geometries,
# hence the interior ring below) and 't' (a field construct with an
# atmosphere_hybrid_height_coordinate coordinate reference).
print(lon.get_interior_ring().data.array)

# Inspect a domain ancillary construct and its bounds
a = t.constructs.get('domainancillary0')
print(a.array)
bounds = a.bounds
bounds
print(bounds.array)

# Inspect a coordinate reference construct: its coordinates, datum and
# coordinate conversion
crs = t.constructs('standard_name:atmosphere_hybrid_height_coordinate').value()
crs
crs.dump()
crs.coordinates()
crs.datum
crs.datum.parameters()
crs.coordinate_conversion
crs.coordinate_conversion.parameters()
crs.coordinate_conversion.domain_ancillaries()

# Compute non-parametric ('altitude') vertical coordinates from the
# parametric ones of example field 1
f = cf.example_field(1)
print(f)
print(f.auxiliary_coordinate('altitude', default=None))
g = f.compute_vertical_coordinates()
g.auxiliary_coordinate('altitude').dump()

# Inspect cell method constructs
print(t.cell_methods)
t.cell_methods().ordered()
cm = t.constructs('method:mean').value()
cm
cm.get_axes()
cm.get_method()
cm.qualifiers()
cm.get_qualifier('where')

# Inspect a field ancillary construct
a = t.get_construct('fieldancillary0')
a
a.properties()
Example No. 29
0
    def test_div_xy(self):
        f = cf.example_field(0)

        # Spherical polar coordinates
        theta = 90 - f.convert("Y", full_domain=True)
        sin_theta = theta.sin()

        radius = 2
        r = f.radius(radius)

        for wrap in (False, True, None):
            for one_sided in (False, True):
                x, y = f.grad_xy(
                    radius=radius,
                    x_wrap=wrap,
                    one_sided_at_boundary=one_sided,
                )

                d = cf.div_xy(
                    x,
                    y,
                    radius=radius,
                    x_wrap=wrap,
                    one_sided_at_boundary=one_sided,
                )

                self.assertTrue(d.Units == cf.Units("m-2 rad-2"), d.Units)

                term1 = x.derivative(
                    "X", wrap=wrap, one_sided_at_boundary=one_sided
                )
                term2 = (y * sin_theta).derivative(
                    "Y", one_sided_at_boundary=one_sided
                )

                d0 = (term1 + term2) / (sin_theta * r)
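                # d0 is the analytic spherical-polar divergence of (x, y):
                #     div = (dX/dlambda + d(Y*sin(theta))/dtheta)
                #           / (r*sin(theta))
                # where theta is the colatitude (90 - latitude) computed above.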

                # Check the data
                with cf.rtol(1e-10):
                    self.assertTrue((d.data == d0.data).all())

                del d.long_name
                d0.set_data(d.data)
                self.assertTrue(d.equals(d0))

        # Cartesian coordinates
        dim_x = f.dimension_coordinate("X")
        dim_y = f.dimension_coordinate("Y")
        dim_x.override_units("m", inplace=True)
        dim_y.override_units("m", inplace=True)
        dim_x.standard_name = "projection_x_coordinate"
        dim_y.standard_name = "projection_y_coordinate"
        f.cyclic("X", iscyclic=False)

        for wrap in (False, True, None):
            for one_sided in (True, False):
                x, y = f.grad_xy(x_wrap=wrap, one_sided_at_boundary=one_sided)

                d = cf.div_xy(
                    x, y, x_wrap=wrap, one_sided_at_boundary=one_sided
                )

                self.assertTrue(d.Units == cf.Units("m-2"))

                term1 = x.derivative(
                    "X", wrap=wrap, one_sided_at_boundary=one_sided
                )
                term2 = y.derivative("Y", one_sided_at_boundary=one_sided)

                d0 = term1 + term2

                del d.long_name
                del d0.long_name
                self.assertTrue(d.equals(d0, rtol=1e-10))
Example No. 30
0
    def test_Field_collapse_GROUPS(self):
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        verbose = False

        f = cf.example_field(2)

        g = f.collapse("T: mean", group=cf.M(12), group_span=cf.Y())
        expected_shape = list(f.shape)
        expected_shape[0] = 2

        if verbose:
            print(f)
            print(g)
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse("T: mean", group=cf.M(12, month=12), group_span=cf.Y())
        expected_shape = list(f.shape)
        expected_shape[0] = 3

        if verbose:
            print(f)
            print(g)
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse("T: mean", group=cf.M(12, day=16), group_span=cf.Y())
        expected_shape = list(f.shape)
        expected_shape[0] = 2

        if verbose:
            print(f)
            print(g)
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean", group=cf.M(12, month=11, day=27), group_span=cf.Y()
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 3

        if verbose:
            print(f)
            print(g)
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean", group=cf.M(12, month=6, day=27), group_span=cf.Y()
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 2

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean",
            group=cf.M(5, month=12),
            group_span=cf.M(5),
            group_contiguous=1,
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean",
            group=cf.M(5, month=12),
            group_span=cf.M(5),
            group_contiguous=1,
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean",
            group=cf.M(5, month=3),
            group_span=cf.M(5),
            group_contiguous=1,
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean",
            group=cf.M(5, month=2),
            group_span=cf.M(5),
            group_contiguous=1,
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean",
            group=cf.M(5, month=12),
            group_span=cf.M(5),
            group_contiguous=2,
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse("T: mean", group=cf.M(5, month=3))
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)
        # TODO: look into month offset when M < 12

        g = f.collapse(
            "T: mean",
            group=cf.M(5, month=3),
            group_span=cf.M(5),
            group_contiguous=2,
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse("T: mean", group=cf.M(5, month=12), group_contiguous=1)
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse("T: mean", group=cf.M(5, month=3), group_contiguous=1)
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse("T: mean", group=cf.M(5, month=12), group_contiguous=2)
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        # Test method=integral with groups
        g = f.collapse(
            "T: integral", group=cf.M(5, month=12), weights=True, measure=True
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 7
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse("T: mean", group=cf.M(5, month=3), group_contiguous=2)
        expected_shape = list(f.shape)
        expected_shape[0] = 7

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean within years time: minimum over years",
            within_years=cf.M(3),
            group_span=True,
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 4

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)

        g = f.collapse(
            "T: mean within years time: minimum over years",
            within_years=cf.seasons(),
            group_span=cf.M(3),
        )
        expected_shape = list(f.shape)
        expected_shape[0] = 4

        if verbose:
            print(f)
            print(g)
            print(
                g.dimension_coordinates("T").value().bounds.data.datetime_array
            )
            print(g.constructs)
        self.assertEqual(list(g.shape), expected_shape, g.shape)