Example #1
0
    def test_write_reference_datetime(self):
        """Check that `cf.write` honours the *reference_datetime* keyword.

        Adds a 'days since 1750-1-1' time coordinate to a field, writes
        it with an overriding reference datetime, and verifies that the
        units read back use the requested reference datetime.
        """
        # Allow running only a named subset of tests
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        for reference_datetime in ('1751-2-3', '1492-12-30'):
            for chunksize in self.chunk_sizes:
                # cf.chunksize is global state; restored at the end
                cf.chunksize(chunksize)
                f = cf.read(self.filename)[0]
                t = cf.DimensionCoordinate(
                    data=cf.Data([123], 'days since 1750-1-1')
                )

                t.standard_name = 'time'
                axisT = f.set_construct(cf.DomainAxis(1))
                f.set_construct(t, axes=[axisT])
                cf.write(f, tmpfile, fmt='NETCDF4',
                         reference_datetime=reference_datetime)
                g = cf.read(tmpfile)[0]
                t = g.dimension_coordinate('T')
                self.assertEqual(
                    t.Units, cf.Units('days since ' + reference_datetime),
                    ('Units written were ' + repr(t.Units.reftime)
                     + ' not ' + repr(reference_datetime)))
        # --- End: for
        cf.chunksize(self.original_chunksize)
    def test_read_write_string(self):
        """Round-trip string-valued data through netCDF3 and netCDF4."""
        fields = cf.read(self.string_filename)

        half = len(fields) // 2

        # The first and second halves of the field list hold equal data;
        # check pairwise equality in both directions.
        for idx in range(half):
            lhs = fields[idx]
            rhs = fields[idx + half]
            self.assertTrue(
                lhs.data.equals(rhs.data, verbose=1),
                "{!r} {!r}".format(lhs, rhs),
            )
            self.assertTrue(
                rhs.data.equals(lhs.data, verbose=1),
                "{!r} {!r}".format(rhs, lhs),
            )

        # Note: Don't loop round all netCDF formats for better
        #       performance. Just one netCDF3 and one netCDF4 format
        #       is sufficient to test the functionality

        formats = ("NETCDF4", "NETCDF3_CLASSIC")
        for string0 in (True, False):
            for fmt0 in formats:
                cf.write(fields, tmpfile0, fmt=fmt0, string=string0)

                for string1 in (True, False):
                    for fmt1 in formats:
                        cf.write(fields, tmpfile1, fmt=fmt1, string=string1)

                        for left, right in zip(cf.read(tmpfile1),
                                               cf.read(tmpfile0)):
                            self.assertTrue(left.equals(right, verbose=1))
Example #3
0
    def test_PP_WGDOS_UNPACKING(self):
        """Check unpacking of WGDOS-packed PP data and its round-trip.

        Sanity-checks the unpacked value range, then writes and re-reads
        the field in netCDF4 and CFA formats at each chunk size.
        """
        f = cf.read(self.ppfilename)[0]

        # Plausible-range sanity check on the unpacked values
        self.assertTrue(f.minimum() > 221.71,
                        'Bad unpacking of WGDOS packed data')
        self.assertTrue(f.maximum() < 310.45,
                        'Bad unpacking of WGDOS packed data')

        # Reference copy of the unpacked values
        array = f.array

        for chunksize in self.chunk_sizes:
            # cf.chunksize is global state; restored at the end
            cf.chunksize(chunksize)

            f = cf.read(self.ppfilename)[0]

            for fmt in ('NETCDF4', 'CFA4'):
                # print (fmt)
                # f.dump()
                # print (repr(f.dtype))
                # print (f._FillValue)
                # print (type(f._FillValue))
                # f._FillValue = numpy.array(f._FillValue , dtype='float32')
                cf.write(f, tmpfile, fmt=fmt)
                g = cf.read(tmpfile)[0]

                # Freshly-read PP data must match the reference values
                self.assertTrue((f.array == array).all(),
                                'Bad unpacking of PP WGDOS packed data')

                self.assertTrue(f.equals(g, verbose=2),
                                'Bad writing/reading. fmt='+fmt)

        cf.chunksize(self.original_chunksize)
def create_empty_array(datapathea, count_time, divt, latitude, longitude,
                       yearpart, years):
    """Write a netCDF file of zero-filled moisture-tracking variables.

    Builds, for each tracked quantity, a pair of zero arrays (the
    ``*_last`` state and the daily accumulation), wraps each in a
    `cf.Field` via ``wrap_netcdf`` and writes them all to one file.

    :param datapathea: two-element sequence of output paths; element 0
        is used when rolling over into the next year, element 1 otherwise
    :param count_time: unused; kept for interface compatibility
    :param divt: unused; kept for interface compatibility
    :param latitude: latitude coordinate values (defines grid rows)
    :param longitude: longitude coordinate values (defines grid columns)
    :param yearpart: sequence whose first element is the day-of-year index
    :param years: sequence whose first element is the current year
    """
    shape = (1, len(latitude), len(longitude))

    # (base variable name, units) for each last/day pair; a '<name>_last'
    # and a plain '<name>' field are written per entry, in this order.
    specs = (
        ('Sa_track_down', 'm3'),
        ('Sa_track_top', 'm3'),
        ('Sa_time_down', 's'),
        ('Sa_time_top', 's'),
        ('Sa_dist_down', 'm'),
        ('Sa_dist_top', 'm'),
    )

    # Day 365 rolls over into day 0 of the next year
    if yearpart[0] == 365:
        year_o = years[0] + 1
        yearpart_o = 0
    else:
        year_o = years[0]
        yearpart_o = yearpart[0] + 1

    # Build cf.field here.
    fields = []
    for name, units in specs:
        fields.append(wrap_netcdf(year_o, yearpart_o, np.zeros(shape),
                                  name + '_last', units))
        fields.append(wrap_netcdf(year_o, yearpart_o, np.zeros(shape),
                                  name, units))

    # Write out netcdf
    if yearpart[0] == 365:
        datapathnc = datapathea[0]
    else:
        datapathnc = datapathea[1]
    f_list = cf.FieldList(fields)
    cf.write(f_list, datapathnc, single=True, unlimited='time', compress=3)

    return
Example #5
0
 def save_datasets(self, datasets, filename, **kwargs):
     """Save all datasets to one netCDF4 file.

     Datasets sharing a shape reuse a single `cf.Domain` built from
     integer line/pixel dimension coordinates.
     """
     fields = []
     # Cache one Domain per dataset shape so same-shaped datasets share it
     shapes = {}
     for dataset in datasets:
         if dataset.shape in shapes:
             domain = shapes[dataset.shape]
         else:
             lines, pixels = dataset.shape
             # Create a grid_latitude dimension coordinate
             line_coord = cf.DimensionCoordinate(
                 data=cf.Data(np.arange(lines), '1'))
             pixel_coord = cf.DimensionCoordinate(
                 data=cf.Data(np.arange(pixels), '1'))
             domain = cf.Domain(dim={
                 'lines': line_coord,
                 'pixels': pixel_coord
             }, )
             shapes[dataset.shape] = domain
         data = cf.Data(dataset, dataset.info['units'])
         properties = {'standard_name': dataset.info['standard_name']}
         fields.append(
             cf.Field(properties=properties,
                      data=data,
                      axes=['lines', 'pixels'],
                      domain=domain))
     cf.write(fields, filename, fmt='NETCDF4')
Example #6
0
    def test_DSG_indexed_contiguous(self):
        """Round-trip an indexed-contiguous DSG file.

        Checks the specific humidity field's mask and values element by
        element against the reference array ``self.b``, then writes the
        fields out and re-reads them, checking equality.
        """
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        f = cf.read(self.indexed_contiguous, verbose=0)

        self.assertEqual(len(f), 2)

        # Select the specific humidity field
        q = f.select('specific_humidity')[0]

        qa = q.data.array

        # Element-wise mask comparison against the reference array
        for n in range(qa.shape[0]):
            for m in range(qa.shape[1]):
                self.assertTrue(
                    q._equals(qa.mask[n, m], self.b.mask[n, m]),
                    str(n) + ' ' + str(m) + ' ' + str(qa[n, m]) + ' ' +
                    str(self.b[n, m]))

        message = repr(qa - self.b)
        # ... +'\n'+repr(qa[2,0])+'\n'+repr(self.b[2, 0])

        self.assertTrue(q._equals(qa, self.b), message)

        # Round trip through a netCDF file
        cf.write(f, self.tempfilename, verbose=0)
        g = cf.read(self.tempfilename, verbose=0)

        self.assertEqual(len(g), len(f))

        for i in range(len(f)):
            self.assertTrue(g[i].equals(f[i], verbose=2))
Example #7
0
    def test_geometry_3(self):
        """Round-trip a geometry file whose coordinates carry no node
        counts, part node counts or interior rings."""
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        fields = cf.read(self.geometry_3_file, verbose=0)

        self.assertEqual(len(fields), 2, 'f = ' + repr(fields))

        for field in fields:
            self.assertTrue(field.equals(field.copy(), verbose=2))
            self.assertEqual(len(field.auxiliary_coordinates), 3)

        first = fields[0]
        for axis in ('X', 'Y', 'Z'):
            coord = first.construct('axis=' + axis)
            # None of the geometry node properties should be present
            for absent in (coord.has_node_count,
                           coord.has_part_node_count,
                           coord.has_interior_ring):
                self.assertFalse(absent(), 'axis=' + axis)

        cf.write(fields, self.tempfilename, Conventions='CF-' + VN,
                 verbose=0)

        reread = cf.read(self.tempfilename, verbose=0)

        self.assertEqual(len(reread), 2, 'f2 = ' + repr(reread))

        for a, b in zip(fields, reread):
            self.assertTrue(a.equals(b, verbose=2))
Example #8
0
    def test_DSG_indexed(self):
        """Round-trip an indexed DSG file.

        Compares the specific humidity field's data and mask against the
        reference array ``self.a``, then writes and re-reads the fields,
        checking equality.
        """
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        f = cf.read(self.indexed)

        self.assertEqual(len(f), 2)

        # Select the specific humidity field
        q = [g for g in f
             if g.get_property('standard_name') == 'specific_humidity'][0]

        self.assertTrue(q._equals(q.data.array.mask, self.a.mask))

        self.assertTrue(
            q._equals(q.data.array, self.a),
            '\nself.a=\n' + str(self.a) + '\nq.array=\n' + str(q.array)
        )

        # Round trip through a netCDF file
        cf.write(f, tmpfile, verbose=0)
        g = cf.read(tmpfile)

        self.assertEqual(len(g), len(f))

        for i in range(len(f)):
            self.assertTrue(g[i].equals(f[i], verbose=2))
Example #9
0
    def test_read_write_string(self):
        """Round-trip string-valued data through netCDF3 and netCDF4.

        The test file holds two matching halves of fields; they are
        checked for pairwise equality, then written and re-read with
        every combination of format and the ``string`` keyword.
        """
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        f = cf.read(self.string_filename)

        # The first and second halves of the field list hold equal data
        n = int(len(f) / 2)

        for i in range(0, n):

            j = i + n
            self.assertTrue(
                f[i].data.equals(f[j].data, verbose=1),
                "{!r} {!r}".format(f[i], f[j]),
            )
            self.assertTrue(
                f[j].data.equals(f[i].data, verbose=1),
                "{!r} {!r}".format(f[j], f[i]),
            )

        f0 = cf.read(self.string_filename)
        for string0 in (True, False):
            for fmt0 in ("NETCDF4", "NETCDF3_CLASSIC"):
                cf.write(f0, tmpfile0, fmt=fmt0, string=string0)

                for string1 in (True, False):
                    for fmt1 in ("NETCDF4", "NETCDF3_CLASSIC"):
                        cf.write(f0, tmpfile1, fmt=fmt1, string=string1)

                        # Both written files must read back identically
                        for i, j in zip(cf.read(tmpfile1), cf.read(tmpfile0)):
                            self.assertTrue(i.equals(j, verbose=1))
Example #10
0
    def test_DSG_indexed(self):
        """Round-trip an indexed DSG file and check data and mask."""
        fields = cf.read(self.indexed)

        self.assertEqual(len(fields), 2)

        # Select the specific humidity field
        humidity = next(
            field
            for field in fields
            if field.get_property("standard_name") == "specific_humidity"
        )

        values = humidity.data.array
        self.assertTrue(humidity._equals(values.mask, self.a.mask))

        self.assertTrue(
            humidity._equals(values, self.a),
            "\nself.a=\n" + str(self.a) + "\nq.array=\n"
            + str(humidity.array),
        )

        # Round trip through a netCDF file
        cf.write(fields, tmpfile, verbose=0)
        reread = cf.read(tmpfile)

        self.assertEqual(len(reread), len(fields))

        for original, copy in zip(fields, reread):
            self.assertTrue(copy.equals(original, verbose=2))
Example #11
0
    def test_DSG_indexed_contiguous(self):
        """Round-trip an indexed-contiguous DSG file.

        Checks the specific humidity field's mask and values element by
        element against the reference array ``self.b``, then writes the
        fields out and re-reads them, checking equality.
        """
        f = cf.read(self.indexed_contiguous, verbose=0)

        self.assertEqual(len(f), 2)

        # Select the specific humidity field
        q = f.select("specific_humidity")[0]

        qa = q.data.array

        # Element-wise mask comparison against the reference array
        for n in range(qa.shape[0]):
            for m in range(qa.shape[1]):
                self.assertTrue(
                    q._equals(qa.mask[n, m], self.b.mask[n, m]),
                    str(n)
                    + " "
                    + str(m)
                    + " "
                    + str(qa[n, m])
                    + " "
                    + str(self.b[n, m]),
                )

        message = repr(qa - self.b)
        # ... +'\n'+repr(qa[2,0])+'\n'+repr(self.b[2, 0])

        self.assertTrue(q._equals(qa, self.b), message)

        # Round trip through a netCDF file
        cf.write(f, tmpfile, verbose=0)
        g = cf.read(tmpfile, verbose=0)

        self.assertEqual(len(g), len(f))

        for i in range(len(f)):
            self.assertTrue(g[i].equals(f[i], verbose=2))
Example #12
0
    def test_write_reference_datetime(self):
        """Check that `cf.write` honours the *reference_datetime* keyword.

        Adds a 'days since 1750-1-1' time coordinate to a field, writes
        it with an overriding reference datetime, and verifies that the
        units read back use the requested reference datetime.
        """
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        for reference_datetime in ("1751-2-3", "1492-12-30"):
            for chunksize in self.chunk_sizes:
                # cf.chunksize is global state; restored at the end
                cf.chunksize(chunksize)
                f = cf.read(self.filename)[0]
                t = cf.DimensionCoordinate(
                    data=cf.Data([123], "days since 1750-1-1"))

                t.standard_name = "time"
                axisT = f.set_construct(cf.DomainAxis(1))
                f.set_construct(t, axes=[axisT])
                cf.write(
                    f,
                    tmpfile,
                    fmt="NETCDF4",
                    reference_datetime=reference_datetime,
                )
                g = cf.read(tmpfile)[0]
                t = g.dimension_coordinate("T")
                self.assertEqual(
                    t.Units,
                    cf.Units("days since " + reference_datetime),
                    ("Units written were " + repr(t.Units.reftime) + " not " +
                     repr(reference_datetime)),
                )
        # --- End: for
        cf.chunksize(self.original_chunksize)
Example #13
0
    def test_read_write_netCDF4_compress_shuffle(self):
        """Check lossless round-trips with compression and shuffling."""
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        tmpfiles.append(tmpfile)

        # One compression level and shuffle setting is enough to
        # exercise the lossless code path.
        shuffle = True
        compress = 1
        for chunksize in self.chunk_sizes:
            cf.chunksize(chunksize)
            f = cf.read(self.filename)[0]
            for fmt in ("NETCDF4", "NETCDF4_CLASSIC", "CFA4"):
                cf.write(
                    f,
                    tmpfile,
                    fmt=fmt,
                    compress=compress,
                    shuffle=shuffle,
                )
                g = cf.read(tmpfile)[0]
                self.assertTrue(
                    f.equals(g, verbose=2),
                    "Bad read/write with lossless compression: "
                    "{0}, {1}, {2}".format(fmt, compress, shuffle),
                )
        # --- End: for
        cf.chunksize(self.original_chunksize)
Example #14
0
    def test_read_write_format(self):
        """Round-trip a field through every supported output format."""
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        for chunksize in self.chunk_sizes:
            # cf.chunksize is global state set per iteration
            cf.chunksize(chunksize)
            for fmt in (
                    "NETCDF3_CLASSIC",
                    "NETCDF3_64BIT",
                    "NETCDF3_64BIT_OFFSET",
                    "NETCDF3_64BIT_DATA",
                    "NETCDF4",
                    "NETCDF4_CLASSIC",
                    "CFA",
            ):
                # print (fmt, string)
                f = cf.read(self.filename)[0]
                # Keep a pristine copy in case writing modifies f
                f0 = f.copy()
                cf.write(f, tmpfile, fmt=fmt)
                g = cf.read(tmpfile, verbose=0)
                self.assertEqual(len(g), 1, "g = " + repr(g))
                g0 = g[0]

                self.assertTrue(
                    f0.equals(g0, verbose=1),
                    "Bad read/write of format {!r}".format(fmt),
                )
Example #15
0
    def test_read_mask(self):
        """Check the *mask* keyword of `cf.read` and `apply_masking`.

        Verifies that masked elements are honoured by default, ignored
        with ``mask=False``, and recoverable via ``apply_masking`` —
        both without and with explicit _FillValue/missing_value
        properties.
        """
        f = self.f0.copy()

        N = f.size

        # Mask two elements
        f.data[1, 1] = cf.masked
        f.data[2, 2] = cf.masked

        # No explicit fill/missing properties: default fill value applies
        f.del_property("_FillValue", None)
        f.del_property("missing_value", None)

        cf.write(f, tmpfile)

        g = cf.read(tmpfile)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        # mask=False leaves the raw (unmasked) values in place
        g = cf.read(tmpfile, mask=False)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N)

        # apply_masking recovers the mask after the fact
        g.apply_masking(inplace=True)
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        # Repeat with explicit fill/missing properties set
        f.set_property("_FillValue", 999)
        f.set_property("missing_value", -111)
        cf.write(f, tmpfile)

        g = cf.read(tmpfile)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)

        g = cf.read(tmpfile, mask=False)[0]
        self.assertEqual(numpy.ma.count(g.data.array), N)

        g.apply_masking(inplace=True)
        self.assertEqual(numpy.ma.count(g.data.array), N - 2)
Example #16
0
    def test_write_coordinates(self):
        """Writing with ``coordinates=True`` must still round-trip."""
        original = cf.example_field(0)

        cf.write(original, tmpfile, coordinates=True)
        reread = cf.read(tmpfile)

        self.assertEqual(len(reread), 1)
        self.assertTrue(reread[0].equals(original))
Example #17
0
    def test_groups_compression(self):
        """Check netCDF4 group support for ragged (compressed) fields.

        Compresses a field by indexed-contiguous ragged arrays, moves
        its variables into nested groups, writes a grouped file, and
        verifies group placement with netCDF4 plus a full read-back
        equality check.
        """
        f = cf.example_field(4)

        ungrouped_file = ungrouped_file3
        grouped_file = grouped_file3

        # Compress to indexed-contiguous ragged representation
        f.compress('indexed_contiguous', inplace=True)
        f.data.get_count().nc_set_variable('count')
        f.data.get_index().nc_set_variable('index')

        # Baseline: ungrouped round trip must be lossless
        cf.write(f, ungrouped_file, verbose=1)
        g = cf.read(ungrouped_file)[0]
        self.assertTrue(f.equals(g, verbose=2))

        # ------------------------------------------------------------
        # Move the field construct to the /forecast/model group
        # ------------------------------------------------------------
        g.nc_set_variable_groups(['forecast', 'model'])

        # ------------------------------------------------------------
        # Move the count variable to the /forecast group
        # ------------------------------------------------------------
        g.data.get_count().nc_set_variable_groups(['forecast'])

        # ------------------------------------------------------------
        # Move the index variable to the /forecast group
        # ------------------------------------------------------------
        g.data.get_index().nc_set_variable_groups(['forecast'])

        # ------------------------------------------------------------
        # Move the coordinates that span the element dimension to the
        # /forecast group
        # ------------------------------------------------------------
        name = 'altitude'
        g.construct(name).nc_set_variable_groups(['forecast'])

        # ------------------------------------------------------------
        # Move the sample dimension to the /forecast group
        # ------------------------------------------------------------
        g.data.get_count().nc_set_sample_dimension_groups(['forecast'])

        cf.write(g, grouped_file, verbose=1)

        # Confirm group placement directly with the netCDF4 library
        nc = netCDF4.Dataset(grouped_file, 'r')
        self.assertIn(f.nc_get_variable(),
                      nc.groups['forecast'].groups['model'].variables)
        self.assertIn(f.data.get_count().nc_get_variable(),
                      nc.groups['forecast'].variables)
        self.assertIn(f.data.get_index().nc_get_variable(),
                      nc.groups['forecast'].variables)
        self.assertIn(
            f.construct('altitude').nc_get_variable(),
            nc.groups['forecast'].variables)
        nc.close()

        # The grouped file must still read back equal to the original
        h = cf.read(grouped_file, verbose=1)
        self.assertEqual(len(h), 1, repr(h))
        self.assertTrue(f.equals(h[0], verbose=2))
Example #18
0
def save(v, fname, compress=5, **kwds):
    """Save a field to a netCDF file via `cf.write`.

    :param cf.Field v: cf.Field to save
    :param str fname: filename
    :param int compress: compression level, 0-9 (default 5)
    :param kwds: extra keyword arguments passed through to `cf.write`
    """

    cf.write(v, fname, compress=compress, **kwds)
Example #19
0
    def test_write_filename(self):
        """`cf.write` must refuse to overwrite a file being read from."""
        field = self.f0
        expected = field.array

        cf.write(field, tmpfile)
        readback = cf.read(tmpfile)

        # Overwriting the file that readback lazily reads must raise
        with self.assertRaises(Exception):
            cf.write(readback, tmpfile)

        self.assertTrue((expected == readback[0].array).all())
Example #20
0
    def test_write_datatype(self):
        """Check the *datatype*, *single* and *double* write keywords.

        Verifies dtype conversion on write via an explicit mapping, via
        the ``single``/``double`` shortcuts, and that combining the
        shortcuts with each other or with ``datatype`` raises.
        """
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        tmpfiles.append(tmpfile)

        # Explicit float64 -> float32 conversion via the datatype mapping
        for chunksize in self.chunk_sizes:
            cf.chunksize(chunksize)
            f = cf.read(self.filename)[0]
            self.assertEqual(f.dtype, numpy.dtype(float))
            cf.write(
                f,
                tmpfile,
                fmt="NETCDF4",
                datatype={numpy.dtype(float): numpy.dtype("float32")},
            )
            g = cf.read(tmpfile)[0]
            self.assertEqual(
                g.dtype,
                numpy.dtype("float32"),
                "datatype read in is " + str(g.dtype),
            )

        cf.chunksize(self.original_chunksize)

        # Keyword single
        f = cf.read(self.filename)[0]
        self.assertEqual(f.dtype, numpy.dtype(float))
        cf.write(f, tmpfile, fmt="NETCDF4", single=True)
        g = cf.read(tmpfile)[0]
        self.assertEqual(
            g.dtype,
            numpy.dtype("float32"),
            "datatype read in is " + str(g.dtype),
        )

        tmpfiles.append(tmpfile2)

        # Keyword double
        f = g
        self.assertEqual(f.dtype, numpy.dtype("float32"))
        cf.write(f, tmpfile2, fmt="NETCDF4", double=True)
        g = cf.read(tmpfile2)[0]
        self.assertEqual(g.dtype, numpy.dtype(float),
                         "datatype read in is " + str(g.dtype))

        # single and double may never be supplied together (even False)
        for single in (True, False):
            for double in (True, False):
                with self.assertRaises(Exception):
                    _ = cf.write(g, double=double, single=single)
        # --- End: for

        # datatype may not be combined with single or double
        datatype = {numpy.dtype(float): numpy.dtype("float32")}
        with self.assertRaises(Exception):
            _ = cf.write(g, datatype=datatype, single=True)

        with self.assertRaises(Exception):
            _ = cf.write(g, datatype=datatype, double=True)
Example #21
0
    def test_write_reference_datetime(self):
        """Units written must honour the *reference_datetime* keyword."""
        for reference_datetime in ("1751-2-3", "1492-12-30"):
            cf.write(self.f0, tmpfile, reference_datetime=reference_datetime)

            reread = cf.read(tmpfile)[0]
            t = reread.dimension_coordinate("T")

            expected = cf.Units("days since " + reference_datetime)
            self.assertEqual(
                t.Units,
                expected,
                f"Units written were {t.Units.reftime!r} not "
                f"{reference_datetime!r}",
            )
Example #22
0
    def test_geometry_4(self):
        """Round-trip a geometry file with node counts but no parts.

        Also exercises mutation of node-count properties and netCDF
        variable names between writes.
        """
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        f = cf.read(self.geometry_4_file, verbose=0)

        self.assertEqual(len(f), 2, 'f = ' + repr(f))

        for g in f:
            self.assertTrue(g.equals(g.copy(), verbose=2))
            self.assertEqual(len(g.auxiliary_coordinates), 3)

        # Inspect the first field explicitly.  (Previously this relied
        # on the leftover loop variable ``g`` — i.e. f[1] — unlike the
        # sibling geometry tests, which all check f[0].)
        g = f[0]
        for axis in ('X', 'Y'):
            coord = g.construct('axis=' + axis)
            # Node counts are present but parts/interior rings are not
            self.assertTrue(coord.has_node_count(), 'axis=' + axis)
            self.assertFalse(coord.has_part_node_count(), 'axis=' + axis)
            self.assertFalse(coord.has_interior_ring(), 'axis=' + axis)

        cf.write(f, self.tempfilename, Conventions='CF-' + VN, verbose=0)

        f2 = cf.read(self.tempfilename, verbose=0)

        self.assertEqual(len(f2), 2, 'f2 = ' + repr(f2))

        for a, b in zip(f, f2):
            self.assertTrue(a.equals(b, verbose=2))

        # Setting of node count properties must not break writing
        coord = f[0].construct('axis=X')
        nc = coord.get_node_count()
        cf.write(f, self.tempfilename)
        nc.set_property('long_name', 'Node counts')
        cf.write(f, self.tempfilename, verbose=0)
        nc.nc_set_variable('new_var_name')
        cf.write(f, self.tempfilename, verbose=0)
Example #23
0
    def test_GATHERING(self):
        """Round-trip gathered (compressed-by-gathering) fields."""
        f = self.f.copy()

        self.assertEqual(len(f), 3)

        # NOTE(review): this result is immediately overwritten below;
        # the call only implicitly checks that the selection succeeds.
        g = f.select("long_name=temp3")[0]

        cf.write(f, tmpfile, verbose=0)
        g = cf.read(tmpfile, verbose=0)

        self.assertEqual(len(g), len(f), str(len(g)) + " " + str(len(f)))

        for a, b in zip(f, g):
            self.assertTrue(b.equals(a, verbose=2))
Example #24
0
    def test_EXTERNAL_WRITE(self):
        """Check writing of fields that use external cell measures.

        Reads a parent file with its external file, round-trips the
        combination, then writes the cell measure back out as an
        external variable and checks both resulting files.
        """
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        parent = cf.read(self.parent_file)
        combined = cf.read(self.combined_file)

        # External file contains only the cell measure variable
        f = cf.read(self.parent_file, external=self.external_file)

        # Writing the merged fields must reproduce the combined file
        cf.write(f, self.tempfilename)
        g = cf.read(self.tempfilename)

        self.assertEqual(len(g), len(combined))

        for i in range(len(g)):
            self.assertTrue(combined[i].equals(g[i], verbose=2))

        cell_measure = g[0].constructs("measure:area").value()

        # Mark the cell measure as external for the next write
        self.assertFalse(cell_measure.nc_get_external())
        cell_measure.nc_set_external(True)
        self.assertTrue(cell_measure.nc_get_external())
        self.assertTrue(cell_measure.properties())
        self.assertTrue(cell_measure.has_data())

        self.assertTrue(g[0].constructs.filter_by_identity(
            "measure:area").value().nc_get_external())

        # Write parent and external variables to separate files
        cf.write(
            g,
            self.tempfilename_parent,
            external=self.tempfilename_external,
            verbose=0,
        )

        # Parent file alone must match the original parent fields
        h = cf.read(self.tempfilename_parent, verbose=0)

        self.assertEqual(len(h), len(parent))

        for i in range(len(h)):
            self.assertTrue(parent[i].equals(h[i], verbose=2))

        # External file must match the original external fields
        h = cf.read(self.tempfilename_external)
        external = cf.read(self.external_file)

        self.assertEqual(len(h), len(external))

        for i in range(len(h)):
            self.assertTrue(external[i].equals(h[i], verbose=2))
Example #25
0
    def test_geometry_2(self):
        """Round-trip a geometry file whose coordinates have node counts.

        Also exercises mutation of node-count properties and netCDF
        variable names between writes.
        """
        f = cf.read(self.geometry_2_file, verbose=0)

        self.assertEqual(len(f), 2, "f = " + repr(f))

        for g in f:
            self.assertTrue(g.equals(g.copy(), verbose=2))
            self.assertEqual(len(g.auxiliary_coordinates()), 3)

        g = f[0]
        for axis in ("X", "Y", "Z"):
            coord = g.construct("axis=" + axis)
            # Node counts present; parts and interior rings absent
            self.assertTrue(coord.has_node_count(), "axis=" + axis)
            self.assertFalse(coord.has_part_node_count(), "axis=" + axis)
            self.assertFalse(coord.has_interior_ring(), "axis=" + axis)

        cf.write(f, self.tempfilename, Conventions="CF-" + VN, verbose=0)

        f2 = cf.read(self.tempfilename, verbose=0)

        self.assertEqual(len(f2), 2, "f2 = " + repr(f2))

        for a, b in zip(f, f2):
            self.assertTrue(a.equals(b, verbose=2))

        # Setting of node count properties must not break writing
        coord = f[0].construct("axis=X")
        nc = coord.get_node_count()
        cf.write(f, self.tempfilename)
        nc.set_property("long_name", "Node counts")
        cf.write(f, self.tempfilename, verbose=0)
        nc.nc_set_variable("new_var_name")
        cf.write(f, self.tempfilename, verbose=0)
Example #26
0
    def test_write_filename(self):
        """Writing over a file currently being read from must raise."""
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        tmpfiles.append(tmpfile)

        field = cf.example_field(0)
        expected = field.array

        cf.write(field, tmpfile)
        readback = cf.read(tmpfile)

        # Overwriting the file that readback lazily reads must raise
        with self.assertRaises(Exception):
            cf.write(readback, tmpfile)

        self.assertTrue((expected == readback[0].array).all())
Example #27
0
    def test_GATHERING(self):
        """Round-trip gathered (compressed-by-gathering) fields."""
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        f = cf.read(self.gathered, verbose=0)

        self.assertEqual(len(f), 3)

        # NOTE(review): this result is immediately overwritten below;
        # the call only implicitly checks that the selection succeeds.
        g = f.select('long_name=temp3')[0]

        cf.write(f, self.tempfilename, verbose=0)
        g = cf.read(self.tempfilename, verbose=0)

        self.assertEqual(len(g), len(f), str(len(g)) + ' ' + str(len(f)))

        for a, b in zip(f, g):
            self.assertTrue(b.equals(a, verbose=2))
Example #28
0
    def test_GATHERING(self):
        """Round-trip gathered (compressed-by-gathering) fields."""
        if self.test_only and inspect.stack()[0][3] not in self.test_only:
            return

        fields = self.f.copy()

        self.assertEqual(len(fields), 3)

        # Implicitly checks that the gathered field is selectable;
        # the result itself is replaced below by the re-read fields.
        g = fields.select("long_name=temp3")[0]

        cf.write(fields, tmpfile, verbose=0)
        g = cf.read(tmpfile, verbose=0)

        self.assertEqual(
            len(g), len(fields), str(len(g)) + " " + str(len(fields))
        )

        for original, reread in zip(fields, g):
            self.assertTrue(reread.equals(original, verbose=2))
Example #29
0
    def test_read_write_format(self):
        """Round-trip a field through the netCDF3 and CFA formats."""
        cf.write(self.f1, tmpfile)

        for chunksize in self.chunk_sizes:
            # chunksize as a context manager restores itself on exit
            with cf.chunksize(chunksize):
                for fmt in self.netcdf3_fmts + ["CFA"]:
                    f = cf.read(tmpfile)[0]

                    cf.write(f, tmpfile2, fmt=fmt)
                    g = cf.read(tmpfile2, verbose=0)
                    self.assertEqual(len(g), 1)
                    g = g[0]

                    self.assertTrue(
                        f.equals(g, verbose=1),
                        f"Bad read/write of format {fmt!r}",
                    )
Example #30
0
 def test_read_write_netCDF4_compress_shuffle(self):
     """Check lossless round-trips with compression and shuffling."""
     for chunksize in self.chunk_sizes:
         # chunksize as a context manager restores itself on exit
         with cf.chunksize(chunksize):
             f = cf.read(self.filename)[0]
             for fmt in ("NETCDF4", "NETCDF4_CLASSIC", "CFA4"):
                 cf.write(
                     f,
                     tmpfile,
                     fmt=fmt,
                     compress=1,
                     shuffle=True,
                 )
                 g = cf.read(tmpfile)[0]
                 # Compression must be lossless: re-read equals original
                 self.assertTrue(
                     f.equals(g, verbose=2),
                     f"Bad read/write with lossless compression: {fmt}",
                 )