Example #1
import os

import numpy as np
from netCDF4 import Dataset as ncDataset


def create_dataset(output_path, max_zoom, variables_list,
                   polarizations=None):
    if os.path.isfile(output_path):
        # Reopen an existing tile store in append mode.
        return ncDataset(output_path, 'a', format='NETCDF4')

    max_x_tiles = 2**max_zoom

    dataset = ncDataset(output_path, 'w', format='NETCDF4')
    dataset.createDimension('vars', len(variables_list))
    if polarizations is not None:
        dataset.createDimension('polarizations', len(polarizations))
    dataset.createDimension('zoom', max_zoom+1)
    dataset.createDimension('x', max_x_tiles)
    dataset.createDimension('y', max_x_tiles)
    dataset.createDimension('shape0', 256)
    dataset.createDimension('shape1', 256)

    if polarizations is not None:
        dims = ('vars', 'polarizations', 'zoom', 'x', 'y', 'shape0', 'shape1')
    else:
        dims = ('vars', 'zoom', 'x', 'y', 'shape0', 'shape1')

    # Variable-length strings are the simplest way to store names in NETCDF4;
    # compression does not apply to vlen types, so zlib/complevel are omitted.
    vars_var = dataset.createVariable('Variables', str, ('vars',))
    vars_var[:] = np.array(variables_list, dtype=object)

    if polarizations is not None:
        polar_var = dataset.createVariable('Polarizations', str,
                                           ('polarizations',))
        polar_var[:] = np.array(polarizations, dtype=object)

    # u1 = NC_UBYTE 0-255
    dataset.createVariable('Data', 'u1', dims, zlib=True, complevel=6)
    return dataset
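
A minimal usage sketch for the function above (the output path, zoom level, and variable names are invented for illustration): the first call creates the tile store, and a later call with the same path reopens it in append mode.

if __name__ == "__main__":
    ds = create_dataset("tiles.nc", max_zoom=3,
                        variables_list=["sigma0", "incidence_angle"],
                        polarizations=["HH", "HV", "VH", "VV"])
    # Dimension order: vars, polarizations, zoom, x, y, shape0, shape1
    print(ds.variables["Data"].shape)
    ds.close()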
Example #2
def do(datasets):
    #with s:
    for dataset in datasets:
        if not isinstance(dataset, dict):
            # A name was passed in; look up the Dataset record.
            dataset = Dataset.objects.filter(name=dataset)[0]
            name = dataset.name
            uri = dataset.uri
            keep_up_to_date = dataset.keep_up_to_date
        else:
            name = dataset["name"]
            uri = dataset["uri"]
            keep_up_to_date = dataset["keep_up_to_date"]
        try:
            try:
                #get_lock()
                filemtime = datetime.fromtimestamp(
                    os.path.getmtime(
                        os.path.join(config.topologypath, name + ".nc")))
                #print filemtime
                difference = datetime.now() - filemtime
                if keep_up_to_date:
                    # Refresh when the cached topology is older than 30 minutes.
                    if difference.seconds > 0.5 * 3600 or difference.days > 0:
                        #print "true"
                        nc = ncDataset(uri)
                        topo = ncDataset(os.path.join(
                            config.topologypath, name + ".nc"))

                        time1 = nc.variables['time'][-1]
                        time2 = topo.variables['time'][-1]
                        #print time1, time2
                        nc.close()
                        topo.close()
                        if time1 != time2:
                            check = True
                            logger.info("Updating: " + uri)
                            create_topology(name, uri)
                            #while check:
                            #    try:
                            #        check_nc = ncDataset(nclocalpath)
                            #        check_nc.close()
                            #        check = False
                            #    except: # TODO: Catch the specific file corrupt error im looking for here
                            #        create_topology(name, dataset["uri"])
            except Exception:
                # No cached topology exists yet (or it could not be read): build it.
                logger.info("Initializing: " + uri)
                create_topology(name, uri)
            try:
                nc.close()
                topo.close()
            except Exception:
                pass
        except Exception as detail:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            logger.error("Disabling Error: " +\
                                 repr(traceback.format_exception(exc_type, exc_value,
                                              exc_traceback)))
Example #3
    def get_cell_methods_contents(self, ifile):
        """
        Get the cell_methods attribute of the main variable in the input file.

        :param ifile: path to the input NetCDF file
        :return: [string] cell_methods content
        """
        variable = os.path.basename(ifile).split('_')[0]
        ncds = ncDataset(ifile)
        try:
            return getattr(ncds.variables[variable], 'cell_methods')
        finally:
            ncds.close()
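
Outside the class, the same lookup can be written as a small standalone helper; a sketch (the example file name is hypothetical), using a context manager so the dataset is closed even if the variable or attribute is missing.

import os
from netCDF4 import Dataset as ncDataset

def cell_methods_of(ifile):
    # CMIP-style file names start with the variable name: <var>_<table>_... .nc
    variable = os.path.basename(ifile).split('_')[0]
    with ncDataset(ifile) as ncds:
        return ncds.variables[variable].cell_methods

# e.g. cell_methods_of("tas_Amon_example.nc") might return "time: mean"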
Example #4
    def ncatted_common_updates(self, file, errors):
        """
        A set of common updates used when all other QC modifications are completed.

        :param file: ncfile name
        :param errors: list
        :return:
        """

        mod_date = datetime.datetime.now().strftime('%Y-%m-%d')

        cp4cds_statement = "As part of the Climate Projections for the Copernicus Data Store (CP4CDS) CMIP5 quality assurance " \
                           "testing, the following error(s) were corrected by the CP4CDS team: \n {} \n " \
                           "The tracking id was also updated; the original is stored under source_tracking_id. \n" \
                           "For further details contact [email protected] or the Centre for Environmental Data Analysis (CEDA) " \
                           "at [email protected]".format('\n'.join(errors))

        # Get the original tracking_id before it is overwritten.
        with ncDataset(file) as ds:
            orig_tracking_id = getattr(ds, 'tracking_id')
        self.ncatt._run_ncatted('tracking_id',
                                'global',
                                'o',
                                'c',
                                str(uuid.uuid4()),
                                file,
                                noHistory=True)
        self.ncatt._run_ncatted('cp4cds_update_info',
                                'global',
                                'c',
                                'c',
                                cp4cds_statement,
                                file,
                                noHistory=True)
        self.ncatt._run_ncatted('source_tracking_id',
                                'global',
                                'c',
                                'c',
                                orig_tracking_id,
                                file,
                                noHistory=True)
        self.ncatt._run_ncatted(
            'history',
            'global',
            'a',
            'c',
            "\nUpdates made on {} as part of the CP4CDS project; see cp4cds_update_info"
            .format(mod_date),
            file,
            noHistory=True)
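
For reference, the same global-attribute updates could also be made directly with netCDF4 rather than through the NCO ncatted wrapper used above; a hedged sketch, not the class's actual mechanism (the attribute names mirror the calls above).

import datetime
import uuid
from netCDF4 import Dataset as ncDataset

def apply_common_updates(path, cp4cds_statement):
    mod_date = datetime.datetime.now().strftime('%Y-%m-%d')
    with ncDataset(path, 'a') as ds:
        # Preserve the original tracking_id, then replace it.
        ds.source_tracking_id = getattr(ds, 'tracking_id', '')
        ds.tracking_id = str(uuid.uuid4())
        ds.cp4cds_update_info = cp4cds_statement
        ds.history = getattr(ds, 'history', '') + (
            "\nUpdates made on {} as part of the CP4CDS project; "
            "see cp4cds_update_info".format(mod_date))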
Example #5
def update_dataset_cache(dataset):
    try:
        try:
            filemtime = datetime.fromtimestamp(
                os.path.getmtime(
                    os.path.join(
                        settings.TOPOLOGY_PATH, dataset.name + ".nc"
                    )))
            if dataset.keep_up_to_date:
                nc = topo = None
                try:
                    nc = ncDataset(dataset.path())
                    topo = ncDataset(os.path.join(
                        settings.TOPOLOGY_PATH, dataset.name + ".nc"))

                    time1 = nc.variables['time'][-1]
                    time2 = topo.variables['time'][-1]
                    if time1 != time2:
                        logger.info("Updating: " + dataset.path())
                        create_topology(dataset.name, dataset.path(), dataset.latitude_variable or 'lat', dataset.longitude_variable or 'lon')
                    else:
                        logger.info("No new time values found in dataset, nothing to update!")
                except Exception:
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    logger.error("Disabling Error: " + repr(traceback.format_exception(exc_type, exc_value, exc_traceback)))
                finally:
                    # Close only the handles that were actually opened.
                    for handle in (nc, topo):
                        if handle is not None:
                            handle.close()
            else:
                logger.info("Dataset not marked for update ('keep_up_to_date' is False).  Not doing anything.")
        except Exception:
            logger.info("No cache found, Initializing: " + dataset.path())
            create_topology(dataset.name, dataset.path(), dataset.latitude_variable or 'lat', dataset.longitude_variable or 'lon')

    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        logger.error("Disabling Error: " + repr(traceback.format_exception(exc_type, exc_value, exc_traceback)))
Example #6
    def _failsafe_parse_(self):
        """
        [private function - not to be called from outside the class]
        ['name' needs to be initialised before calling]
        """
        # ==== fail - open it as a normal array and deduce the dimensions from the variable-function names ==== #
        # ==== done by parsing ALL variables in the NetCDF, and comparing their call-parameters with the   ==== #
        # ==== name map available here.                                                                    ==== #
        init_chunk_dict = {}
        self.dataset = ncDataset(str(self.filename))
        refdims = self.dataset.dimensions.keys()
        max_field = ""
        max_dim_names = ()
        max_coincide_dims = 0
        for vname in self.dataset.variables:
            var = self.dataset.variables[vname]
            coincide_dims = []
            for vdname in var.dimensions:
                if vdname in refdims:
                    coincide_dims.append(vdname)
            n_coincide_dims = len(coincide_dims)
            if n_coincide_dims > max_coincide_dims:
                max_field = vname
                max_dim_names = tuple(coincide_dims)
                max_coincide_dims = n_coincide_dims
        self.name = max_field
        for nc_dname in max_dim_names:
            pcls_dname = None
            for dname in self._static_name_maps.keys():
                if nc_dname in self._static_name_maps[dname]:
                    pcls_dname = dname
                    break
            nc_dimsize = None
            pcls_dim_chunksize = None
            if pcls_dname is not None and pcls_dname in self.dimensions:
                pcls_dim_chunksize = self._min_dim_chunksize
            if isinstance(self.chunksize, dict) and pcls_dname is not None:
                nc_dimsize = self.dataset.dimensions[nc_dname].size
                if pcls_dname in self.chunksize.keys():
                    pcls_dim_chunksize = self.chunksize[pcls_dname][1]
            if pcls_dname is not None and nc_dname is not None and nc_dimsize is not None and pcls_dim_chunksize is not None:
                init_chunk_dict[nc_dname] = pcls_dim_chunksize

        # ==== because in this case it has shown that the requested chunksize setup cannot be used, ==== #
        # ==== replace the requested chunksize with this auto-derived version.                      ==== #
        return init_chunk_dict
Example #7
from netCDF4 import Dataset as ncDataset


def write_attrib_to_nc(nc_path, area_def, start_time, max_zoom_level,
                       resolution, polarizations):
    dataset = ncDataset(nc_path, 'a', format='NETCDF4')
    # area_extent is (min_x, min_y, max_x, max_y), i.e. lon before lat.
    area_extent = area_def.area_extent
    dataset.setncattr('geospatial_lon_min',
                      min(area_extent[0], area_extent[2]))
    dataset.setncattr('geospatial_lat_min',
                      min(area_extent[1], area_extent[3]))
    dataset.setncattr('geospatial_lon_max',
                      max(area_extent[0], area_extent[2]))
    dataset.setncattr('geospatial_lat_max',
                      max(area_extent[1], area_extent[3]))

    dataset.setncattr('start_date', start_time.isoformat())

    dataset.setncattr('polarizations', ','.join(polarizations))
    dataset.setncattr('max_zoom_level', str(max_zoom_level))
    dataset.setncattr('resolution', str(resolution))
    dataset.close()
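
A quick usage sketch (the extent, date, and metadata values are invented; any object exposing an area_extent attribute, such as a pyresample AreaDefinition, would work). The target file must already exist, since the function opens it in append mode.

from collections import namedtuple
from datetime import datetime

FakeArea = namedtuple("FakeArea", "area_extent")
# (min_lon, min_lat, max_lon, max_lat) for a geographic projection
area = FakeArea(area_extent=(-20.0, 40.0, 10.0, 60.0))

write_attrib_to_nc("tiles.nc", area, datetime(2021, 1, 1),
                   max_zoom_level=3, resolution=40,
                   polarizations=["HH", "HV"])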
Example #8
def writesubd(date, tile, hisfiles):

    # Allocate the (largest) buffer that hosts the data copied
    # from the netCDF files to the GDF file.
    shape = (nz + 1, ny, nx)
    data = np.zeros(shape, dtype=np.float32)

    subd = subdomains[tile]
    reader = predefined_readers[subd]
    datadir = reader.datadir
    filename = datafilename(datadir, date, subd)
    assert os.path.isfile(filename)

    for quarter in range(4):
        ncfile = hisfiles[quarter]
        with ncDataset(ncfile) as nc:
            for varname in reader.variables:
                varshape = reader.variable_shape[varname]
                ndim = len(varshape)
                ncshape = nc.variables[varname].shape

                vidx, oidx, iidx = set_indices(varshape, ncshape, tile)

                with open(filename, "rb+") as fid:
                    for kt in range(6):
                        hour = quarter * 6 + kt
                        data[vidx].flat = 0.
                        datain = nc.variables[varname][kt]
                        if ndim == 3:
                            data[(vidx, ) + oidx] = datain[(vidx, ) + iidx]
                        else:
                            data[(vidx, ) + oidx] = datain[iidx]

                        offset = reader.get_offset(tile, hour, varname)
                        fid.seek(offset * floatsize)
                        fid.write(data[vidx].tobytes())
                print(
                    f"\rwrite {date} {subd:02} {tile:04} {quarter} {varname}",
                    end="",
                    flush=True)
    print(f"\rwrite {date} {subd:02} {tile:04} -> done", flush=True)
Example #9
def create_domain_polygon(dataset):
    from shapely.geometry import Polygon
    # unary_union replaces cascaded_union, which was removed in Shapely 2.x.
    from shapely.ops import unary_union

    nc = ncDataset(dataset.topology_file)
    nv = nc.variables['nv'][:, :].T-1
    #print np.max(np.max(nv))
    latn = nc.variables['lat'][:]
    lonn = nc.variables['lon'][:]
    lon = nc.variables['lonc'][:]
    lat = nc.variables['latc'][:]
    #print lat, lon, latn, lonn, nv
    index_pos = numpy.asarray(numpy.where(
        (lat <= 90) & (lat >= -90) &
        (lon <= 180) & (lon > 0),)).squeeze()
    index_neg = numpy.asarray(numpy.where(
        (lat <= 90) & (lat >= -90) &
        (lon < 0) & (lon >= -180),)).squeeze()
    #print np.max(np.max(nv)), np.shape(nv), np.shape(lonn), np.shape(latn)
    if len(index_pos) > 0:
        p = deque()
        p_add = p.append
        for i in index_pos:
            flon, flat = lonn[nv[i, 0]], latn[nv[i, 0]]
            lon1, lat1 = lonn[nv[i, 1]], latn[nv[i, 1]]
            lon2, lat2 = lonn[nv[i, 2]], latn[nv[i, 2]]
            if flon < -90:
                flon = flon + 360
            if lon1 < -90:
                lon1 = lon1 + 360
            if lon2 < -90:
                lon2 = lon2 + 360
            p_add(Polygon(((flon, flat),
                           (lon1, lat1),
                           (lon2, lat2),
                           (flon, flat),)))
        domain_pos = unary_union(p)
    if len(index_neg) > 0:
        p = deque()
        p_add = p.append
        for i in index_neg:
            flon, flat = lonn[nv[i, 0]], latn[nv[i, 0]]
            lon1, lat1 = lonn[nv[i, 1]], latn[nv[i, 1]]
            lon2, lat2 = lonn[nv[i, 2]], latn[nv[i, 2]]
            if flon > 90:
                flon = flon - 360
            if lon1 > 90:
                lon1 = lon1 - 360
            if lon2 > 90:
                lon2 = lon2 - 360
            p_add(Polygon(((flon, flat),
                           (lon1, lat1),
                           (lon2, lat2),
                           (flon, flat),)))
        domain_neg = unary_union(p)
    if len(index_neg) > 0 and len(index_pos) > 0:
        from shapely.prepared import prep
        domain = prep(unary_union((domain_neg, domain_pos,)))
    elif len(index_neg) > 0:
        domain = domain_neg
    elif len(index_pos) > 0:
        domain = domain_pos
    else:
        logger.info(nc.__str__())
        logger.info(lat)
        logger.info(lon)
        logger.error("Domain file creation - No data in topology file Length of positive:%u Length of negative:%u" % (len(index_pos), len(index_neg)))
        raise ValueError("No data in file")

    # pickle needs a binary file handle
    with open(dataset.domain_file, 'wb') as f:
        pickle.dump(domain, f)
    nc.close()
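
The triangle-union idea in isolation: a small sketch with invented coordinates that unions two triangles with shapely and runs a point-in-domain test (unary_union replaces the deprecated cascaded_union).

from shapely.geometry import Point, Polygon
from shapely.ops import unary_union
from shapely.prepared import prep

triangles = [
    Polygon([(0, 0), (1, 0), (0, 1)]),
    Polygon([(1, 0), (1, 1), (0, 1)]),
]
domain = prep(unary_union(triangles))
print(domain.contains(Point(0.5, 0.5)))  # True: the point lies inside the unioned square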
Example #10
# Demonstrate exporting data for a specified region into a single .nc file
if __name__ == "__main__":
    from os import listdir
    from os.path import join
    from datetime import datetime
    import numpy as np
    from netCDF4 import Dataset as ncDataset
    h5path = r"F:\FY-4A"  # directory holding the FY-4A Level-1 data
    ncname = r"F:\FY-4A\nc\test.nc"
    h5list = [join(h5path, x) for x in listdir(h5path)
              if "4000M" in x and "FDI" in x]
    step = 50  # 0.050°
    lat = np.arange(40000, 20000-1, -step) / 1000  # 40-20°N
    lon = np.arange(110000, 130000+1, step) / 1000  # 110-130°E
    channelnums = ("02", "04", "05", "06", "12", "14")
    ncfile = ncDataset(ncname, 'w', format="NETCDF4")
    ncfile.createDimension("lat", len(lat))
    ncfile.createDimension("lon", len(lon))
    ncfile.createDimension("time", None)  # unlimited length
    nclat = ncfile.createVariable("lat", "f4", ("lat",))
    nclon = ncfile.createVariable("lon", "f4", ("lon",))
    nctime = ncfile.createVariable("time", "f8", ("time",))
    nctime.units = "minutes since 0001-01-01 00:00:00.0"
    t = 0
    for channelnum in channelnums:
        channelname = "Channel" + channelnum
        ncfile.createVariable(channelname, "f4", ("time", "lat", "lon"))
    ncfile.set_auto_mask(False)
    nclat[:] = lat
    nclon[:] = lon
    lon, lat = np.meshgrid(lon, lat)
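    # Hedged sketch of how the loop the excerpt stops short of might continue:
    # append one time step per input file along the unlimited "time" dimension.
    # read_channel() is a hypothetical regridding helper (not part of the
    # original), and the timestamp below is only a placeholder.
    from netCDF4 import date2num
    for t, h5file in enumerate(h5list):
        obs_time = datetime(2021, 1, 1, 0, 0)  # placeholder; parse it from the L1 filename in practice
        nctime[t] = date2num(obs_time, nctime.units)
        for channelnum in channelnums:
            # read_channel is assumed to return a (lat, lon)-shaped array.
            ncfile.variables["Channel" + channelnum][t, :, :] = read_channel(
                h5file, channelnum, lat, lon)
    ncfile.close()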
Example #11
    def save_as_netcdf(self, filepath):
        """
        Save the ugrid object as a netcdf file.

        :param filepath: path to the file you want to save to.  An existing
                         file will be clobbered.

        Follows the convention established by the netcdf UGRID working group:

        http://publicwiki.deltares.nl/display/NETCDF/Deltares+CF+proposal+for+Unstructured+Grid+data+model

        """
        mesh_name = self.mesh_name

        # FIXME: Why not use netCDF4.Dataset instead of renaming?
        from netCDF4 import Dataset as ncDataset
        # Create a new netcdf file.
        with ncDataset(filepath, mode="w", clobber=True) as nclocal:

            nclocal.createDimension(mesh_name + '_num_node', len(self.nodes))
            if self._edges is not None:
                nclocal.createDimension(mesh_name + '_num_edge',
                                        len(self.edges))
            if self._boundaries is not None:
                nclocal.createDimension(mesh_name + '_num_boundary',
                                        len(self.boundaries))
            if self._faces is not None:
                nclocal.createDimension(mesh_name + '_num_face',
                                        len(self.faces))
                nclocal.createDimension(mesh_name + '_num_vertices',
                                        self.faces.shape[1])
            nclocal.createDimension('two', 2)

            # mesh topology
            mesh = nclocal.createVariable(
                mesh_name,
                IND_DT,
                (),
            )
            mesh.cf_role = "mesh_topology"
            mesh.long_name = "Topology data of 2D unstructured mesh"
            mesh.topology_dimension = 2
            mesh.node_coordinates = "{0}_node_lon {0}_node_lat".format(
                mesh_name)  # noqa

            if self.edges is not None:
                # Attribute required if variables will be defined on edges.
                mesh.edge_node_connectivity = mesh_name + "_edge_nodes"
                if self.edge_coordinates is not None:
                    # Optional attribute (requires edge_node_connectivity).
                    coord = "{0}_edge_lon {0}_edge_lat".format
                    mesh.edge_coordinates = coord(mesh_name)
            if self.faces is not None:
                mesh.face_node_connectivity = mesh_name + "_face_nodes"
                if self.face_coordinates is not None:
                    # Optional attribute.
                    coord = "{0}_face_lon {0}_face_lat".format
                    mesh.face_coordinates = coord(mesh_name)
            if self.face_edge_connectivity is not None:
                # Optional attribute (requires edge_node_connectivity).
                mesh.face_edge_connectivity = mesh_name + "_face_edges"
            if self.face_face_connectivity is not None:
                # Optional attribute.
                mesh.face_face_connectivity = mesh_name + "_face_links"
            if self.boundaries is not None:
                mesh.boundary_node_connectivity = mesh_name + "_boundary_nodes"

            # FIXME: This could be re-factored to be more generic, rather than
            # separate for each type of data see the coordinates example below.
            if self.faces is not None:
                nc_create_var = nclocal.createVariable
                face_nodes = nc_create_var(
                    mesh_name + "_face_nodes",
                    IND_DT,
                    (mesh_name + '_num_face', mesh_name + '_num_vertices'),
                )
                face_nodes[:] = self.faces

                face_nodes.cf_role = "face_node_connectivity"
                face_nodes.long_name = ("Maps every triangular face to "
                                        "its three corner nodes.")
                face_nodes.start_index = 0

            if self.edges is not None:
                nc_create_var = nclocal.createVariable
                edge_nodes = nc_create_var(
                    mesh_name + "_edge_nodes",
                    IND_DT,
                    (mesh_name + '_num_edge', 'two'),
                )
                edge_nodes[:] = self.edges

                edge_nodes.cf_role = "edge_node_connectivity"
                edge_nodes.long_name = ("Maps every edge to the two "
                                        "nodes that it connects.")
                edge_nodes.start_index = 0

            if self.boundaries is not None:
                nc_create_var = nclocal.createVariable
                boundary_nodes = nc_create_var(
                    mesh_name + "_boundary_nodes",
                    IND_DT,
                    (mesh_name + '_num_boundary', 'two'),
                )
                boundary_nodes[:] = self.boundaries

                boundary_nodes.cf_role = "boundary_node_connectivity"
                boundary_nodes.long_name = ("Maps every boundary segment to "
                                            "the two nodes that it connects.")
                boundary_nodes.start_index = 0

            # Optional "coordinate variables."
            for location in ['face', 'edge', 'boundary']:
                loc = "{0}_coordinates".format(location)
                if getattr(self, loc) is not None:
                    for axis, ind in [('lat', 1), ('lon', 0)]:
                        nc_create_var = nclocal.createVariable
                        name = "{0}_{1}_{2}".format(mesh_name, location, axis)
                        dimensions = "{0}_num_{1}".format(mesh_name, location)
                        var = nc_create_var(
                            name,
                            NODE_DT,
                            dimensions=(dimensions,),
                        )
                        loc = "{0}_coordinates".format(location)
                        var[:] = getattr(self, loc)[:, ind]
                        # Attributes of the variable.
                        var.standard_name = ("longitude"
                                             if axis == 'lon' else 'latitude')
                        var.units = ("degrees_east"
                                     if axis == 'lon' else 'degrees_north')
                        name = "Characteristics {0} of 2D mesh {1}".format
                        var.long_name = name(var.standard_name, location)

            # The node data.
            node_lon = nclocal.createVariable(
                mesh_name + '_node_lon',
                self._nodes.dtype,
                (mesh_name + '_num_node', ),
                chunksizes=(len(self.nodes), ),
                # zlib=False,
                # complevel=0,
            )
            node_lon[:] = self.nodes[:, 0]
            node_lon.standard_name = "longitude"
            node_lon.long_name = "Longitude of 2D mesh nodes."
            node_lon.units = "degrees_east"

            node_lat = nclocal.createVariable(
                mesh_name + '_node_lat',
                self._nodes.dtype,
                (mesh_name + '_num_node', ),
                chunksizes=(len(self.nodes), ),
                # zlib=False,
                # complevel=0,
            )
            node_lat[:] = self.nodes[:, 1]
            node_lat.standard_name = "latitude"
            node_lat.long_name = "Latitude of 2D mesh nodes."
            node_lat.units = "degrees_north"

            # Write the associated data.
            for dataset in self.data.values():
                if dataset.location == 'node':
                    shape = (mesh_name + '_num_node', )
                    coordinates = "{0}_node_lon {0}_node_lat".format(mesh_name)
                    chunksizes = (len(self.nodes), )
                elif dataset.location == 'face':
                    shape = (mesh_name + '_num_face', )
                    coord = "{0}_face_lon {0}_face_lat".format
                    coordinates = (coord(mesh_name) if self.face_coordinates
                                   is not None else None)
                    chunksizes = (len(self.faces), )
                elif dataset.location == 'edge':
                    shape = (mesh_name + '_num_edge', )
                    coord = "{0}_edge_lon {0}_edge_lat".format
                    coordinates = (coord(mesh_name) if self.edge_coordinates
                                   is not None else None)
                    chunksizes = (len(self.edges), )
                elif dataset.location == 'boundary':
                    shape = (mesh_name + '_num_boundary', )
                    coord = "{0}_boundary_lon {0}_boundary_lat".format
                    bcoord = self.boundary_coordinates
                    coordinates = (coord(mesh_name)
                                   if bcoord is not None else None)
                    chunksizes = (len(self.boundaries), )
                data_var = nclocal.createVariable(
                    dataset.name,
                    dataset.data.dtype,
                    shape,
                    chunksizes=chunksizes,
                    # zlib=False,
                    # complevel=0,
                )
                data_var[:] = dataset.data
                # Add the standard attributes:
                data_var.location = dataset.location
                data_var.mesh = mesh_name
                if coordinates is not None:
                    data_var.coordinates = coordinates
                # Add the extra attributes.
                for att_name, att_value in dataset.attributes.items():
                    setattr(data_var, att_name, att_value)
            nclocal.sync()
Example #12
    def save_as_netcdf(self, filepath):
        """
        Save the ugrid object as a netcdf file.

        :param filepath: path to the file you want to save to.  An existing
                         file will be clobbered.

        Follows the convention established by the netcdf UGRID working group:

        http://publicwiki.deltares.nl/display/NETCDF/Deltares+CF+proposal+for+Unstructured+Grid+data+model

        """
        mesh_name = self.mesh_name

        # FIXME: Why not use netCDF4.Dataset instead of renaming?
        from netCDF4 import Dataset as ncDataset
        # Create a new netcdf file.
        with ncDataset(filepath, mode="w", clobber=True) as nclocal:

            nclocal.createDimension(mesh_name+'_num_node', len(self.nodes))
            if self._edges is not None:
                nclocal.createDimension(mesh_name+'_num_edge', len(self.edges))
            if self._boundaries is not None:
                nclocal.createDimension(mesh_name+'_num_boundary',
                                        len(self.boundaries))
            if self._faces is not None:
                nclocal.createDimension(mesh_name+'_num_face', len(self.faces))
                nclocal.createDimension(mesh_name+'_num_vertices',
                                        self.faces.shape[1])
            nclocal.createDimension('two', 2)

            # Mesh topology.
            mesh = nclocal.createVariable(mesh_name, IND_DT, (),)
            mesh.cf_role = "mesh_topology"
            mesh.long_name = "Topology data of 2D unstructured mesh"
            mesh.topology_dimension = 2
            coord = "{0}_node_lon {0}_node_lat".format
            mesh.node_coordinates = coord(mesh_name)

            if self.edges is not None:
                # Attribute required if variables will be defined on edges.
                mesh.edge_node_connectivity = mesh_name+"_edge_nodes"
                if self.edge_coordinates is not None:
                    # Optional attribute (requires edge_node_connectivity).
                    coord = "{0}_edge_lon {0}_edge_lat".format
                    mesh.edge_coordinates = coord(mesh_name)
            if self.faces is not None:
                mesh.face_node_connectivity = mesh_name+"_face_nodes"
                if self.face_coordinates is not None:
                    # Optional attribute.
                    coord = "{0}_face_lon {0}_face_lat".format
                    mesh.face_coordinates = coord(mesh_name)
            if self.face_edge_connectivity is not None:
                # Optional attribute (requires edge_node_connectivity).
                mesh.face_edge_connectivity = mesh_name + "_face_edges"
            if self.face_face_connectivity is not None:
                # Optional attribute.
                mesh.face_face_connectivity = mesh_name + "_face_links"
            if self.boundaries is not None:
                mesh.boundary_node_connectivity = mesh_name+"_boundary_nodes"

            # FIXME: This could be re-factored to be more generic, rather than
            # separate for each type of data see the coordinates example below.
            if self.faces is not None:
                nc_create_var = nclocal.createVariable
                face_nodes = nc_create_var(mesh_name + "_face_nodes", IND_DT,
                                           (mesh_name + '_num_face',
                                            mesh_name + '_num_vertices'),)
                face_nodes[:] = self.faces

                face_nodes.cf_role = "face_node_connectivity"
                face_nodes.long_name = ("Maps every triangular face to "
                                        "its three corner nodes.")
                face_nodes.start_index = 0

            if self.edges is not None:
                nc_create_var = nclocal.createVariable
                edge_nodes = nc_create_var(mesh_name + "_edge_nodes", IND_DT,
                                           (mesh_name + '_num_edge', 'two'),)
                edge_nodes[:] = self.edges

                edge_nodes.cf_role = "edge_node_connectivity"
                edge_nodes.long_name = ("Maps every edge to the two "
                                        "nodes that it connects.")
                edge_nodes.start_index = 0

            if self.boundaries is not None:
                nc_create_var = nclocal.createVariable
                boundary_nodes = nc_create_var(mesh_name + "_boundary_nodes",
                                               IND_DT,
                                               (mesh_name + '_num_boundary',
                                                'two'),)
                boundary_nodes[:] = self.boundaries

                boundary_nodes.cf_role = "boundary_node_connectivity"
                boundary_nodes.long_name = ("Maps every boundary segment to "
                                            "the two nodes that it connects.")
                boundary_nodes.start_index = 0

            # Optional "coordinate variables."
            for location in ['face', 'edge', 'boundary']:
                loc = "{0}_coordinates".format(location)
                if getattr(self, loc) is not None:
                    for axis, ind in [('lat', 1), ('lon', 0)]:
                        nc_create_var = nclocal.createVariable
                        name = "{0}_{1}_{2}".format(mesh_name, location, axis)
                        dimensions = "{0}_num_{1}".format(mesh_name, location)
                        var = nc_create_var(name, NODE_DT,
                                            dimensions=(dimensions,),)
                        loc = "{0}_coordinates".format(location)
                        var[:] = getattr(self, loc)[:, ind]
                        # Attributes of the variable.
                        var.standard_name = ("longitude" if axis == 'lon'
                                             else 'latitude')
                        var.units = ("degrees_east" if axis == 'lon'
                                     else 'degrees_north')
                        name = "Characteristics {0} of 2D mesh {1}".format
                        var.long_name = name(var.standard_name, location)

            # The node data.
            node_lon = nclocal.createVariable(mesh_name+'_node_lon',
                                              self._nodes.dtype,
                                              (mesh_name+'_num_node',),
                                              chunksizes=(len(self.nodes), ),
                                              # zlib=False,
                                              # complevel=0,
                                              )
            node_lon[:] = self.nodes[:, 0]
            node_lon.standard_name = "longitude"
            node_lon.long_name = "Longitude of 2D mesh nodes."
            node_lon.units = "degrees_east"

            node_lat = nclocal.createVariable(mesh_name+'_node_lat',
                                              self._nodes.dtype,
                                              (mesh_name+'_num_node',),
                                              chunksizes=(len(self.nodes), ),
                                              # zlib=False,
                                              # complevel=0,
                                              )
            node_lat[:] = self.nodes[:, 1]
            node_lat.standard_name = "latitude"
            node_lat.long_name = "Latitude of 2D mesh nodes."
            node_lat.units = "degrees_north"

            # Write the associated data.
            for dataset in self.data.values():
                if dataset.location == 'node':
                    shape = (mesh_name + '_num_node',)
                    coordinates = "{0}_node_lon {0}_node_lat".format(mesh_name)
                    chunksizes = (len(self.nodes), )
                elif dataset.location == 'face':
                    shape = (mesh_name + '_num_face',)
                    coord = "{0}_face_lon {0}_face_lat".format
                    coordinates = (coord(mesh_name) if self.face_coordinates
                                   is not None else None)
                    chunksizes = (len(self.faces), )
                elif dataset.location == 'edge':
                    shape = (mesh_name + '_num_edge',)
                    coord = "{0}_edge_lon {0}_edge_lat".format
                    coordinates = (coord(mesh_name) if self.edge_coordinates
                                   is not None else None)
                    chunksizes = (len(self.edges), )
                elif dataset.location == 'boundary':
                    shape = (mesh_name + '_num_boundary',)
                    coord = "{0}_boundary_lon {0}_boundary_lat".format
                    bcoord = self.boundary_coordinates
                    coordinates = (coord(mesh_name) if bcoord
                                   is not None else None)
                    chunksizes = (len(self.boundaries), )
                data_var = nclocal.createVariable(dataset.name,
                                                  dataset.data.dtype,
                                                  shape,
                                                  chunksizes=chunksizes,
                                                  # zlib=False,
                                                  # complevel=0,
                                                  )
                data_var[:] = dataset.data
                # Add the standard attributes:
                data_var.location = dataset.location
                data_var.mesh = mesh_name
                if coordinates is not None:
                    data_var.coordinates = coordinates
                # Add the extra attributes.
                for att_name, att_value in dataset.attributes.items():
                    setattr(data_var, att_name, att_value)
            nclocal.sync()
Example #13

def seed_everything(seed=427):
    random.seed(seed)
    np.random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    torch.manual_seed(seed)


#     torch.set_deterministic(True)

seed_everything()

print("=" * 10 + " 1. Loading data " + "=" * 10)

SODA_train = ncDataset('/data/enso_round1_train_20210201/CMIP_train.nc')
SODA_label = ncDataset('/data/enso_round1_train_20210201/CMIP_label.nc')

total_sst = SODA_train.variables['sst'][:].data[:, :12]
total_t300 = SODA_train.variables['t300'][:].data[:, :12]
total_ua = SODA_train.variables['ua'][:].data[:, :12]
total_va = SODA_train.variables['va'][:].data[:, :12]

total_label = SODA_label.variables['nino'][:].data[:, 12:36]
sst_label = SODA_train.variables['sst'][:].data[:, 12:36]
t300_label = SODA_train.variables['t300'][:].data[:, 12:36]
ua_label = SODA_train.variables['ua'][:].data[:, 12:36]
va_label = SODA_train.variables['va'][:].data[:, 12:36]

total_ua = np.expand_dims(np.nan_to_num(total_ua), 2)
total_va = np.expand_dims(np.nan_to_num(total_va), 2)
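
# Hedged sketch (not necessarily the original script's next step): the two
# remaining predictor fields are typically given the same channel axis, then
# all four are concatenated and wrapped as a float32 tensor for training.
total_sst = np.expand_dims(np.nan_to_num(total_sst), 2)
total_t300 = np.expand_dims(np.nan_to_num(total_t300), 2)
train_x = np.concatenate([total_sst, total_t300, total_ua, total_va], axis=2)
train_x = torch.from_numpy(train_x).float()
print(train_x.shape)  # (samples, 12 months, 4 fields, lat, lon)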
Example #14
def create_topology(dataset_name, url, lat_var='lat', lon_var='lon'):
    try:
        #with s1:
        nc = ncDataset(url)
        if "SSMI" in nc.short_name:
            dataset_name = 'ssmi_topology'
            if os.path.isfile(os.path.join(config.topologypath, dataset_name+".nc")):
                return

        nclocalpath = os.path.join(config.topologypath, dataset_name+".nc.updating")


        nclocal = ncDataset(nclocalpath, mode="w", clobber=True)
        if "nv" in nc.variables:
            logger.info("identified as fvcom")
            grid = 'False'

            nclocal.createDimension('cell', nc.variables['latc'].shape[0])#90415)
            nclocal.createDimension('node', nc.variables['lat'].shape[0])
            nclocal.createDimension('time', nc.variables['time'].shape[0])
            nclocal.createDimension('corners', nc.variables['nv'].shape[0])

            lat = nclocal.createVariable('lat', 'f', ('node',), chunksizes=nc.variables['lat'].shape, zlib=False, complevel=0)
            lon = nclocal.createVariable('lon', 'f', ('node',), chunksizes=nc.variables['lat'].shape, zlib=False, complevel=0)
            latc = nclocal.createVariable('latc', 'f', ('cell',), chunksizes=nc.variables['latc'].shape, zlib=False, complevel=0)
            lonc = nclocal.createVariable('lonc', 'f', ('cell',), chunksizes=nc.variables['latc'].shape, zlib=False, complevel=0)
            nv = nclocal.createVariable('nv', 'u8', ('corners', 'cell',), chunksizes=nc.variables['nv'].shape, zlib=False, complevel=0)

            time = nclocal.createVariable('time', 'f8', ('time',), chunksizes=nc.variables['time'].shape, zlib=False, complevel=0) #d
            logger.info("done creating")
            lontemp = nc.variables['lon'][:]
            lonctemp = nc.variables['lonc'][:]

            if np.max(lontemp) > 180:
                lontemp[lontemp > 180] = lontemp[lontemp > 180] - 360
                lon[:] = np.asarray(lontemp)
            #elif np.min(lontemp) < -180:
            #    print "lessthan"
            #    lon[:] = np.asarray(lontemp) + 360
            #    lonc[:] = np.asarray(nc.variables['lonc'][:] + 360)
            else:
                lon[:] = lontemp
            if np.max(lonctemp) > 180:
                lonctemp[lonctemp > 180] = lonctemp[lonctemp > 180] - 360
                lonc[:] = np.asarray(lonctemp)
            else:
                lonc[:] = lonctemp

            lat[:] = nc.variables['lat'][:]
            latc[:] = nc.variables['latc'][:]

            nv[:,:] = nc.variables['nv'][:,:]
            logger.info("done filling vars")
            # DECODE the FVCOM datetime string (Time) and save as a high precision datenum
            timestrs = nc.variables['Times'][:]  # format: "2013-01-15T00:00:00.000000"
            dates = [datetime.strptime(timestrs[i, :].tobytes().decode().replace('\x00', ""), "%Y-%m-%dT%H:%M:%S.%f") for i in range(len(timestrs[:, 0]))]
            time[:] = date2num(dates, units=time_units)# use netCDF4's date2num function
            #time[:] = nc.variables['time'][:]
            logger.info("done time conversion")
            time.units = time_units
            #time.units = nc.variables['time'].units
            nclocal.sync()
            nclocal.grid = grid
            nclocal.sync()
            logger.info("data written to file")

        elif "element" in nc.variables:
            logger.info("identified as adcirc")
            grid = 'False'
            nclocal.createDimension('node', nc.variables['x'].shape[0])
            nclocal.createDimension('cell', nc.variables['element'].shape[0])
            nclocal.createDimension('time', nc.variables['time'].shape[0])
            nclocal.createDimension('corners', nc.variables['element'].shape[1])

            lat = nclocal.createVariable('lat', 'f', ('node',), chunksizes=(nc.variables['x'].shape[0],), zlib=False, complevel=0)
            lon = nclocal.createVariable('lon', 'f', ('node',), chunksizes=(nc.variables['x'].shape[0],), zlib=False, complevel=0)
            latc = nclocal.createVariable('latc', 'f', ('cell',), chunksizes=(nc.variables['element'].shape[0],), zlib=False, complevel=0)
            lonc = nclocal.createVariable('lonc', 'f', ('cell',), chunksizes=(nc.variables['element'].shape[0],), zlib=False, complevel=0)
            #if nc.variables['element'].shape[0] == 3:
            #    nv = nclocal.createVariable('nv', 'u8', ('corners', 'cell',), chunksizes=nc.variables['element'].shape, zlib=False, complevel=0)
            #    nv[:,:] = nc.variables['element'][:,:]
            #else:
            nv = nclocal.createVariable('nv', 'u8', ('corners', 'cell',), chunksizes=nc.variables['element'].shape[::-1], zlib=False, complevel=0)

            time = nclocal.createVariable('time', 'f8', ('time',), chunksizes=nc.variables['time'].shape, zlib=False, complevel=0)

            lattemp = nc.variables['y'][:]
            lontemp = nc.variables['x'][:]
            lat[:] = lattemp
            lontemp[lontemp > 180] = lontemp[lontemp > 180] - 360

            lon[:] = lontemp
            import matplotlib.tri as Tri
            tri = Tri.Triangulation(lontemp,
                                    lattemp,
                                    nc.variables['element'][:,:]-1
                                    )

            lonc[:] = lontemp[tri.triangles].mean(axis=1)
            latc[:] = lattemp[tri.triangles].mean(axis=1)
            nv[:,:] = nc.variables['element'][:,:].T
            time[:] = nc.variables['time'][:]
            time.units = nc.variables['time'].units
            nclocal.sync()
            nclocal.grid = grid
            nclocal.sync()
            logger.info("data written to file")
        elif "ele" in nc.variables:
            for varname in nc.variables:
                if "mesh" in varname:
                    meshcoords = nc.variables[varname].node_coordinates.split(" ")
                    lonname, latname = meshcoords[0], meshcoords[1]
            logger.info("identified as selfe")
            grid = 'False'
            nclocal.createDimension('node', nc.variables['x'].shape[0])
            nclocal.createDimension('cell', nc.variables['ele'].shape[1])
            nclocal.createDimension('time', nc.variables['time'].shape[0])
            nclocal.createDimension('corners', nc.variables['ele'].shape[0])

            lat = nclocal.createVariable('lat', 'f', ('node',), chunksizes=(nc.variables['x'].shape[0],), zlib=False, complevel=0)
            lon = nclocal.createVariable('lon', 'f', ('node',), chunksizes=(nc.variables['x'].shape[0],), zlib=False, complevel=0)
            latc = nclocal.createVariable('latc', 'f', ('cell',), chunksizes=(nc.variables['ele'].shape[1],), zlib=False, complevel=0)
            lonc = nclocal.createVariable('lonc', 'f', ('cell',), chunksizes=(nc.variables['ele'].shape[1],), zlib=False, complevel=0)
            #if nc.variables['element'].shape[0] == 3:
            #    nv = nclocal.createVariable('nv', 'u8', ('corners', 'cell',), chunksizes=nc.variables['element'].shape, zlib=False, complevel=0)
            #    nv[:,:] = nc.variables['element'][:,:]
            #else:
            nv = nclocal.createVariable('nv', 'u8', ('corners', 'cell',), chunksizes=nc.variables['ele'].shape, zlib=False, complevel=0)
            time = nclocal.createVariable('time', 'f8', ('time',), chunksizes=nc.variables['time'].shape, zlib=False, complevel=0)

            lattemp = nc.variables[latname][:]
            lontemp = nc.variables[lonname][:]
            lat[:] = lattemp
            lontemp[lontemp > 180] = lontemp[lontemp > 180] - 360

            lon[:] = lontemp
            import matplotlib.tri as Tri
            tri = Tri.Triangulation(lontemp,
                                    lattemp,
                                    nc.variables['ele'][:,:].T-1
                                    )

            lonc[:] = lontemp[tri.triangles].mean(axis=1)
            latc[:] = lattemp[tri.triangles].mean(axis=1)
            nv[:,:] = nc.variables['ele'][:,:]
            time[:] = nc.variables['time'][:]
            time.units = nc.variables['time'].units
            nclocal.sync()
            nclocal.grid = grid
            nclocal.sync()
            logger.info("data written to file")
        else:
            logger.info("identified as grid")
            latname, lonname = lat_var, lon_var
            if latname not in nc.variables:
                # Fall back to sniffing coordinate variables by their units.
                for key in nc.variables:
                    try:
                        temp_units = nc.variables[key].units
                        if ('_u' not in key) and ('_v' not in key) and ('_psi' not in key):
                            if 'degree' in temp_units:
                                if 'east' in temp_units:
                                    lonname = key
                                elif 'north' in temp_units:
                                    latname = key
                                else:
                                    raise ValueError("No valid coordinates found in source netcdf file")
                    except Exception:
                        # Variables without a units attribute are skipped.
                        pass
            if nc.variables[latname].ndim > 1:
                igrid = nc.variables[latname].shape[0]
                jgrid = nc.variables[latname].shape[1]
                grid = 'cgrid'
            else:
                grid = 'rgrid'
                igrid = nc.variables[latname].shape[0]
                jgrid = nc.variables[lonname].shape[0]

            if "SSMI" in nc.short_name:
                latchunk, lonchunk = (igrid,), (jgrid,)
                nclocal.createDimension('lat', igrid)
                nclocal.createDimension('lon', jgrid)
            else:
                latchunk, lonchunk = (igrid,jgrid,), (igrid,jgrid,)
                logger.info("native grid style identified")
                nclocal.createDimension('igrid', igrid)
                nclocal.createDimension('jgrid', jgrid)

            if "SSMI" in nc.short_name:
                nclocal.createDimension('part_of_day', 2)
                time = nclocal.createVariable('time', 'f8', ('lon', 'lat', 'part_of_day',), chunksizes=(jgrid, igrid, 2), zlib=False, complevel=0)
            elif "time" in nc.variables:
                nclocal.createDimension('time', nc.variables['time'].shape[0])
                if nc.variables['time'].ndim > 1:
                    time = nclocal.createVariable('time', 'f8', ('time',), chunksizes=(nc.variables['time'].shape[0],), zlib=False, complevel=0)
                else:
                    time = nclocal.createVariable('time', 'f8', ('time',), chunksizes=nc.variables['time'].shape, zlib=False, complevel=0)
            else:
                nclocal.createDimension('time', 1)
                time = nclocal.createVariable('time', 'f8', ('time',), chunksizes=(1,), zlib=False, complevel=0)

            if "SSMI" in nc.short_name:
                lat = nclocal.createVariable('lat', 'f', ('lat',), chunksizes=latchunk, zlib=False, complevel=0)
                lon = nclocal.createVariable('lon', 'f', ('lon',), chunksizes=lonchunk, zlib=False, complevel=0)
                grid = 'cgrid'
            else:
                lat = nclocal.createVariable('lat', 'f', ('igrid','jgrid',), chunksizes=latchunk, zlib=False, complevel=0)
                lon = nclocal.createVariable('lon', 'f', ('igrid','jgrid',), chunksizes=lonchunk, zlib=False, complevel=0)
            logger.info("variables created in cache")
            lontemp = nc.variables[lonname][:]
            lontemp[lontemp > 180] = lontemp[lontemp > 180] - 360

            if grid == 'rgrid':
                lon[:], lat[:] = np.meshgrid(lontemp, nc.variables[latname][:])
                grid = 'cgrid'
            else:
                lon[:] = lontemp
                lat[:] = nc.variables[latname][:]

            # if "SSMI" in nc.short_name:
            #     file_date_srt = nc.original_filename[nc.original_filename.index('_')+1:9+nc.original_filename.index('_')]

            if "time" in nc.variables:
                if nc.variables['time'].ndim > 1:
                    if "SSMI" in nc.short_name:
                        time[:] = nc.variables['time'][:]
                        time.units = time_units
                    else:
                        _str_data = nc.variables['time'][:, :]
                        #print _str_data.shape, type(_str_data), "''", str(_str_data[0,:].tostring().replace(" ","")), "''"
                        dates = [parse(_str_data[i, :].tobytes().decode()) for i in range(len(_str_data[:, 0]))]
                        # @param dates: A datetime object or a sequence of datetime objects.
                        time[:] = date2num(dates, time_units)
                        time.units = time_units
                else:
                    time[:] = nc.variables['time'][:]
                    time.units = nc.variables['time'].units
            else:
                time[:] = np.ones(1)
                time.units = time_units

            logger.info("data written to file")
            while 'grid' not in nclocal.ncattrs():
                nclocal.setncattr('grid', 'cgrid')
                nclocal.sync()
            nclocal.sync()
            nclocal.close()
            nc.close()

        shutil.move(nclocalpath, nclocalpath.replace(".updating", ""))
        if not ((os.path.exists(nclocalpath.replace(".updating", "").replace(".nc",'_nodes.dat')) and os.path.exists(nclocalpath.replace(".updating", "").replace(".nc","_nodes.idx")))):
            #with s1:
            build_tree.build_from_nc(nclocalpath.replace(".updating", ""))
        if grid == 'False':
            if not os.path.exists(nclocalpath.replace(".updating", "")[:-3] + '.domain'):
                #with s2:
                create_domain_polygon(nclocalpath.replace(".updating", ""))

    except Exception as detail:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        logger.error("Disabling Error: " +\
                                 repr(traceback.format_exception(exc_type, exc_value,
                                              exc_traceback)))
        try:
            nclocal.close()
        except:
            pass
        try:
            nc.close()
        except:
            pass 
        if os.path.exists(nclocalpath):
            os.unlink(nclocalpath)
        raise
Example #15
    def save_as_netcdf(self, filepath):
        """
        Save the ugrid object as a netcdf file.

        :param filepath: path to the file you want to save to.
                         An existing file will be clobbered.

        Follows the convention established by the netcdf UGRID working group:

        http://publicwiki.deltares.nl/display/NETCDF/Deltares+CF+proposal+for+Unstructured+Grid+data+model

        """

        from netCDF4 import Dataset as ncDataset
        from netCDF4 import num2date, date2num
        # create a new netcdf file
        nclocal = ncDataset(filepath, mode="w", clobber=True)

        # dimensions:
        # nMesh2_node = 4 ; // nNodes
        # nMesh2_edge = 5 ; // nEdges
        # nMesh2_face = 2 ; // nFaces
        # nMesh2_face_links = 1 ; // nFacePairs


        nclocal.createDimension('num_nodes', len(self.nodes))
        nclocal.createDimension('num_edges', len(self.edges))
        nclocal.createDimension('num_faces', len(self.faces))
        nclocal.createDimension('num_vertices', self.faces.shape[1])
        nclocal.createDimension('two', 2)

        #mesh topology
        mesh = nclocal.createVariable('mesh', IND_DT, (), )
        mesh.cf_role = "mesh_topology" 
        mesh.long_name = "Topology data of 2D unstructured mesh" 
        mesh.topology_dimension = 2 
        mesh.node_coordinates = "node_lon node_lat" 
        mesh.face_node_connectivity = "mesh_face_nodes" 

        mesh.edge_node_connectivity = "mesh_edge_nodes"  ## attribute required if variables will be defined on edges
        mesh.edge_coordinates = "mesh_edge_lon mesh_edge_lat"  ## optional attribute (requires edge_node_connectivity)
        mesh.face_coordinates = "mesh_face_lon mesh_face_lat" ##  optional attribute
        mesh.face_edge_connectivity = "mesh_face_edges"  ## optional attribute (requires edge_node_connectivity)
        mesh.face_face_connectivity = "mesh_face_links"  ## optional attribute

        face_nodes = nclocal.createVariable("mesh_face_nodes",
                                            IND_DT,
                                            ('num_faces', 'num_vertices'),
                                            )
        face_nodes[:] = self.faces

        face_nodes.cf_role = "face_node_connectivity"
        face_nodes.long_name = "Maps every triangular face to its three corner nodes."
        face_nodes.start_index = 0

        edge_nodes = nclocal.createVariable("mesh_edge_nodes",
                                            IND_DT,
                                            ('num_edges', 'two'),
                                            )
        edge_nodes[:] = self.edges

        edge_nodes.cf_role = "edge_node_connectivity"
        edge_nodes.long_name = "Maps every edge to the two nodes that it connects."
        edge_nodes.start_index = 0

        node_lon = nclocal.createVariable('node_lon',
                                     self._nodes.dtype,
                                     ('num_nodes',),
                                     chunksizes=(len(self.nodes), ),
                                     #zlib=False,
                                     #complevel=0,
                                     )
        node_lon[:] = self.nodes[:,0]
        node_lon.standard_name = "longitude"
        node_lon.long_name = "Longitude of 2D mesh nodes."
        node_lon.units = "degrees_east"
        node_lat = nclocal.createVariable('node_lat',
                                     self._nodes.dtype,
                                     ('num_nodes',),
                                     chunksizes=(len(self.nodes), ),
                                     #zlib=False,
                                     #complevel=0,
                                     )
        node_lat[:] = self.nodes[:,1]
        node_lat.standard_name = "latitude"
        node_lat.long_name = "Latitude of 2D mesh nodes."
        node_lat.units = "degrees_north"

        # // Mesh node coordinates
        # double Mesh2_node_x(nMesh2_node) ;
        #         Mesh2_node_x:standard_name = "longitude" ;
        #         Mesh2_node_x:long_name = "Longitude of 2D mesh nodes." ;
        #         Mesh2_node_x:units = "degrees_east" ;
        nclocal.sync()
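
A short read-back check for the file written above; a sketch only (the path is whatever was passed as filepath, and the writer should be closed first).

from netCDF4 import Dataset as ncDataset

with ncDataset("ugrid_example.nc") as nc:  # hypothetical path
    mesh = nc.variables['mesh']
    print(mesh.cf_role, mesh.topology_dimension)
    print(nc.variables['mesh_face_nodes'][:].shape)  # (num_faces, num_vertices)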