Example No. 1
import time
from netCDF4 import Dataset  # assumed: Dataset opened with mode 'a' matches the netCDF4 API


def modify_filter(gridfilename, ttlname, indflag=1):
    """Update coordinate units and global attributes of a grid file in place."""
    tm = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))

    ncfile = Dataset(gridfilename, 'a')

    if indflag:
        # Stamp a 'degrees' units attribute on the four coordinate variables.
        for varname in ('grid_center_lat', 'grid_center_lon',
                        'grid_corner_lat', 'grid_corner_lon'):
            ncfile.variables[varname].units = 'degrees'

    ncfile.title = ttlname
    ncfile.modifydate = tm

    # Remove obsolete global attributes if they are present.
    if hasattr(ncfile, 'grid_name'):
        delattr(ncfile, 'grid_name')

    if hasattr(ncfile, 'map_method'):
        delattr(ncfile, 'map_method')

    ncfile.sync()
    ncfile.close()
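
A call needs nothing beyond an existing grid file; the file name and title below are invented for illustration:

# Hypothetical file name and title; indflag=1 also stamps the coordinate units.
modify_filter('remap_grid.nc', 'Remapping grid, attributes updated', indflag=1)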
Example No. 2
def writeNcFile(data, fileName=None, oldStyle=1):
    # ncOk, NetCDFFile, file_format, version and the `variables` mapping are
    # module-level names in MeteonormFile.py.
    if not ncOk:
        raise Exception('module Scientific.IO.NetCDF not found, writeNcFile() failed!')
    if not fileName:
        fileName = data['name'] + '_weather.nc'
    f = NetCDFFile(fileName, 'w')
    f.createDimension('time', data['time'].shape[0])
    f.file_format = file_format
    if oldStyle:
        f.createDimension('scalar', 1)
    if 'comment' in data:
        f.comment = data['comment']
    else:
        f.comment = 'created by MeteonormFile.py (v%s)' % version
    if 'source_file' in data:
        f.source_file = str(data['source_file'])
    # Site coordinates go into global attributes and, in the old layout,
    # into additional scalar variables.
    for vn in ('latitude', 'longitude', 'height'):
        setattr(f, vn, data[vn])
        if oldStyle:
            v = f.createVariable(vn, 'd', ('scalar',))
            v[:] = [data[vn]]
    setattr(f, 'longitude_0', 15.0 * data['timezone'])
    if oldStyle:
        v = f.createVariable('longitude_0', 'd', ('scalar',))
        v[:] = [15.0 * data['timezone']]
    # One time series per entry of `variables`: name -> (original name, typecode, unit).
    for vn in variables:
        t = variables[vn][1]
        v = f.createVariable(vn, t, ('time',))
        v[:] = data[vn].astype(t)
        oname = variables[vn][0]
        if oname.startswith('<'):
            oname = oname[1:]
        if oname.endswith('>'):
            oname = oname[:-1]
        v.original_name = oname
        v.unit = variables[vn][2]
    f.sync()
    f.close()
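
For orientation, a minimal call might look like the sketch below; the module-level names (ncOk, version, file_format, variables) and the single entry in `variables` are stand-ins invented here, not the real table from MeteonormFile.py.

import numpy as np

# Hypothetical stand-ins for the module-level names used by writeNcFile().
ncOk = True
version = '0.0'
file_format = 'meteonorm'
variables = {'ta': ('<Ta>', 'd', 'degC')}    # assumed layout: (original name, typecode, unit)

data = {
    'name': 'Bern',
    'time': np.arange(24, dtype='d'),        # hourly time axis
    'timezone': 1,                           # UTC offset in hours -> longitude_0 = 15 deg
    'latitude': 46.9, 'longitude': 7.4, 'height': 540.0,
    'comment': 'example dataset',
    'ta': np.linspace(5.0, 15.0, 24),        # one series per key of `variables`
}

writeNcFile(data, 'bern_weather.nc')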
Example No. 3
class _ParNetCDFFile(ParBase):

    """
    Distributed netCDF file

    A ParNetCDFFile object acts as much as possible like a NetCDFFile object.
    Variables become ParNetCDFVariable objects, which behave like
    distributed sequences. Variables that use the dimension named by
    |split_dimension| are automatically distributed among the processors
    such that each processor handles only one slice of the whole file.
    """

    def __parinit__(self, pid, nprocs, filename, split_dimension,
                    mode = 'r', local_access = False):
        """
        @param filename: the name of the netCDF file
        @type filename: C{str}
        @param split_dimension: the name of the dimension along which the data
                                is distributed over the processors
        @type split_dimension: C{str}
        @param mode: read ('r'), write ('w'), or append ('a')
        @type mode: C{str}
        @param local_access: if C{False}, processor 0 is the only one to
                             access the file, all others communicate with
                             processor 0. If C{True} (only for reading), each
                             processor accesses the file directly. In the
                             latter case, the file must be accessible on all
                             processors under the same name. A third mode is
                             'auto', which uses some heuristics to decide
                             if the file is accessible everywhere: it checks
                             for existence of the file, then compares
                             the size on all processors, and finally verifies
                             that the same variables exist everywhere, with
                             identical names, types, and sizes.
        @type local_access: C{bool} or C{str}
        """
        if mode != 'r':
            # Direct local access is only supported for read-only files.
            local_access = False
        self.pid = pid
        self.nprocs = nprocs
        self.filename = filename
        self.split = split_dimension
        self.local_access = local_access
        self.read_only = mode == 'r'
        if local_access or pid == 0:
            self.file = NetCDFFile(filename, mode)
            try:
                length = self.file.dimensions[split_dimension]
                if length is None:
                    length = -1
            except KeyError:
                length = None
            variables = {}
            for name, var in self.file.variables.items():
                variables[name] = (name, var.dimensions)
                if length < 0 and split_dimension in var.dimensions:
                    index = list(var.dimensions).index(split_dimension)
                    length = var.shape[index]
        else:
            self.file = None
            self.split = split_dimension
            length = None
            variables = None
        if not local_access:
            length = self.broadcast(length)
            variables = self.broadcast(variables)
        if length is not None:
            self._divideData(length)
        self.variables = {}
        for name, var in variables.items():
            self.variables[name] = _ParNetCDFVariable(self, var[0], var[1],
                                                      split_dimension)

    def __repr__(self):
        return repr(self.filename)

    def close(self):
        if self.local_access or self.pid == 0:
            self.file.close()

    def createDimension(self, name, length):
        if name == self.split:
            if length is None:
                raise ValueError("Split dimension cannot be unlimited")
            self._divideData(length)
        if self.pid == 0:
            self.file.createDimension(name, length)

    def createVariable(self, name, typecode, dimensions):
        if self.pid == 0:
            var = self.file.createVariable(name, typecode, dimensions)
            dim = var.dimensions
        else:
            dim = 0
        name, dim = self.broadcast((name, dim))
        self.variables[name] = _ParNetCDFVariable(self, name, dim, self.split)
        return self.variables[name]

    def _divideData(self, length):
        # Ceiling division: every processor gets at most `chunk` indices.
        chunk = (length + self.nprocs - 1) // self.nprocs
        self.first = min(self.pid*chunk, length)
        self.last = min(self.first+chunk, length)
        if (not self.local_access) and self.pid == 0:
            self.parts = []
            for pid in range(self.nprocs):
                first = pid*chunk
                last = min(first+chunk, length)
                self.parts.append((first, last))

    def sync(self):
        if self.pid == 0:
            self.file.sync()
    flush = sync
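
The partitioning in _divideData is plain ceiling-division chunking. The small stand-alone sketch below (the helper name is ours, not part of the class) reproduces the same arithmetic to show which index range each process id would receive:

def divide(length, nprocs):
    """Mirror _divideData: ceiling-sized chunks, clipped to the total length."""
    chunk = (length + nprocs - 1) // nprocs
    return [(min(pid * chunk, length), min(pid * chunk + chunk, length))
            for pid in range(nprocs)]

print(divide(10, 4))   # [(0, 3), (3, 6), (6, 9), (9, 10)]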
Example No. 4
# Combine the three input field sets for each pollutant.
for i, pol in enumerate(polluants):
    data = dcrs[i] - (dsrs[i] - dsss[i])
    dcss.append(data)
    print(" > %s (min = %.1f, max = %.1f)" % (pol, data.min(), data.max()))
del dsrs, dsss, dcrs

# Save the results
print("saving the data")
if not fncs.endswith('.nc'):
    fncs = "%s.nc" % fncs
ncf = NetCDFFile(fncs, 'w')

# dimensions (here np is the number of points, defined earlier in the script)
ncf.createDimension('Time', None)
ncf.createDimension('DateStrLen', 19)
ncf.createDimension('Point', np)
ncf.sync()
print(" > dimensions")

# auxiliary variables
ncf.createVariable('Times', 'c', ('Time', 'DateStrLen'))
ncf.createVariable('area_pts', 'f', ('Point', ))
ncf.createVariable('easting_pts', 'f', ('Point', ))
ncf.createVariable('northing_pts', 'f', ('Point', ))
ncf.createVariable('x_pts', 'f', ('Point', ))
ncf.createVariable('y_pts', 'f', ('Point', ))
ncf.sync()
print(" > variables")

nctimes = ncf.variables['Times']
nctimes.assignValue(times)
setattr(nctimes, 'long_name', 'date in CHIMERE/WRF format')
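
The 19-character DateStrLen matches the 'YYYY-MM-DD_HH:MM:SS' date strings used by WRF and CHIMERE. As a rough sketch of how the `times` array passed to assignValue could be built (start date and length are invented here):

import numpy as np
from datetime import datetime, timedelta

start = datetime(2010, 1, 1)
stamps = [(start + timedelta(hours=h)).strftime('%Y-%m-%d_%H:%M:%S')
          for h in range(24)]

# Character array of shape (Time, DateStrLen) for the 'Times' variable.
times = np.array([list(s) for s in stamps], dtype='c')
print(times.shape)   # (24, 19)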
Example No. 5
                    'set_var_chunk_cache', 'setncattr', 'shape', 'size')) or
                    ('_' in a)):  # don't copy these
                setattr(newVar, a, getattr(thevar, a))

        # Loop over time, and vert levels if needed, interpolating each 2d field for this variable
        for t in range(0, time_length):
            if 'nVertLevels' in thevar.dimensions:
                for v in range(0, vert_levs):
                    originalvalue = thevar[t, :, v]
                    # Create an interpolator object for this variable
                    interpolator = matplotlib.delaunay.NNInterpolator(
                        triang, originalvalue, default_value=0.0)
                    # use the object to regrid
                    varGridded = interpolator[ymin:ymax:ny * 1j,
                                              xmin:xmax:nx * 1j]
                    newVar[t, :, :, v] = varGridded[:, :]
            else:
                originalvalue = thevar[t, :]
                # Create an interpolator object for this variable
                interpolator = matplotlib.delaunay.NNInterpolator(
                    triang, originalvalue, default_value=0.0)
                # use the object to regrid
                varGridded = interpolator[ymin:ymax:ny * 1j, xmin:xmax:nx * 1j]
                newVar[t, :, :] = varGridded[:, :]

print('Saved re-gridded fields to:', fileoutname)

fileout.sync()
filein.close()
fileout.close()
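
matplotlib.delaunay has long been removed from matplotlib, so the interpolation step above no longer runs on current installations. A rough modern equivalent of one 2-D regridding pass, using SciPy's griddata (linear barycentric interpolation rather than true natural-neighbour weights) with invented point coordinates and grid bounds, could look like:

import numpy as np
from scipy.interpolate import griddata

# Placeholder unstructured points and one field defined on them.
rng = np.random.default_rng(0)
xpts = rng.uniform(0.0, 100.0, 500)
ypts = rng.uniform(0.0, 100.0, 500)
originalvalue = np.sin(xpts / 10.0) * np.cos(ypts / 10.0)

# Target regular grid, analogous to the ymin:ymax:ny*1j, xmin:xmax:nx*1j slicing above.
nx, ny = 64, 64
grid_y, grid_x = np.mgrid[0.0:100.0:ny * 1j, 0.0:100.0:nx * 1j]

# Linear interpolation on the Delaunay triangulation; fill holes with 0.0
# to mirror NNInterpolator's default_value.
varGridded = griddata((xpts, ypts), originalvalue, (grid_x, grid_y),
                      method='linear', fill_value=0.0)
print(varGridded.shape)   # (64, 64)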