示例#1
0
 def __init__(self, ncfile, readertype='generic', **kwargs):
     """Open *ncfile* with the reader selected by *readertype*.

     Parameters
     ----------
     ncfile: string
         Path to the netcdf file.
     readertype: string or callable
         - a callable: used as a factory, called as ``readertype(ncfile, **kwargs)``
         - ``"cdms2"``: opened with :func:`cdms2.open`
         - ``"netcdf4"``: opened with :class:`netCDF4.Dataset`
         - any other string: passed to :class:`DS` as the dataset type
     """
     self.ncfile = ncfile
     if not isinstance(readertype, basestring):
         # User-supplied open function or reader class
         self.f = readertype(ncfile, **kwargs)
         self.type = 'callable'
     elif readertype == 'cdms2':
         self.f = cdms2.open(ncfile, **kwargs)
         self.type = readertype
     elif readertype == 'netcdf4':
         # Bug fix: the module is named "netCDF4" (case sensitive);
         # the original "import netcdf4" raised ImportError.
         import netCDF4
         self.f = netCDF4.Dataset(ncfile, **kwargs)
         self.type = readertype
     else:
         # Fall back to the generic dataset reader
         self.f = DS(ncfile, readertype, **kwargs)
         self.type = 'dataset'
示例#2
0
def test_obs_obsmanager_project_model():
    """Check that the manager projects a surface model field onto the obs."""

    # Get a manager from the init test
    manager = test_obs_obsmanager_init_surf()

    # Read the surface model temperature
    ds = DS(NCFILE_MANGA0, 'mars', level='surf', logger_level='error')
    model_temp = ds('temp')
    ds.close()
    model_temp.id += '_surf'

    # Project the model field onto the observation locations
    otemp = manager.project_model(model_temp)
    assert_allclose(otemp[1][0], [12.91558515, 10.58179214])

    return manager
示例#3
0
def test_obs_ncobsplatform_surf():
    """Load a surface obs platform, renormalise it and project model fields."""

    # Read the platform and keep a copy of its initial stacked state
    obs = NcObsPlatform(NCFILE_OBS_SURF, logger=LOGGER,
                        lat=(45, 47.8), norms=[0.2, 0.1])
    stacked0 = obs.stacked_data.copy()
    assert obs.lons.shape == (3, )
    assert obs.ns == 6
    assert obs.ndim == 1
    assert_allclose(obs.means, [0, 0], atol=1e-7)
    assert obs.depths == 'surf'
    assert_allclose(stacked0, [2.5, 1.5, 4., 1., 1.5, 4.])

    # Renormalise by variable name and check the rescaled stack
    notnormed = obs.set_named_norms(temp=0.1)
    assert obs.norms == [0.1, 0.1]
    assert_allclose(obs.stacked_data[:3], 2 * stacked0[:3])
    assert_allclose(obs.stacked_data[3:], stacked0[3:])
    obs.set_named_norms(temp=0.2)

    # Read surface model fields and project them onto the platform
    ds = DS(NCFILE_MANGA0, 'mars', level=obs.depths, logger_level='error')
    model_temp = ds('temp')
    model_sal = ds('sal')
    ds.close()
    model_temp.id += '_surf'
    model_sal.id += '_surf'
    otemp = obs.project_model(model_temp)
    osal = obs.project_model(model_sal)
    otem_true = [12.97311556, 12.91558515, 10.58179214]
    assert_allclose(otemp[0], otem_true)

    # Stack the projected model anomalies
    otemp[:] -= 11.5
    osal[:] -= 35.5
    stacked_model = obs.restack([otemp, osal])
    assert stacked_model.shape == (6, 15)
    assert_allclose(stacked_model[:3, 0] * obs.norms[0] + 11.5, otem_true)

    return obs
示例#4
0
        except:
            parser.error('--%s: wrong argument' % selname)
    select[selname] = val

# Imports
from vcmq import DS, map2, MV2, data_sample

# The file: use the given path as-is, else look it up in the sample data dir
ncfile = options.ncfile
if not os.path.exists(ncfile):
    ncfile = data_sample(ncfile)
if not os.path.exists(ncfile):
    parser.error('File not found: ' + ncfile)

# Read the mixed layer depth (delta-temperature criterion)
ds = DS(ncfile, options.model, **select)
mld = ds.get_mld(mode='deltatemp', squeeze=1)

# Time average (if the MLD still carries a time dimension)
if mld.ndim == 3:
    mld = MV2.average(mld, axis=0)

# Plot
map2(mld,
     proj='merc',
     figsize=(6, 6),
     autoresize=0,
     title=options.title,
     colorbar_shrink=0.7,
     right=1,
     show=options.disp,
示例#5
0
"""Test :meth:`~vacumm.data.misc.dataset.OceanDataset.get_temp` with ``at`` keyword and :meth:`~vacumm.data.misc.dataset.OceanDataset.get_temp_u`"""


# Inits
ncfile = "menor.nc"

# Imports
from vcmq import DS, data_sample

# Read data
ds = DS(data_sample(ncfile), 'mars', logger_level='critical')
uu = ds.get_u3d()
uv1 = ds.get_u3d(at='v')
uv2 = ds.get_u3d_v()

# For unittest
result = [
    ('assertNotEqual', [uu.mean(), uv1.mean()]), 
    ('assertEqual', [uv1.mean(), uv2.mean()]), 
    ('assertEqual', [uv1.id, "u3d_v"]), 
    ('assertEqual', [uv2.id, "u3d_v"]), 
]
示例#6
0
# Parameters: sample file and lon/lat sub-domain
ncfile = "menor.nc"
lon = (3., 4.5)
lat = (42, 42.8)

# Imports
from vcmq import DS, map2, data_sample

# Read the mixed layer depth (delta-temperature criterion) over the sub-domain
ncfile = data_sample(ncfile)
ds = DS(ncfile, 'mars', lon=lon, lat=lat)
mld = ds.get_mld(mode='deltatemp', squeeze=1)

# Plot and save the figure next to this script (savefigs uses __file__)
map2(mld,
     proj='merc',
     figsize=(6, 6),
     autoresize=0,
     colorbar_shrink=0.7,
     right=1,
     savefigs=__file__,
     show=False,
     close=True)

print 'Done'
示例#7
0
"""Test :meth:`~vacumm.data.misc.dataset.OceanDataset.get_depth` in MENOR"""

# Inits
ncfile = "menor.nc"

# Imports
from vcmq import DS, os, map2, data_sample, code_file_name

# Read data
ncfile = data_sample(ncfile)
ds = DS(ncfile, 'mars', logger_level='critical')
depth = ds.get_depth(squeeze=True)

# Plot midlevel
figfile = code_file_name(ext='png')
if os.path.exists(figfile): os.remove(figfile)
map2(depth[depth.shape[0] / 2],
     savefig=figfile,
     close=True,
     vmax=0,
     show=False,
     colorbar_shrink=0.6,
     title='Testing Dataset.get_depth()')

# For unittest
result = [
    ('files', figfile),
]
示例#8
0
"""Test :meth:`~vacumm.data.misc.dataset.Dataset.get_var` on multiple files and time selections"""

# Inits
ncfiles = "mars2d.xyt.*.nc"

# Imports
from vcmq import DS, os, map2, data_sample

# Glob pattern
result = []
ds = DS(data_sample("mars2d.xyt.*.nc"),
        'mars',
        logger_level='critical',
        time=("2008-08-15 06", "2008-08-15 09"))
assert ds.get_ssh().shape == (4, 50, 50)
assert ds.get_ssh(time=("2008-08-15 06", "2008-08-15 07", 'co'),
                  squeeze=True).shape == (50, 50)

# With dates
ds = DS(data_sample("mars2d.xyt.%Y%m%d%H.nc"),
        'mars',
        logger_level='critical',
        time=("2008-08-15 06", "2008-08-15 09"))
assert ds.get_ssh(time=("2008-08-15 06", "2008-08-15 07",
                        'cc')).shape == (2, 50, 50)
示例#9
0
"""Test :meth:`~vacumm.data.misc.dataset.OceanDataset.get_depth` in MFS"""

# Inits
ncfile = "mfs.nc"

# Imports
from vcmq import DS, os, map2, data_sample, code_file_name, isdep

# Read data
ncfile = data_sample(ncfile)
ds = DS(ncfile, 'nemo', logger_level='critical')
depth = ds.get_depth(squeeze=True)

# For unittest
result = [
    (isdep, depth),
    ('assertTrue', depth[0] < depth[-1] < 0),
]
示例#10
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Generic handling of gridded data (:mod:`vacumm.data.misc.dataset`)"""

from vcmq import DS, data_sample, map2

# Initialisation: open the MENOR sample over a lon/lat sub-domain
ds = DS(data_sample('menor.nc'), 'mars', lon=(4, 5), lat=(42.5, 43.5), log_level='debug')
print ds.dataset


# Logging
ds.info('Pratique')                                 # -> TRY WITH WARNING LEVEL
ds.set_loglevel('debug')


# Coordinates
lon = ds.get_lon()                                  # -> TRY AT THE U POINT
grid = ds.get_grid()


# Variables
temp = ds.get_temp(level=slice(-1, None), squeeze=True)
sst = ds.get_sst(squeeze=True)                      # -> CHECK THE ATTRIBUTES
ds.info('Plot SST')
map2(sst)
sal = ds.get_sal(lon=(4., 4.5))
print temp.shape, sal.shape

# Generic: the same API works with a NEMO file
ds2 = DS(data_sample('mfs.nc'), 'nemo', lon=(4, 5), lat=(42.5, 43.5))
示例#11
0
def load_model_at_regular_dates(ncpat,
                                varnames=None,
                                time=None,
                                lat=None,
                                lon=None,
                                level=None,
                                depths=None,
                                modeltype='mars',
                                nt=50,
                                dtfile=None,
                                sort=True,
                                asdict=False,
                                logger=None,
                                **kwargs):
    """Read model output at nearest unique dates with optional linear interpolation


    Parameters
    ----------
    ncpat: string or list of strings
    varnames: string, strings
        Generic var names. If None, all variables that are known from the
        :mod:`vacumm.data.cf` module are used.
    level: string, None, list of floats, array, tuple of them, dict
        Here are some possible values:

        - "surf" or "bottom": self explanatory
        - None or "3d": No slice, so get all levels with no interpolation.
        - A list or array of negative depths: get all levels and
          interpolate at these depths.

        Variables sliced with "surf" and "bottom" are returned with
        an id suffixed with "_surf" or "_bottom".
        You can specify different slicings using a tuple
        of depth specifications.
        You can specialise slicings of a variable using a dictionary with
        the key as the variable name.

    See also
    --------
    :func:`sonat.misc.list_files_from_pattern` for more options


    Examples
    --------
    >>> mdict = load_model_at_regular_dates('myfile.nc', level='surf')
    >>> mdict = load_model_at_regular_dates('myfile.nc', level=('surf', 'bottom'))
    >>> mdict = load_model_at_regular_dates('myfile.nc', varnames=['temp', 'sal'],
        level={'temp':('surf', 'bottom'), 'sal':[-50, -10]})
    >>> mdict = load_model_at_regular_dates('myfile.nc', varnames=['temp', 'sal'],
        level={'temp':('surf', '3d'), 'sal':None}, depths=[-50, -10])


    """
    # Logger
    kwlog = kwfilter(kwargs, 'logger_')
    if logger is None:
        logger = get_logger(**kwlog)
    logger.debug('Loading model at regular dates')

    # Get file list (bug fix: honour the ``sort`` parameter instead of
    # hard-coding sort=True)
    ncfiles = list_files_from_pattern(ncpat, time, dtfile=dtfile, sort=sort)
    if not ncfiles:
        raise SONATError('No file found')

    # Time interval: when not requested, infer it from the first and
    # last files' time axes
    reqtime = time
    if time is None:

        # First
        taxis = ncget_time(ncfiles[0])
        if taxis is None:
            raise SONATError("Can't get time axis for: " + ncfiles[0])
        ctimes = taxis.asComponentTime()
        ct0 = ctimes[0]

        # Last (re-read only when there is more than one file)
        if ncfiles[0] != ncfiles[-1]:
            taxis = ncget_time(ncfiles[-1])
            if taxis is None:
                raise SONATError("Can't get time axis for: " + ncfiles[-1])
            ctimes = taxis.asComponentTime()
        ct1 = ctimes[-1]

        # Time
        time = (ct0, ct1)

    # Generate nt regularly spaced dates within the time interval
    dates = lindates(time[0], time[1], nt)

    # Get time indices
    iidict, iiinfo = ncfiles_time_indices(ncfiles,
                                          dates,
                                          getinfo=True,
                                          asslices=True)
    if iiinfo['missed'] or iiinfo['duplicates']:
        msg = ("You must provide at least {nt} model time steps to read "
               "independant dates")
        if reqtime:
            msg = msg + (", and your requested time range must be enclosed "
                         "by model time range.")
        # Bug fix: substitute nt into the template; the original raised
        # the message with a literal "{nt}" in it.
        raise SONATError(msg.format(nt=nt))

    # Read
    single = isinstance(varnames, basestring)
    if single:
        varnames = [varnames]
    out = OrderedDict()
    vlevels = {}  # per-variable cache of interpreted level specs
    if not isinstance(level, dict):
        level = {'__default__': level}
    for ncfile, tslices in iidict.items():

        # Dataset instance
        ds = DS(ncfile,
                modeltype,
                logger_name='SONAT.Dataset',
                logger_level='error')

        # List of well known variables (only resolved once, on the
        # first file, since varnames is then no longer None)
        if varnames is None:
            varnames = []
            for ncvarname in ds.get_variable_names():
                varname = match_known_var(ds[0][ncvarname])
                if varname:
                    varnames.append(varname)

        # Loop on variables
        vardepth = None
        kwvar = dict(lat=lat, lon=lon, verbose=False, bestestimate=False)
        for vname in list(varnames):

            # Level selector for this variable
            if vname in vlevels:  # cached
                vlevel = vlevels[vname]
            else:
                vlevel = interpret_level(dicttree_get(level, vname),
                                         astuple=True)
                vlevels[vname] = vlevel  # cache it

            # Loop on level specs
            for vlev in vlevel:

                # Output vname and vlev check: "surf"/"bottom" get an id
                # suffix, "3d" means no slicing at all
                if not isinstance(vlev, basestring):
                    vnameo = vname
                elif vlev not in ('surf', "bottom", "3d"):
                    raise SONATError('Depth string must one of '
                                     'surf, bottom, 3d')
                elif vlev != '3d':
                    vnameo = vname + '_' + vlev
                else:
                    vlev = None
                    vnameo = vname

                # Slicing level and output depths
                if vlev not in ['surf', 'bottom']:

                    # Numeric depths request an interpolation; fall back
                    # to the global ``depths`` when the level is None
                    if vlev is None:
                        vdep = depths if depths is not None else None
                    else:
                        vdep = vlev
                    interp = vdep is not None
                    if interp:
                        vlev = None

                else:

                    interp = False

                # Read and aggregate
                vout = out.setdefault(vnameo, [])
                for tslice in tslices:

                    # Get var
                    kwvar['time'] = tslice
                    var = ds(vname, level=vlev, **kwvar)

                    # Interpolate at numeric depths
                    if interp and var.getLevel() is not None:

                        # Get depths -- re-read for every time slice since
                        # kwvar['time'] changes (see FIXME below)
                        if True or vardepth is None:  #FIXME: bad to always read it
                            vardepth = ds.get_depth(level=vlev,
                                                    zerolid=True,
                                                    **kwvar)

                        # Interpolate
                        var = ds._interp_at_depths_(var,
                                                    vardepth,
                                                    vdep,
                                                    extrap='top')

                    # Id with suffix
                    var.id = vnameo

                    # Store results
                    vout.append(var)

    # Concatenate the per-slice reads along time
    for vname, vout in out.items():
        out[vname] = MV2_concatenate(vout)

    # Dict
    if asdict:
        return out

    # Single: unwrap when a single varname string was requested
    out = out.values()
    if single:
        return out[0]
    return out
示例#12
0
    if RE_MATCH_TIME(arg) and not arg.isdigit():  # date
        return arg
    if re.match('[\w\s]*:[\w\s]*(:[\w\s]*)?$', arg):  # args to slice
        return slice(*[(None if a == '' else eval(a)) for a in arg.split(':')])
    arg = eval(arg)
    if isinstance(arg, int):  # slice at a single index
        return slice(arg, (arg + 1) if arg != -1 else None)
    if isinstance(arg, float):  # selection of a single position
        return (arg, arg, 'cob')
    return arg


# Open reader or file: a generic DS reader when --generic is given,
# otherwise a raw cdms2 file
if args.generic:
    f = DS(args.ncfile,
           args.generic,
           logger_level='info' if args.verbose else 'error')
else:
    f = cdms2.open(args.ncfile)

# Find a variable name if not provided
if not args.var:
    if args.generic:
        allvars = f.dataset[0].listvariables()
    else:
        allvars = f.listvariables()
    for var in allvars:
        if 'bounds' not in var:
            if args.generic:
                # '+' prefix marks a generic (DS-resolved) variable name
                var = '+' + var
            args.var = [var]
            # NOTE(review): no break here, so the LAST non-bounds variable
            # wins -- confirm whether the FIRST one was intended.
示例#13
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Thermodynamic and dynamic diagnostics (:mod:`vacumm.diag.thermdyn` and :mod:`vacumm.diag.dynamics`)"""

from vcmq import DS, data_sample, map2, density, mixed_layer_depth,  barotropic_geostrophic_velocity, kinetic_energy, shapiro2d

# Read the base fields
# NOTE(review): no model type is passed here, unlike the other examples
# which give 'mars' -- presumably DS falls back to its generic reader;
# confirm against the DS signature.
ds = DS(data_sample('menor.nc'))
temp = ds.get_temp(squeeze=1)
sal = ds.get_sal(squeeze=1)
depth = ds.get_depth(squeeze=1)
ssh = ds.get_ssh(squeeze=1)


# Density (surface only: last level of temp and sal)
dens_surf = density(temp[-1], sal[-1])                      # -> COMPUTE THE 3D DENSITY
map2(dens_surf)


# Mixed layer depth (density-difference criterion)
mld = mixed_layer_depth((temp, sal), depth, mode='deltadens') # -> TRY OTHER MODES
map2(mld, vmax=600.)


# Geostrophic velocity
# - velocities from the sea surface height
ug, vg = barotropic_geostrophic_velocity(ssh)
# - kinetic energy, smoothed with a Shapiro filter
ke = kinetic_energy((ug, vg))                                 # -> TEST WITH SSH
ke.long_name = 'Surface geostrophique kinetic energy'
ke = shapiro2d(ke)