Пример #1
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""1D and 2D filters"""

# Fixed: curve2 was imported twice in the original import line.
from vcmq import (cdms2, curve2, map2, generic1d, generic2d, shapiro2d,
                  bartlett1d, data_sample, shapiro1d, gaussian1d, gaussian2d)

# Read input data
# - hourly SST time series (for the 1D filters)
f = cdms2.open(data_sample('mars3d.t.nc'))
sst1d = f('temp')
f.close()
# - surface SST map (kept for the 2D filtering exercises)
f = cdms2.open(data_sample('menor.nc'))
sst2d = f('TEMP', time=slice(0, 1), level=slice(-1, None), squeeze=1)
f.close()

# 1D

# - filters
sst1d_gen13 = generic1d(sst1d, 13)           # 13-point flat window
sst1d_gen3 = generic1d(sst1d, [1., 1., 1.])  # explicit 3-point weights
sst1d_sha = shapiro1d(sst1d)  # -> SHAPIRO WITH GENERIC1D
sst1d_bar13 = bartlett1d(sst1d, 13)          # 13-point Bartlett window
# -> TRY A GAUSSIAN OVER 13 HOURS

# - plots
curve2(sst1d, 'k', label='Original', show=False, figsize=(12, 5))
curve2(sst1d_gen13, 'r', label='Generic 13 pts', show=False)
curve2(sst1d_gen3, 'g', label='Generic 3 pts', show=False)
curve2(sst1d_sha, 'b', label='shapiro', show=False)
curve2(sst1d_bar13, 'm', label='Bartlett', legend=True)

# -> MASK PART OF THE DATA AND PLAY WITH THE MASK= PARAMETER
Пример #2
0
# -*- coding: utf8 -*-
# Read and mask Hs (significant wave height)
from vcmq import cdms2, MV2, data_sample, code_base_name

f = cdms2.open(data_sample("swan.four.nc"))
lon = f("longitude")
lat = f("latitude")
missing_value = f["HS"]._FillValue
hs = f("HS", squeeze=1)
f.close()
# Mask the fill values in place; use the imported MV2 module directly
# instead of the legacy cdms2.MV alias (consistency fix).
hs[:] = MV2.masked_object(hs, missing_value, copy=0)

# Plot on the irregular (curvilinear) grid
from vacumm.misc.plot import map2

map2(
    hs,
    xaxis=lon,
    yaxis=lat,
    figsize=(6, 4),
    long_name="Significant wave height",
    fill="pcolormesh",
    contour=False,
    savefigs=code_base_name(ext="png"),
    left=0.12,
    right=1,
    show=False,
)
Пример #3
0
from vcmq import cdms2, P, curve2, savefigs, data_sample
from vacumm.tide.filters import demerliac, godin
from vacumm.tide.filters import extrema, zeros
from vacumm.tide.marigraph import Marigraph
from vacumm.tide.station_info import StationInfo

# Station metadata (Python 2 print statements)
station = StationInfo('Brest')
print station.attributes()
print station.name, station.longitude
print 'Niveau moyen a Brest:', station.nm


# Read sea level at Brest
f = cdms2.open(data_sample("tide.sealevel.BREST.mars.nc"))
sea_level = f('sea_level')
f.close()


# Surges/residuals: split the signal with the Demerliac filter
cotes, tide = demerliac(sea_level, get_tide=True)            # -> TRY GODIN
kwp = dict(date_fmt='%b', date_locator='month')
curve2(sea_level, 'b', show=False, figsize=(15, 4), **kwp)
curve2(tide, 'r', **kwp)
curve2(cotes, figsize=(15, 4), **kwp)


# Extrema (low/high tides) on a two-day zoom
slzoom1 = sea_level(('2006-10-01', '2006-10-02'))[::4] # every hour
bm, pm = extrema(slzoom1, spline=True, ref='mean')           # -> WITHOUT SPLINES
Пример #4
0
"""Test :meth:`~vacumm.misc.core_plot.Plot2D.plot_streamplot`"""

from vcmq import map2, data_sample, cdms2, code_file_name
import warnings
warnings.filterwarnings('ignore', 'Warning: converting a masked element to nan')

f = cdms2.open(data_sample('mars2d.xyt.nc'))
u = f('u', slice(0, 1), squeeze=1)
v = f('v', slice(0, 1), squeeze=1)
f.close()

map2((u, v), fill=False, contour=False, streamplot=True, streamplot_density=3, 
    streamplot_linewidth='modulus', streamplot_lwmod=3, streamplot_color='modulus', 
    title='Tidal stream lines', colorbar_shrink=0.7, savefig=code_file_name(), 
    right=1, show=False)
Пример #5
0
from vcmq import data_sample, map, cdms2

# Read the SST over the Iroise Sea
# (the redundant second "import cdms2" was dropped: cdms2 already comes
# from the vcmq import above)
f = cdms2.open(data_sample('mars3d.xy.nc'))
sst = f('temp',
        time=slice(0, 1),
        lat=(47.8, 48.6),
        lon=(-5.2, -4.25),
        squeeze=1)
f.close()

# Plot and save the figure
m = map(sst,
        title='SST in the Iroise Sea',
        linewidth=.7,
        fill='pcolormesh',
        cmap='cmocean_thermal',
        clabel=True,
        vmin=9,
        top=.9,
        figsize=(6, 5),
        clabel_glow=True,
        clabel_glow_alpha=.4,
        colorbar_shrink=.8,
        show=False,
        savefigs=__file__,
        close=True)
from vcmq import cdms2, data_sample, N, cdtime, curve2, round_date, create_time, lindates, regrid1d, add_key, P
from vacumm.misc.grid._interp_ import cellerr1d
from scipy.stats import linregress

# Load the radial current speed and its error
f = cdms2.open(data_sample("radial_speed.nc"))
sp = f("speed")
spe = f("speed_error")
f.close()

# Build an hourly output time axis spanning the input period
taxi = sp.getTime()
taxi.toRelativeTime("hours since 2000")
ctimesi = taxi.asComponentTime()
ct0 = round_date(ctimesi[0], "hour")
ct1 = round_date(ctimesi[-1], "hour")
taxo = create_time(lindates(ct0, ct1, 1, "hour"), taxi.units)

# Lag error
# - estimation: RMS difference between the series and its lagged copy,
#   then a linear fit of the error against the lag
lags = N.arange(1, 6)
els = N.array([N.sqrt(((sp[lag:] - sp[:-lag]) ** 2).mean()) for lag in lags])
a, b, _, _, _ = linregress(lags, els)
# - plot the lag errors and the fitted line
P.figure(figsize=(6, 6))
P.subplot(211)
P.plot(lags, els, "o")
P.plot([0, lags[-1]], [b, a * lags[-1] + b], "g")
Пример #7
0
from vcmq import data_sample, map, cdms2

# Read the SST over the Iroise Sea
# (the redundant second "import cdms2" was dropped: cdms2 already comes
# from the vcmq import above)
f = cdms2.open(data_sample('mars3d.xy.nc'))
sst = f('temp', time=slice(0, 1),  lat=(47.8, 48.6), lon=(-5.2, -4.25),  squeeze=1)
f.close()

# Plot and save the figure
m = map(sst, title='SST in the Iroise Sea', linewidth=.7,
    fill='pcolormesh', cmap='cmocean_thermal', clabel=True,
    vmin=9, top=.9, figsize=(6, 5), clabel_glow=True, clabel_glow_alpha=.4,
    colorbar_shrink=.8, show=False, savefigs=__file__)
Пример #8
0
# -*- coding: utf8 -*-
# %% Imports
from vcmq import cdms2, MV2, data_sample, code_base_name, map2

# %% Read Hs (significant wave height) and its curvilinear coordinates
f = cdms2.open(data_sample('swan.four.nc'))
lon = f('longitude')
lat = f('latitude')
hs = f('HS', squeeze=1)
f.close()

# %% Plot it specifying its irregular grid
map2(hs,
     xaxis=lon,
     yaxis=lat,
     figsize=(6, 4),
     cmap='cmocean_amp',
     long_name='Significant wave height',
     fill='pcolormesh',
     contour=False,
     savefigs=code_base_name(ext='png'),
     left=.12,
     right=1,
     show=False)
# Fixed: P (the plotting module) is used below but was missing from the
# import list, which would raise a NameError at the plot section.
from vcmq import cdms2, data_sample, N, cdtime, curve2, round_date, create_time, \
    lindates, regrid1d, add_key, P
from vacumm.misc.grid._interp_ import cellerr1d
from scipy.stats import linregress

# Read data
f = cdms2.open(data_sample('radial_speed.nc'))
sp = f('speed')
spe = f('speed_error')
f.close()

# Create an hourly time axis spanning the sampled period
taxi = sp.getTime()
taxi.toRelativeTime('hours since 2000')
ctimesi = taxi.asComponentTime()
ct0 = round_date(ctimesi[0], 'hour')
ct1 = round_date(ctimesi[-1], 'hour')
taxo = create_time(lindates(ct0, ct1, 1, 'hour'), taxi.units)

# Lag error
# - estimation: RMS difference between the series and its lagged copy
els = []
lags = N.arange(1, 6)
for lag in lags:
    els.append(N.sqrt(((sp[lag:]-sp[:-lag])**2).mean()))
els = N.array(els)
a, b, _, _, _ = linregress(lags, els)
# - plot the lag errors
P.figure(figsize=(6, 6))
P.subplot(211)
P.plot(lags, els, 'o')
Пример #10
0
def generate_pseudo_ensemble(ncpat,
                             varnames=None,
                             nrens=50,
                             enrich=2.,
                             norms=None,
                             getmodes=False,
                             logger=None,
                             asdicts=False,
                             anomaly=True,
                             ncensfile=None,
                             **kwargs):
    """Generate a static pseudo-ensemble from a single simulation


    Parameters
    ----------
    ncpat: string
        netcdf file name or pattern
    varnames: list of strings, optional
        Restrict the loading to these variables (passed to
        :func:`load_model_at_regular_dates`)
    nrens: int
        Ensemble size
    enrich: float
        Enrichment factor: ``nrens * enrich`` model states are read, then
        reduced back to ``nrens`` members with an EOF decomposition;
        values below 1 are clipped to 1 (no enrichment)
    norms: dict or list, optional
        Normalisation coefficients passed to the :class:`Stacker`
        (a dict is converted to a list matching the loaded variables)
    getmodes: bool
        Get also EOFs and eigen values
    logger: optional
        Logger instance; created with :func:`get_logger` when None
    asdicts: bool
        Return :class:`OrderedDict` keyed by variable id instead of lists
    anomaly: bool
        Work with anomalies: remove the time mean (no-enrichment case) or
        rescale the unstacked EOF members accordingly
    ncensfile: string, optional
        When given, dump the ensemble (and modes if requested) to this
        netcdf file
    **kwargs:
        Extra parameters are passed to :func:`load_model_at_dates`

    Return
    ------
    list (or dict) of arrays:
        variables with their name as keys
    dict: eofs, ev and variance, optional
        eofs: list (or dict) of arrays(nmodes, ...), optional
            EOFs
        ev: array(nmodes), optional
            Eigen values
        var: array
            Variance

    """
    # Logger
    kwlog = kwfilter(kwargs, 'logger_')
    if logger is None:
        logger = get_logger(**kwlog)
    logger.verbose('Generating pseudo-ensemble')

    # Ensemble size: read nt >= nrens model states for later EOF reduction
    enrich = max(enrich, 1.)
    nt = int(nrens * enrich)
    logger.debug(
        ' enrich={enrich},  nt={nt}, ncpat={ncpat}, varnames={varnames}'.
        format(**locals()))

    # Read variables
    logger.debug('Reading the model at {} dates'.format(nt))
    data = load_model_at_regular_dates(ncpat,
                                       varnames=varnames,
                                       nt=nt,
                                       asdict=False,
                                       **kwargs)
    single = not isinstance(data, list)  # single variable vs list of variables

    # Norms: map a per-variable dict onto the loaded variable list
    if isinstance(norms, dict):
        norms = var_prop_dict2list(data, norms)

    # Enrichment: only needed when more states were read than members kept
    witheofs = nrens != nt
    if witheofs:
        logger.debug('Computing reduced rank ensemble with EOFs analysis')

        # Stack packed variables together into one state matrix
        stacker = Stacker(data, norms=norms, logger=logger)
        meanstate = N.zeros(stacker.ns)
        states = N.asfortranarray(stacker.stacked_data.copy())

        # Compute EOFs (fortran routine)
        stddev, svals, svecs, status = f_eofcovar(dim_fields=stacker.ns,
                                                  offsets=1,
                                                  remove_mstate=0,
                                                  do_mv=0,
                                                  states=states,
                                                  meanstate=meanstate)
        if status != 0:
            raise SONATError('Error while calling fortran eofcovar routine')
        neof = svals.size  # computed
        neofr = nrens - 1  # retained
        svals = svals[:neofr] * N.sqrt(
            (neof - 1.) / neof)  # to be consistent with total variance
        svecs = svecs[:, :neofr]

        # Generate ensemble members by sampling the retained modes
        sens = f_sampleens(svecs, svals, meanstate, flag=0)

        # Unstack back to physical variables
        ens = stacker.unstack(sens,
                              format=2,
                              rescale='norm' if anomaly else True)
        if getmodes:

            # Modes
            mode_axis = create_axis(N.arange(1, neofr + 1, dtype='i'),
                                    id='mode')
            eofs = stacker.unstack(svecs,
                                   firstdims=mode_axis,
                                   id='{id}_eof',
                                   rescale=False,
                                   format=1)
            svals = MV2.array(svals,
                              axes=[mode_axis],
                              id='ev',
                              attributes={'long_name': 'Eigen values'})
            # NOTE(review): total variance set to the state dimension —
            # presumably valid for normalised anomalies; confirm.
            svals.total_variance = float(stacker.ns)

            # Variance of each variable over time
            vv = stacker.format_arrays([d.var(axis=0) for d in stacker.datas],
                                       id='{id}_variance',
                                       mode=1)
            variance = stacker.unmap(vv)

    else:  # No enrichment -> take the anomaly if requested

        logger.debug('Getting the anomaly to build the ensemble')
        ens = data

        if anomaly:
            if single:
                ens[:] = ens.asma() - ens.asma().mean(axis=0)
            else:
                for i, e in enumerate(ens):
                    ens[i][:] = e.asma() - e.asma().mean(axis=0)

    # Finalize: attach a common member axis to every variable
    getmodes = getmodes and witheofs
    member_axis = create_axis(N.arange(nrens, dtype='i'),
                              id='member',
                              long_name='Member')
    if single:
        ens.setAxis(0, member_axis)
    else:
        for var in ens:
            var.setAxis(0, member_axis)

    # Dump to file
    if ncensfile:
        logger.debug('Dump the ensemble to netcdf')
        checkdir(ncensfile)
        f = cdms2.open(ncensfile, 'w')
        ensvars = list(ens) if not single else [ens]
        if getmodes:
            if single:
                ensvars.append(eofs)
                ensvars.append(variance)
            else:
                ensvars.extend(eofs)
                ensvars.extend(variance)
            ensvars.append(svals)
        for var in ensvars:
            f.write(var)
        f.close()
        logger.created(ncensfile)

    # As dicts keyed by variable id
    if asdicts:
        if single:
            ens = OrderedDict([(ens.id, ens)])
            if getmodes:
                eofs = OrderedDict([(eofs.id, eofs)])
                variance = OrderedDict([(variance.id, variance)])
        else:
            ens = OrderedDict([(var.id, var) for var in ens])
            if getmodes:
                eofs = OrderedDict([(var.id, var) for var in eofs])
                variance = OrderedDict([(var.id, var) for var in variance])

    # Return
    if not getmodes:
        return ens
    return ens, dict(eofs=eofs, eigenvalues=svals, variance=variance)
Пример #11
0
    if RE_MATCH_TIME(arg) and not arg.isdigit(): # date
        return arg
    if re.match('[\w\s]*:[\w\s]*(:[\w\s]*)?$', arg): # args to slice
        return slice(*[(None if a=='' else eval(a)) for a in arg.split(':')])
    arg = eval(arg)
    if isinstance(arg, int): # slice at a single index
        return slice(arg, (arg+1) if arg!=-1 else None)
    if isinstance(arg, float): # selection of a single position
        return (arg, arg, 'cob')
    return arg

# Open reader or file
if args.generic:
    f = DS(args.ncfile, args.generic, logger_level='info' if args.verbose else 'error')
else:
    f = cdms2.open(args.ncfile)

# Find a variable name if not provided
if not args.var:
    if args.generic:
        allvars = f.dataset[0].listvariables()
    else:
        allvars = f.listvariables()
    for var in allvars:
        if 'bounds' not in var:
            if args.generic:
                var = '+'+var
            args.var = [var]
            break
else:
    args.var = args.var[:3]
Пример #12
0
        return slice(*[(None if a == '' else eval(a)) for a in arg.split(':')])
    arg = eval(arg)
    if isinstance(arg, int):  # slice at a single index
        return slice(arg, (arg + 1) if arg != -1 else None)
    if isinstance(arg, float):  # selection of a single position
        return (arg, arg, 'cob')
    return arg


# Open a generic dataset reader or a plain netcdf file
if args.generic:
    f = DS(args.ncfile,
           args.generic,
           logger_level='info' if args.verbose else 'error')
else:
    f = cdms2.open(args.ncfile)

# Find a variable name if not provided on the command line
if not args.var:
    if args.generic:
        allvars = f.dataset[0].listvariables()
    else:
        allvars = f.listvariables()
    # Pick the first variable that is not a bounds variable
    for var in allvars:
        if 'bounds' not in var:
            if args.generic:
                # '+' prefix — presumably asks the DS reader for the raw
                # (native) variable; confirm against the DS API
                var = '+' + var
            args.var = [var]
            break
else:
    # Keep at most three variables
    args.var = args.var[:3]
Пример #13
0
"""Testing to read/modidy/(re)write NEtCDF files without loosing attributes"""

# Inits
ncfile = "nobounds.nc"
outfile = "result.nc"

# Imports
from vcmq import cdms2, warnings, data_sample, code_dir_name, os
from vacumm.misc.misc import get_atts

# Warnings off
warnings.simplefilter('ignore')

# Open input file
f = cdms2.open(data_sample(ncfile))

# Open output file
outfile = os.path.join(code_dir_name(), outfile)
fw = cdms2.open(outfile, 'w')

# Cancel the default behaviour (<=> generating bounds variables)
cdms2.setAutoBounds('off')

# -- Read and write in another file variables
for var in f.listvariable():
    print '>> ' + var + ' <<'
    data = f(var)
    try:  # - Variable is an n-dimension array
        data.getValue()
        # - Example of moficiation - mask an area
Пример #14
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Handling gridded bathymetry (:mod:`vacumm.bathy.bathy`)"""

from vcmq import cdms2, data_sample, create_grid
from vacumm.bathy.bathy import GriddedBathy, NcGriddedBathy, bathy_list, GriddedBathyMerger, plot_bathy

# Read a variable from a file (declare 'x'/'y' as coordinate aliases first)
cdms2.axis.latitude_aliases.append('y')
cdms2.axis.longitude_aliases.append('x')
f = cdms2.open(data_sample('ETOPO2v2g_flt.grd'))
var = f('z', lon=(-6.1, -4), lat=(47.8, 48.6))
f.close()

# Create the gridded bathymetry object
bathy = GriddedBathy(var)

# Masked versions
bathy_masked1 = bathy.masked_bathy()                        # -> how many masked points?
bathy.set_shoreline('f')                                    # -> try 'i'
bathy_masked2 = bathy.masked_bathy()                        # -> how many masked points?

# Plot
bathy.plot(title='Direct')


# Standard bathymetry
# NOTE(review): this re-import is redundant — both names were already
# imported at the top of the script.
from vacumm.bathy.bathy import NcGriddedBathy, bathy_list
print bathy_list().keys()
sbathy = NcGriddedBathy(lon=(-5.2, -3), lat=(48., 48.9), name='etopo2')
sbathy.plot(title='Bathy par defaut')
Пример #15
0
"""Test :meth:`~vacumm.misc.core_plot.Plot2D.plot_streamplot`"""

from vcmq import map2, data_sample, cdms2, code_file_name
import warnings
warnings.filterwarnings('ignore',
                        'Warning: converting a masked element to nan')

f = cdms2.open(data_sample('mars2d.xyt.nc'))
u = f('u', slice(0, 1), squeeze=1)
v = f('v', slice(0, 1), squeeze=1)
f.close()

map2((u, v),
     fill=False,
     contour=False,
     streamplot=True,
     streamplot_density=3,
     streamplot_linewidth='modulus',
     streamplot_lwmod=3,
     streamplot_color='modulus',
     title='Tidal stream lines',
     colorbar_shrink=0.7,
     savefig=code_file_name(),
     right=1,
     show=False)
Пример #16
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""1D and 2D filters"""

# NOTE(review): curve2 appears twice in this import — redundant but harmless
from vcmq import cdms2, curve2, map2, generic1d, generic2d, shapiro2d, bartlett1d, data_sample, curve2, shapiro1d, gaussian1d, gaussian2d

# Read input data: hourly SST series (1D) and a surface SST map (2D)
f = cdms2.open(data_sample('mars3d.t.nc'))
sst1d = f('temp')
f.close()
f = cdms2.open(data_sample('menor.nc'))
sst2d = f('TEMP', time=slice(0, 1), level=slice(-1, None), squeeze=1)
f.close()



# 1D

# - filters
sst1d_gen13 = generic1d(sst1d, 13)
sst1d_gen3 = generic1d(sst1d, [1., 1., 1.])
sst1d_sha = shapiro1d(sst1d)        # -> SHAPIRO WITH GENERIC1D
sst1d_bar13 = bartlett1d(sst1d, 13)
# -> TRY A GAUSSIAN OVER 13 HOURS

# - plots
curve2(sst1d, 'k', label='Original', show=False, figsize=(12, 5))
curve2(sst1d_gen13, 'r', label='Generic 13 pts', show=False)
curve2(sst1d_gen3, 'g', label='Generic 3 pts', show=False)
curve2(sst1d_sha, 'b', label='shapiro', show=False)
curve2(sst1d_bar13, 'm', label='Bartlett', legend=True)
Пример #17
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Computations on large files"""

from vcmq import cdms2, Intervals, data_sample, StatAccum, cdtime, curve2, N


# Initialisations

# - open the file and get its time information
f = cdms2.open(data_sample("mars3d.xt.xe.nc"))
ctimes = f["xe"].getTime().asComponentTime()

# - use a statistics accumulator to keep memory usage low
sa = StatAccum(tmean=True)


# Loop over daily intervals
for itv in Intervals((ctimes[0], ctimes[-1], "cc"), "day"):  # -> TRY 2 DAYS

    # Read this interval only
    print itv
    tmp = f("xe", time=itv)

    # Accumulate the statistics, then free the chunk
    sa += tmp
    del tmp

# Finalization

# - retrieve the results
Пример #18
0
# NOTE(review): this chunk starts mid-script — lons, lats, ncfile and
# create_dep/create_lon/create_lat are defined earlier in the original
# file; confirm before reusing in isolation.
daxis = create_dep((-100., 1, 5.))
nz = len(daxis)
np = len(lons)
# Synthetic temperature/salinity error profiles at the stations
temp_error = N.resize([.2, .5, .3, .8], (nz, np))
sal_error = N.resize([.3, .1, .15, .4], (nz, np))
temp_error = MV2.array(temp_error, id='temp_error')
sal_error = MV2.array(sal_error, id='sal_error')
# Mask the upper part of stations 2 and 3 (Python 2 integer division)
temp_error[:nz / 2, 2] = MV2.masked
sal_error[:nz / 2, 2] = MV2.masked
temp_error[:3 * nz / 4, 3] = MV2.masked
sal_error[:3 * nz / 4, 3] = MV2.masked
mob = MV2.array([0, 1, 1, 0], id='mobility', fill_value=-1)
paxis = lons.getAxis(0)
paxis.id = 'station'
axes = [daxis, paxis]
# Write all variables with their proper axes attached
f = cdms2.open(ncfile, 'w')
for var in lons, lats, mob, temp_error, sal_error:
    var.setAxis(-1, paxis)
    if var.ndim == 2:
        var.setAxis(0, daxis)
    f.write(var)
f.close()
print os.path.abspath(ncfile)

# HF radars: regular lon/lat grid with a constant u error
ncfile = '../data/obs.hfradars.nc'
lon = create_lon((-5.8, -4.7, .1))
lat = create_lat((47.8, 48.3, .1))
nx = len(lon)
ny = len(lat)
u_error = MV2.ones((ny, nx), id='u_error', axes=[lat, lon])
"""Test :func:`~vacumm.misc.grid.regridding.transect` on a curvilinear grid"""

# Inits
ncfile = "swan.four.nc"
lon0 = -5.1
lon1 = -4.9
lat0 = 48.35
lat1 = 48.55


# Imports
from vcmq import cdms2, data_sample, N, transect, curve2, code_file_name, os, \
    add_map_lines,  P, add_shadow

# Read data
f = cdms2.open(data_sample(ncfile))
hs = f('HS', squeeze=1)
f.close()

# Compute transect
hst, lons, lats = transect(hs, (lon0,lon1), (lat0,lat1), getcoords=True)

# Plot along time
s = curve2(hst, figsize=(8,3), title='Spatial transect on curved grid', 
    show=False, top=0.85)

# Add a small map to show the transect positions
o = add_map_lines(hs, lons, lats, map_bgcolor='w', map_bbox= [.6, .2, .3, .5], map_anchor='W')

# Save
figfile = code_file_name(ext='png')
Пример #20
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Regridding"""


from vcmq import N, cdms2, curve2, map2, regrid1d, regrid2d, data_sample, \
    create_grid, regrid_method, CDATRegridder, resol, add_grid, P


# Read the input SST and get its grid resolution
f = cdms2.open(data_sample('mfs.nc'))
select = dict(lat=(42.5, 43.5), lon=(3, 4))
ssti = f('votemper', level=slice(0, 1), squeeze=1, **select)
f.close()
gridi = ssti.getGrid()
dxi, dyi = resol(gridi)


# New target grid, scaled from the input resolution
factor = 0.634                                                # -> CHANGE THE FACTOR
dxo = dxi*factor
dyo = dyi*factor
grido = create_grid((3.13, 3.8, dxo), (42.62, 43.4, dyo))


# Which regridding method is appropriate?
method = regrid_method(gridi, grido)
print method


# Regridding
from vcmq import (cdms2, data_sample, N, cdtime, curve2, round_date,
                  create_time, lindates, regrid1d, add_key, P)
from vacumm.misc.grid._interp_ import cellerr1d
from scipy.stats import linregress

# Read the radial speed and its error
f = cdms2.open(data_sample('radial_speed.nc'))
sp = f('speed')
spe = f('speed_error')
f.close()

# Create an hourly time axis spanning the sampled period
taxi = sp.getTime()
taxi.toRelativeTime('hours since 2000')
ctimesi = taxi.asComponentTime()
ct0 = round_date(ctimesi[0], 'hour')
ct1 = round_date(ctimesi[-1], 'hour')
taxo = create_time(lindates(ct0, ct1, 1, 'hour'), taxi.units)

# Lag error
# - estimation: RMS difference between the series and its lagged copy
els = []
lags = N.arange(1, 6)
for lag in lags:
    els.append(N.sqrt(((sp[lag:] - sp[:-lag])**2).mean()))
els = N.array(els)
a, b, _, _, _ = linregress(lags, els)
# - plot the lag errors
P.figure(figsize=(6, 6))
P.subplot(211)
P.plot(lags, els, 'o')
# Imports
#from matplotlib import use ; use('Agg')
from vcmq import cdms2, bounds1d, bounds2d, data_sample, MV2, cp_props, N, minmax, code_file_name, P, os, psinfo, gc
#from cdms2.mvCdmsRegrid import CdmsRegrid
from time import time
from traceback import format_exc
from mpi4py import MPI

# MPI setup: rank identifies this process, size the total process count
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()


# NOTE(review): ncfile, ncvar, yslice, xslice, nt and nz are defined
# earlier in the original file (this is a mid-file chunk) — confirm
# before reusing in isolation.
#if rank==0: print 'read'
f = cdms2.open(data_sample(ncfile))
vari2d = f(ncvar)[0,yslice,xslice]
f.close()
gridi = vari2d.getGrid()
# Offset the values and mask the last column to exercise the regridders
vari2d[:] += 3*vari2d.mean()
vari2d[:, -1] = MV2.masked
nyi,nxi = vari2d.shape

#if rank==0: print 'expand in time and depth'
vari = MV2.resize(vari2d, (nt, nz)+vari2d.shape)
cp_props(vari2d, vari)


#if rank==0: print 'grido'
loni = gridi.getLongitude()
lati = gridi.getLatitude()
Пример #23
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Sigma coordinates handling and 1D interpolation"""

from vcmq import NcSigma, SigmaGeneralized, sigma2depths, cdms2, N, section2, create_depth, regrid1d, data_sample


# Read temperature
f =cdms2.open(data_sample('mars3d.tsigyx.nc'))
data_in = f('TEMP', time=slice(0,2)) # T-YX (- = level)


# Compute the depths from the sigma coordinates
sigma_converter = NcSigma.factory(f)                # auto-detection and converter initialisation
# -> CHECK THAT sigma_class IS SigmaGeneralized
depths_in = sigma_converter.sigma_to_depths(selector=dict(time=slice(0,2))).filled()          # reads eta, etc. + conversion
# (Equivalent to depths_in = sigma_converter(selector=dict(time=slice(0,2))).filled())
f.close()


# Create the target depth axis
depths = N.array([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,
    22,24,26,28,30,32,34,36,38,40,45,50,55,60,65,70,75,80,85,90,95,100,120,140,160])
depths = -depths[::-1] # increasing and negative
depth_out = create_depth(depths)


# Interpolation onto the fixed depth axis
data_out = regrid1d(data_in, depth_out, axi=depths_in, axis=1, method='linear', extrap=1)

Пример #24
0
    curve2,
    map2,
    regrid1d,
    regrid2d,
    data_sample,
    create_grid,
    regrid_method,
    CDATRegridder,
    resol,
    add_grid,
    P,
)


# Read the input SST and get its grid resolution
f = cdms2.open(data_sample("mfs.nc"))
select = dict(lat=(42.5, 43.5), lon=(3, 4))
ssti = f("votemper", level=slice(0, 1), squeeze=1, **select)
f.close()
gridi = ssti.getGrid()
dxi, dyi = resol(gridi)


# New target grid, scaled from the input resolution
factor = 0.634  # -> CHANGE THE FACTOR
dxo = dxi * factor
dyo = dyi * factor
grido = create_grid((3.13, 3.8, dxo), (42.62, 43.4, dyo))


# Which regridding method is appropriate?
Пример #25
0
# -*- coding: utf8 -*-
# Read the sea level record at Brest
from vcmq import cdms2, P, curve2, savefigs, data_sample
ncf = cdms2.open(data_sample("tide.sealevel.BREST.mars.nc"))
sea_level = ncf('sea_level')
ncf.close()

# Filtering: separate the astronomical tide from the surge residual
from vacumm.tide.filters import demerliac
cotes, tide = demerliac(sea_level, get_tide=True)

# Plots
common = dict(date_fmt='%d/%m', show=False, date_rotation=0)
# - original signal with the extracted tide on top
curve2(sea_level, 'k', subplot=211, label='Original',
       title='Sea level at Brest', **common)
curve2(tide, 'b', label='Tidal signal', **common)
P.legend().legendPatch.set_alpha(.7)
# - surges/residuals below
curve2(cotes, 'r', subplot=212, hspace=.3, label='Demerliac', **common)
P.legend().legendPatch.set_alpha(.7)
savefigs(__file__, savefigs_pdf=True)
P.close()
Пример #26
0
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Sigma coordinates handling and 1D interpolation"""

from vcmq import NcSigma, SigmaGeneralized, sigma2depths, cdms2, N, section2, create_depth, regrid1d, data_sample

# Read temperature
f = cdms2.open(data_sample('mars3d.tsigyx.nc'))
data_in = f('TEMP', time=slice(0, 2))  # T-YX (- = level)

# Compute the depths from the sigma coordinates
sigma_converter = NcSigma.factory(
    f)  # auto-detection and converter initialisation
# -> CHECK THAT sigma_class IS SigmaGeneralized
depths_in = sigma_converter.sigma_to_depths(selector=dict(
    time=slice(0, 2))).filled()  # reads eta, etc. + conversion
# (Equivalent to depths_in = sigma_converter(selector=dict(time=slice(0,2))).filled())
f.close()

# Create the target depth axis
depths = N.array([
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
    22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85,
    90, 95, 100, 120, 140, 160
])
depths = -depths[::-1]  # increasing and negative
depth_out = create_depth(depths)

# Interpolation
data_out = regrid1d(data_in,
                    depth_out,