def plot(model, real, ax):
    
    ### open files
    atm = open_ncfile(home+'tas/historical/'+real+'/tas_'+model+ \
                      '_'+real+'_nino34.nc')
    ocean = open_ncfile(home+'tos/historical/'+real+'/tos_'+model+ \
                        '_'+real+'_nino34.nc')
        
    if model == 'HadGEM2':
        tos = ocean.variables['tos'][:1752,:,:]
        tas = atm.variables['tas'][:1752,:,:]
        
    else:
        tos = ocean.variables['tos'][:,:,:]
        tas = atm.variables['tas'][:,:,:]

    ### make arrays one dimensional for correlation and scatter plot
    tas = tas.flatten()
    tos = tos.flatten()

    ### make arrays two dimensional for linear regression  
    X = tas.reshape(-1, 1)  
    Y = tos.reshape(-1, 1)  
    
    ### Linear regression    
    reg = LinearRegression()  
    reg.fit(X, Y)  
    Y_pred = reg.predict(X)
    
    ### Information about linear regression model
    X2 = sm.add_constant(X)
    est = sm.OLS(Y, X2)
    est2 = est.fit()
    print(est2.summary())
    
    ### Pearson correlation (pearsonr expects 1-D inputs)
    corr, p_value = pearsonr(tas, tos)

    ### Plot
    ax.scatter(tas, tos, marker='o', s=2, c='k')
    ax.plot(tas, Y_pred, label='Reg. coeff: ' + str("%.2f" % reg.coef_[0][0]) +
            '\nIntercept: ' + str("%.2f" % reg.intercept_[0]) +
            '\nCorr. coeff: ' + str("%.2f" % corr) +
            '\nP-value: ' + str("%.2f" % p_value))

    if ax in (ax3, ax4, ax5):  # ax3-ax5 are module-level axes that get an x-label
        ax.set_xlabel('Tas [K]')

    ax.set_xlim(287,304)
    ax.set_ylim(287,304)
    ax.set_ylabel('Tos [K]')
    ax.set_title(real)
    ax.legend(loc='best', frameon=False)
    plt.suptitle(model)
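
# --- Usage sketch (added, not part of the original example): a hypothetical driver
# --- for the plot() function above. It assumes the module-level names the function
# --- relies on (`home`, `open_ncfile`, `LinearRegression`, `sm`, `pearsonr`, `plt`,
# --- and the axes ax3-ax5); the data root and realisation names are placeholders.
import matplotlib.pyplot as plt
import statsmodels.api as sm
from netCDF4 import Dataset as open_ncfile
from scipy.stats import pearsonr
from sklearn.linear_model import LinearRegression

home = '/path/to/nino34/data/'  # hypothetical data root

fig, ((ax0, ax1, ax2), (ax3, ax4, ax5)) = plt.subplots(2, 3, figsize=(12, 8),
                                                       sharex=True, sharey=True)
for a, real in zip((ax0, ax1, ax2, ax3, ax4, ax5),
                   ('r1i1p1', 'r2i1p1', 'r3i1p1', 'r4i1p1', 'r5i1p1', 'r6i1p1')):
    plot('HadGEM2', real, a)
plt.show()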
Example 2
def read_gsat_1pctCO2(indir, models, ignore):
    """
    Read GSAT array from different model files, save anomaly in one output array with one dimension being the total
    number of runs, the other being the time
    :param indir: directories where GSAT files are stored
           models: dict containing models' characteristics
           ignore: list of models to ignore
    :return: GSAT anomaly array (years, nb of runs)

    """

    nmodels = len(models)
    nMembers = np.ma.zeros(nmodels)
    timN = 140

    indir_1pctCO2 = indir + '1pctCO2/'
    indir_piC = indir + 'piControl/'

    # -- Initialize GSAT
    gsat_anom = np.ma.masked_all((timN, nmodels - len(ignore)))
    imod = 0

    for i, model in enumerate(models):

        if model['name'] not in ignore:

            # Read GSAT 1pctCO2
            file_gsat_CO2 = glob.glob(indir_1pctCO2 + '*' + model['name'] +
                                      '*.nc')[0]  # GSAT File
            fgsatCO2 = open_ncfile(file_gsat_CO2, 'r')
            gsatread_CO2 = fgsatCO2.variables['GSAT'][0:timN]
            print('- Reading GSAT of %s' % (model['name'], ))

            # Read GSAT PiControl
            file_gsat_piC = glob.glob(indir_piC + '*' + model['name'] +
                                      '*.nc')[0]  # GSAT File
            fgsatpiC = open_ncfile(file_gsat_piC, 'r')
            gsatread_piC = fgsatpiC.variables['GSAT'][:]

            # Compute and Save GSAT anomaly
            gsat_anom[:, imod] = gsatread_CO2 - np.ma.average(gsatread_piC,
                                                              axis=0)
            imod = imod + 1

            nMembers[i] = 1

    print('Total number of models:', np.sum(nMembers))

    return gsat_anom
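
# --- Usage sketch (added): a hypothetical call of read_gsat_1pctCO2(). The `models`
# --- list of dicts (with a 'name' key), the ignore list and the directory layout
# --- (<indir>/1pctCO2/, <indir>/piControl/) are placeholders consistent with the code above.
models = [{'name': 'CCSM4'}, {'name': 'IPSL-CM5A-LR'}]
gsat = read_gsat_1pctCO2('/path/to/GSAT/', models, ignore=['IPSL-CM5A-LR'])
print(gsat.shape)  # (140 years, number of models kept)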
Example 3
def read_toe_1pctCO2(varread, indir, models, ignore, ndomains):
    """
    Read ToE array from different model files, save in one output array with one dimension being the total number
    of models
    :param varread: string, ToE variable name in files
           indir: directory of files to read (one file per model)
           models: dict containing models' characteristics
           ignore: list of models to ignore
           ndomains: number of domain names
    :return: varToEA: numpy array (number of total runs, number of domains) ToE in the Atlantic
             varToEP: numpy array (number of total runs, number of domains) ToE in the Pacific
             varToEI: numpy array (number of total runs, number of domains) ToE in the Indian
             nMembers: numpy array containing number of members per model, dimension=nb of models
    """

    nmodels = len(models)

    # -- Initialize varToE containing ToE
    varToEA = np.ma.masked_all((nmodels, ndomains))
    varToEP = np.ma.masked_all((nmodels, ndomains))
    varToEI = np.ma.masked_all((nmodels, ndomains))

    nMembers = np.ma.zeros(nmodels)

    for i, model in enumerate(models):

        if model['name'] not in ignore:

            # Read file
            file_CO2piC = glob.glob(indir + '/' + '*' + model['name'] +
                                    '*.nc')[0]
            fpiC = open_ncfile(file_CO2piC, 'r')

            # Read ToE (basin, domain)
            toeread = fpiC.variables[varread][:]
            print('- Reading ToE of ' + model['name'])

            # Save ToE
            varToEA[i, :] = toeread[1, :]
            varToEP[i, :] = toeread[2, :]
            varToEI[i, :] = toeread[3, :]

            nMembers[i] = 1

    print('Total number of models:', np.sum(nMembers))

    # Remove rows with masked values (due to ignored model(s))
    if len(ignore) != 0:
        idx = np.argwhere(nMembers == 0).flatten()  # indices of all ignored models
        if idx.size != 0:
            varToEA = np.ma.masked_greater(np.delete(varToEA, idx, 0), 200)  # Delete rows, then turn back into a masked array
            varToEP = np.ma.masked_greater(np.delete(varToEP, idx, 0), 200)
            varToEI = np.ma.masked_greater(np.delete(varToEI, idx, 0), 200)

    return varToEA, varToEP, varToEI, nMembers
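
# --- Usage sketch (added): a hypothetical call of read_toe_1pctCO2(); the variable
# --- name, directory and model list are placeholders matching the signature above.
models = [{'name': 'CCSM4'}, {'name': 'CanESM2'}]
toeA, toeP, toeI, nmem = read_toe_1pctCO2('SToE2', '/path/to/toe_1pctCO2_piC',
                                          models, ignore=[], ndomains=5)
print(toeA.shape)  # (number of models, number of domains)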
Example 4
def ammann_preprocessing():

    file = RAW_DATA_FOLDER + "ammann2003b_volcanics.nc"
    nc = open_ncfile(file)

    aod_var = nc.variables['TAUSTR'][:, :]
    aod = np.ndarray((1, len(aod_var[:, 0]), len(aod_var[0, :])))
    aod[0] = aod_var
    wavelength = [550]
    time = nc.variables['time'][:]

    years = []
    # Convert yyyymm format to year format using center of each month
    for index, element in enumerate(time):
        fractional_year = float(
            str(element)[0:4]) + (float(str(element)[4:6]) - 0.5) / 12
        fractional_year = round(fractional_year, 3)
        years.append(fractional_year)
    time = years
    time_units = 'years'

    latitude = nc.variables['lat'][:]

    aod_obj = AodData(aod, latitude, time, time_units, wavelength)
    aod_obj.save_as_nc(PROCESSED_DATA_FOLDER + 'processed_ammann.nc')
Example 5
def evolv2k_preprocessing():
    file = RAW_DATA_FOLDER + "eVolv2k_v3_EVA_AOD_-500_1900_1.nc"
    nc = open_ncfile(file)
    aod_var = nc.variables['aod550'][:, :]
    aod = np.ndarray((1, len(aod_var[:, 0]), len(aod_var[0, :])))
    aod[0] = aod_var
    wavelength = [550]
    time_units = 'years'
    time = nc.variables['time'][:]      # read the values (not the Variable object), as in the other functions
    latitude = nc.variables['lat'][:]
    aod_obj = AodData(aod, latitude, time, time_units, wavelength)
    aod_obj.save_as_nc(PROCESSED_DATA_FOLDER + 'processed_eVolv2k.nc')
Example 6
def ugh(period, step):

    file = open_ncfile('MODIS_GPP_' + period + '.nc')

    lat = file.variables['lat'][:]
    lon = file.variables['lon'][:]
    veg = file.variables['Gpp_500m'][step, :, :]

    # create dataset
    ds = xr.Dataset(
        {
            'GPP':
            xr.DataArray(data=veg,
                         dims=['lat', 'lon'],
                         coords=[lat, lon],
                         attrs={
                             'long_name': 'GPP',
                             'units': 'kgC m-2'
                         })
        },
        attrs={
            'Conventions': 'CF-1.6',
            'Institution': 'Land Processes Distributed Active '
            'Archive Center (LP DAAC)',
            'Source': 'AppEEARS v2.53',
            'Title': 'MOD44B.006 for aid0001'
        })

    ds['lat'].attrs = {
        'units': 'degrees_north',
        'long_name': 'latitude',
        'standard_name': 'latitude',
        'axis': 'Y'
    }
    ds['lon'].attrs = {
        'units': 'degrees_east',
        'long_name': 'longitude',
        'standard_name': 'longitude',
        'axis': 'X'
    }

    ds.to_netcdf('GPP_' + str(step) + '.nc',
                 encoding={
                     'lat': {
                         'dtype': 'double'
                     },
                     'lon': {
                         'dtype': 'double'
                     },
                     'GPP': {
                         'dtype': 'float32'
                     }
                 })
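
# --- Usage sketch (added): the function above writes one 'GPP_<step>.nc' file per time
# --- step; a hypothetical loop over every step of a 'MODIS_GPP_<period>.nc' file
# --- (the period string is a placeholder) could look like this.
nsteps = open_ncfile('MODIS_GPP_2000-2010.nc').variables['Gpp_500m'].shape[0]
for step in range(nsteps):
    ugh('2000-2010', step)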
Example 7
def read_toe_rcp85(varread, listfiles, ignore, ndomains):
    """
    Read ToE array from different model files, save in one output array with one dimension being the total number
    of runs
    :param varread: string, ToE variable name in files
           listfiles: list of files to read (one file per model)
           ignore: list of models to ignore
           ndomains: number of domain names
    :return: varToEA: numpy array (number of total runs, number of domains) ToE in the Atlantic
             varToEP: numpy array (number of total runs, number of domains) ToE in the Pacific
             varToEI: numpy array (number of total runs, number of domains) ToE in the Indian
             nMembers: numpy array containing number of members per model, dimension=nb of models
    """
    nruns = 0
    nrunmax = 100
    nmodels = len(listfiles)
    nMembers = np.ma.zeros(nmodels)

    # Initialize varToE containing ToE of all runs
    varToEA = np.ma.masked_all((nrunmax, ndomains))
    varToEP = np.ma.masked_all((nrunmax, ndomains))
    varToEI = np.ma.masked_all((nrunmax, ndomains))

    for i in range(nmodels):
        file_toe = listfiles[i]
        ftoe = open_ncfile(file_toe, 'r')
        name = os.path.basename(file_toe).split('.')[1]

        if name not in ignore:
            # Read ToE (members, basin, domain)
            toeread = ftoe.variables[varread][:]
            nMembers[i] = int(toeread.shape[0])
            print('- Reading ToE of %s with %d members' % (name, nMembers[i]))
            nruns1 = int(nruns + nMembers[i])

            run_labels = ftoe.variables['run_label'][:]
            #print('   ',run_labels)

            # Save ToE
            varToEA[nruns:nruns1, :] = toeread[:, 1, :]
            varToEP[nruns:nruns1, :] = toeread[:, 2, :]
            varToEI[nruns:nruns1, :] = toeread[:, 3, :]

            nruns = nruns1

    print('Total number of runs:', nruns)
    varToEA = varToEA[0:nruns, :]
    varToEP = varToEP[0:nruns, :]
    varToEI = varToEI[0:nruns, :]

    return varToEA, varToEP, varToEI, nMembers
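
# --- Usage sketch (added): a hypothetical call of read_toe_rcp85(). File names are
# --- assumed to follow the 'cmip5.<model>.<...>.nc' pattern used elsewhere in these
# --- examples (the model name is taken from the second dot-separated field).
import glob
listfiles = sorted(glob.glob('/path/to/toe_rcp85/*.nc'))
toeA, toeP, toeI, nmem = read_toe_rcp85('SToE2', listfiles,
                                        ignore=['MIROC-ESM'], ndomains=5)
print(toeA.shape)  # (total number of runs, number of domains)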
Example 8
def tau_map_preprocessing():
    file = RAW_DATA_FOLDER + "tau_map_2012-12.nc"
    nc = open_ncfile(file)

    aod_var = nc.variables['tau'][:, :]
    aod = np.ndarray((1, len(aod_var[:, 0]), len(aod_var[0, :])))
    aod[0] = aod_var
    wavelength = [550]
    time_units = 'days'
    time = nc.variables['month'][:]
    latitude = nc.variables['lat'][:]

    aod_obj = AodData(aod, latitude, time, time_units, wavelength)
    aod_obj.save_as_nc(PROCESSED_DATA_FOLDER + 'processed_tau_map.nc')
Example 9
def plot_gm_aod(dataset, time='years', fig=None, ax=None):
    # open the data
    nc = open_ncfile(dataset)
    wavelength = nc.variables['wavelength'][0]
    gm_aod = nc.variables['GM_AOD'][0, :]
    time_data = nc.variables[time][:]
    # Remove high value data masks
    gm_aod = np.ma.masked_where(gm_aod > 100, gm_aod)
    # Plot the data
    title = ("Global Mean AOD Trend at Wavelength: " + str(wavelength) + 'nm')
    xlabel = ("Time (" + time + ")")
    ylabel = ("Global Mean AOD at lambda = " + str(wavelength))
    fig = figure(title=title, x_axis_label=xlabel, y_axis_label=ylabel)
    fig.line(time_data, gm_aod)
    return fig
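
# --- Usage sketch (added): plot_gm_aod() builds a Bokeh figure (figure()/line() are
# --- assumed to come from bokeh.plotting); a hypothetical call on one of the processed
# --- files written above, assuming it contains 'wavelength', 'GM_AOD' and 'years'.
from bokeh.plotting import show
show(plot_gm_aod('processed_eVolv2k.nc', time='years'))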
Example 10
def volmip_preprocessing():
    file = RAW_DATA_FOLDER + "CMIP_VOLMIP550_radiation_v4_1850-2018.nc"
    nc = open_ncfile(file)

    alt = nc.variables['altitude'][:]
    wl1_earth = nc.variables['wl1_earth'][:]
    wl1_sun = nc.variables['wl1_sun'][:]
    lat = nc.variables['latitude'][:]
    ext_earth = nc.variables['ext_earth'][:, :, :, :]
    ext_sun = nc.variables['ext_sun'][:, :, :, :]
    wavelength = [550]
    time_units = 'months'
    time = nc.variables['month'][:]

    # EXT dimensions go wl1, lat, alt, time, need to rearrange
    # Want time, wl1, lat, alt
    ext_earth = np.transpose(ext_earth, (3, 0, 1, 2))
    ext_sun = np.transpose(ext_sun, (3, 0, 1, 2))

    # Want to iterate over all altitudes, in case altitude differences are not consistently spaced.
    # Build the aod arrays and initialize to zeros, so we can add to them
    aod_earth = np.zeros((len(time), len(wl1_earth), len(lat)))
    aod_sun = np.zeros((len(time), len(wl1_sun), len(lat)))
    # Extrapolate past the edge of altitude, in order to get a layer thickness for the first point
    last_alt = alt[0] - (alt[1] - alt[0])
    # Sum ext*d(alt) over all altitudes (loop variable renamed to avoid shadowing the alt array)
    for alt_index, alt_val in enumerate(alt):
        aod_earth += ext_earth[:, :, :, alt_index] * (alt_val - last_alt)
        aod_sun += ext_sun[:, :, :, alt_index] * (alt_val - last_alt)
        last_alt = alt_val

    # Use aod_sun because it is the one at 550nm
    # Rearrange again - AodData expects (wavelength, time, latitude)
    aod = np.transpose(aod_sun, (1, 0, 2))
    aod_obj = AodData(aod, lat, time, time_units, wavelength)
    aod_obj.save_as_nc(PROCESSED_DATA_FOLDER + 'processed_volmip.nc')
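
# --- Added note: the altitude loop above approximates AOD(time, wl, lat) as
# --- sum_z ext(time, wl, lat, z) * dz, extrapolating one level below the first altitude
# --- so that every level has a thickness. A vectorised sketch of the same integration
# --- (a helper that is not in the original code; ext has axes (time, wl, lat, alt)):
import numpy as np

def integrate_extinction(ext, alt):
    """Column integral: sum of ext times the layer thickness along the last axis."""
    dz = np.diff(alt, prepend=alt[0] - (alt[1] - alt[0]))  # per-level thickness, first one extrapolated
    return np.sum(ext * dz, axis=-1)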
Example 11
def CMIP6_preprocessing():
    file = RAW_DATA_FOLDER + "CMIP_1850_2014_extinction_550nm_strat_only_v3.nc"
    nc = open_ncfile(file)

    alt = nc.variables['altitude'][:]
    lat = nc.variables['latitude'][:]
    ext = nc.variables['ext550'][:, :, :]
    wavelength = [550]
    time_units = 'months'
    time = nc.variables['month'][:]  # months since jan 1 1850
    # NOTE the nc file data says this is in months since jan 1960, but it appears that is incorrect.

    # Add a wavelength dimension to the extinction (only one, because ext is already at 550 nm)
    ext_tmp = np.zeros((1, len(lat), len(alt), len(time)))
    ext_tmp[0, :, :, :] = ext
    ext = ext_tmp
    # EXT dimensions go wl1, lat, alt, time, need to rearrange
    # Want time, wl1, lat, alt
    ext = np.transpose(ext, (3, 0, 1, 2))

    # Want to iterate over all altitudes, in case altitude differences are not consistently spaced.
    # Build the aod arrays and initialize to zeros, so we can add to them
    aod = np.zeros((len(time), 1, len(lat)))
    # Extrapolate past the edge of altitude, in order to get a layer thickness for the first point
    last_alt = alt[0] - (alt[1] - alt[0])
    # Sum ext*d(alt) over all altitudes (loop variable renamed to avoid shadowing the alt array)
    for alt_index, alt_val in enumerate(alt):
        aod += ext[:, :, :, alt_index] * (alt_val - last_alt)
        last_alt = alt_val

    # Rearrange again - AodData expects (wavelength, time, latitude)
    aod = np.transpose(aod, (1, 0, 2))
    aod_obj = AodData(aod, lat, time, time_units, wavelength)
    aod_obj.save_as_nc(PROCESSED_DATA_FOLDER + 'processed_cmip6.nc')
Example 12
varToEA_2 = np.ma.masked_all((nrunmax, len(domains)))
varToEP_2 = np.ma.masked_all((nrunmax, len(domains)))
varToEI_2 = np.ma.masked_all((nrunmax, len(domains)))

# -- Loop over models
if use_piC == True:
    indir = indir_rcppiC
else:
    indir = indir_rcphn
listfiles = glob.glob(indir + method_noise_rcp + '/*.nc')
nmodels = len(listfiles)

for i in range(nmodels):

    file_toe = listfiles[i]
    ftoe = open_ncfile(file_toe, 'r')
    name = os.path.basename(file_toe).split('.')[1]

    # If using the same runs as in the vs. PiControl comparison, take out the deficient models
    if runs_rcp == 'all' or (runs_rcp == 'same'
                             and name not in ('GISS-E2-R', 'FGOALS-g2', 'MIROC-ESM')):

        # Read ToE (members, basin, domain)
        toe1read = ftoe.variables[var + 'ToE1'][:]
        toe2read = ftoe.variables[var + 'ToE2'][:]
        nMembers[i] = toe2read.shape[0]
        print('- Reading ToE of %s with %d members' % (name, nMembers[i]))
        nruns1 = nruns + nMembers[i]

        # Save ToE
Example 13
    y1 = 33; y2 = 38
elif focus_1pctCO2 == '2*CO2':
    y1 = 69; y2 = 74
else:
    y1 = 134; y2 = 140

imodel = 13 # Choose model index in model list (modelsDef.py)
# for imodel in range(16):

# -- Choose work files

if name == 'Durack & Wijffels':
    indir = '/data/ericglod/Density_binning/Obs_Prod_density_april16/'
    file = 'DurackandWijffels_GlobalOceanChanges-NeutralDensity_1950-2000_120209_11_46_11_beta.nc'
    data = indir + file
    fh2d = open_ncfile(data, 'r')


if name == 'mme_hist':
    indir = '/data/ericglod/Density_binning/Prod_density_april15/mme_hist/'
    file_2d = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon2D.nc'
    file_1d = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon1D.nc'
    data_2d = indir + file_2d
    data_1d = indir + file_1d
    fh2d = open_ncfile(data_2d, 'r')
    fh1d = open_ncfile(data_1d, 'r')


if name == 'mme_hist_histNat':
    indirh = '/data/ericglod/Density_binning/Prod_density_april15/mme_hist/'
    fileh_2d = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon2D.nc'
Example 14
import time                                         #-- needed for time.time() below
import matplotlib as mpl
import matplotlib.pyplot as plt
from   matplotlib.backends.backend_pdf import PdfPages
from   matplotlib.patches import Polygon
from   netCDF4 import Dataset as open_ncfile

t1 = time.time()                                    #-- retrieve start time

#--  define path, grid and variable
diri    = '/Users/k204045/NCL/PyNGL/User_Guide_examples/'   #-- data directory
fname   = 'ta_ps_850.nc'                            #-- data file
gname   = 'r2b4_amip.nc'                            #-- grid info file
VarName = 'ta'                                      #-- variable name       

#--  open data and grid file and read data and coordinate variable
f = open_ncfile(diri + fname,'r')                   #-- add data file
g = open_ncfile(diri + gname,'r')                   #-- add grid file (not contained in data file!!!)

#-- read first timestep and level of variable 'ta' 
variable =  f.variables['ta']                       #-- first time step, lev, ncells
var      =  variable[0,0,:]                         #-- ta [time,lev,ncells]
var      =  var - 273.15                            #-- convert to degrees Celsius

title    = 'Matplotlib: ICON model data'            #-- plot title string

#-- define _FillValue and missing_value if not existing
missing = -1e20

if not hasattr(var,'_FillValue'):
   var._FillValue  =  missing                       #-- set _FillValue
if not hasattr(var,'missing_value'):
   var.missing_value  =  missing                    #-- set missing_value
Example 15
def proj_map(kind, plt, ax, minmax, clevsm, clevsm_bold, lat, lon, cmap, isopyc, sliced_density, var1,
             var2 = None, var3 = None):

    isopyc_idx = np.argmin(np.abs(sliced_density - isopyc))

    if kind == 'hist-histNat':
        var_hist = np.squeeze(var1[:,isopyc_idx,:])
        var_histNat = np.squeeze(var2[:,isopyc_idx,:])
        # Difference
        var_diff = np.ma.average(var_hist[-5:, :], axis=0) - np.ma.average(var_histNat, axis=0)
        # Climatology
        var_mean = np.ma.average(var_hist, axis=0)

    elif kind == 'Durack':
        var_diff = np.squeeze(var1[isopyc_idx,:])
        var_mean = np.squeeze(var2[isopyc_idx,:])
        #Error field
        var_diff_er = np.squeeze(var3[isopyc_idx,:])
        var_diff_er = var_diff_er*1.1 # to account for a potential underestimation of the error determined by a bootstrap analysis
        var_diff_er = var_diff_er*2.58 # 99% level
        not_signif_change = np.where(np.absolute(var_diff)<var_diff_er, 1, 0)

    elif kind == 'hist':
        var = np.squeeze(var1[:,isopyc_idx,:,:])
        # look at difference between 1950 and end to compare with obs
        var_diff = np.ma.average(var[-5:,:], axis=0) - np.ma.average(var[0:5,:], axis=0)
        # Climatology
        var_mean = np.ma.average(var, axis=0)

    else :
        var_obs = np.squeeze(var1[:,isopyc_idx,:,:])
        var_diff = np.ma.average(var_obs[-5:,:], axis=0) - np.ma.average(var_obs[0:5,:], axis=0)
        # Climatology
        var_mean = np.ma.average(var_obs, axis=0)

    # Create meshgrid
    lon2d, lat2d = np.meshgrid(lon, lat)

    # Levels for shade plot
    levels = np.linspace(minmax[0],minmax[1],minmax[2])

    # Format for contour labels
    levfmt = '%.0f'
    if abs(clevsm[1] - clevsm[0]) < 1:
        levfmt = '%.1f'
    if abs(clevsm[1] - clevsm[0]) < 0.1:
        levfmt = '%.2f'

    # Read grid for coloring continents
    f2 = open_ncfile('/home/ysilvy/Density_bining/Yona_analysis/data/140807_WOD13_masks.nc', 'r')
    landsea = f2.variables['landsea'][:]

    # Create mask
    sea_mask = landsea != 1
    landsea = np.ma.array(landsea, mask=sea_mask)
    landsea[landsea == 1] = 0.2

    # Basemap
    map = Basemap(projection='cyl', llcrnrlon=20, llcrnrlat=-70., urcrnrlon=380, urcrnrlat=70, ax=ax)
    map.drawmapboundary(fill_color='1')
    map.drawparallels(np.arange(-60, 61, 20.), labels=[1, 0, 1, 0], linewidth=0.5)
    map.drawmeridians(np.arange(-180, 180, 60), labels=[0, 0, 0, 1], linewidth=0.5)

    # Draw filled contours of diff
    cnplot = map.contourf(lon2d, lat2d, var_diff, cmap=cmap, levels = levels, latlon=True, extend='both')

    if kind == 'Durack':
        map.fillcontinents(color='black')
        # -- Plot areas where data is not significant
        error_plot = map.contourf(lon2d, lat2d, not_signif_change, levels=[0.25,0.5,1.5], colors='None',
                               hatches=['','....'], edgecolor='0.6', linewidth=0.0, latlon=True)

    else :
        # Draw continents
        pc2 = map.pcolormesh(lon2d, lat2d, landsea, shading='flat', cmap=plt.cm.gray, latlon=True)

    # Draw mean contours
    cpplot1 = map.contour(lon2d, lat2d, var_mean, clevsm, colors = 'black', linewidths=0.5, latlon=True)
    #plt.clabel(cpplot1, inline=1, fontsize=10, fmt=levfmt)
    cpplot2 = map.contour(lon2d, lat2d, var_mean, clevsm_bold, colors='black', linewidths=2, latlon=True)
    plt.clabel(cpplot2, inline=1, fontsize=12, fontweight='bold', fmt=levfmt)

    # Indicate which isopycnal we're projecting on
    ax.text(0.1, 0.85, r'$\gamma = %.1f$' %(isopyc,), transform=ax.transAxes, fontweight='bold',color='w', fontsize=17)

    return cnplot, levels
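
# --- Usage sketch (added): a hypothetical call of proj_map() for the 'hist-histNat'
# --- case. var_hist4d/var_histNat4d stand for (time, density, lat, lon) arrays read
# --- elsewhere, with lat, lon and density loaded alongside them; the minmax/contour
# --- levels and colormap are placeholders, and the function also needs the WOD13 mask
# --- file hard-coded above.
fig, ax = plt.subplots(figsize=(10, 5))
cnplot, levels = proj_map('hist-histNat', plt, ax, [-0.4, 0.4, 16],
                          np.arange(33, 37, 0.5), np.arange(33, 37, 1),
                          lat, lon, plt.cm.RdBu_r, 25.0, density,
                          var_hist4d, var2=var_histNat4d)
fig.colorbar(cnplot, ax=ax, orientation='horizontal')

Example 16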
def plot(model, name, experiment, position, v):
    folderIN = home + '/TRENDY/S2/' + v + '/bootstrap/'
    folderlpj = home + '/lpj_guess/runs/global_monthly_CRUNCEP/bootstrap/'

    fname_cp = 'ensmean_annual_1960-2013_CP_detrend.nc'
    fname_ep = 'ensmean_annual_1960-2013_EP_detrend.nc'

    #-- open net-cdf and read in variables
    if model == 'LPJ-GUESS':
        if experiment == 'CP':
            trendy = open_ncfile(folderlpj + v + '_' + fname_cp)
        else:
            trendy = open_ncfile(folderlpj + v + '_' + fname_ep)

        lat = trendy.variables['Lat'][:]
        lon = trendy.variables['Lon'][:]

    elif model == 'TRENDY':
        if experiment == 'CP':
            trendy = open_ncfile(folderIN + fname_cp)
        else:
            trendy = open_ncfile(folderIN + fname_ep)

        lat = trendy.variables['latitude'][:]
        lon = trendy.variables['longitude'][:]

    else:
        if experiment == 'CP':
            trendy = open_ncfile(folderIN + model + '_' + fname_cp)
        else:
            trendy = open_ncfile(folderIN + model + '_' + fname_ep)

        lat = trendy.variables['latitude'][:]
        lon = trendy.variables['longitude'][:]

    var = trendy.variables[v][0, :, :]

    if v == 'mnee':  # note: ('mnee') is a plain string, so `in` would do substring matching
        var = var * (-1000)
    else:
        var = var * 1000

    plt.subplot(5, 4, position)

    #-- create map
    map = Basemap(projection='cyl',
                  llcrnrlat=-60.,
                  urcrnrlat=84.,
                  resolution='c',
                  llcrnrlon=0.,
                  urcrnrlon=360.)

    #-- draw coastlines and edge of map
    map.drawcoastlines(linewidth=0.2)
    x, y = map(*np.meshgrid(lon, lat))
    cut_data = var[:-1, :-1]

    #-- define the colormap
    if v in ('resp', 'ter'):
        cmap = plt.cm.BrBG_r
    else:
        cmap = plt.cm.BrBG

    #-- extract all colors colormap
    cmaplist = [cmap(i) for i in range(cmap.N)]

    #-- create the new map
    cmap = cmap.from_list('Custom cmap', cmaplist, cmap.N)

    #-- define the bins and normalize
    levels = np.arange(-28, 32, 4)
    norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)

    #-- draw filled contours
    cnplot = map.pcolormesh(x, y, cut_data, cmap=cmap, norm=norm)

    #-- plot title for subplots
    plt.title(name + ' ' + experiment, fontsize=12)

    #-- add colourbar
    plt.subplots_adjust(top=0.95,
                        left=0.02,
                        right=0.98,
                        bottom=0.11,
                        wspace=0.05,
                        hspace=0.1)
    cax = plt.axes([0.1, 0.08, 0.8, 0.02])
    cbar = fig.colorbar(cnplot, orientation='horizontal', cax=cax, ticks=levels)
    cbar.ax.tick_params(labelsize=10)

    if v in ('resp', 'ter'):
        cbar.set_label('Composite anomaly TER [gC]', fontsize=13)
    elif v == 'dist':
        cbar.set_label('Composite anomaly DIST [gC]', fontsize=13)
    elif v in ('nbp', 'mnee'):
        cbar.set_label('Composite anomaly NBP [gC]', fontsize=13)
    elif v in ('mgpp', 'gpp'):
        cbar.set_label('Composite anomaly GPP [gC]', fontsize=13)
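
# --- Usage sketch (added): a hypothetical driver for the plot() function above. It
# --- relies on a module-level `fig` (used for the colourbar) plus the imports below;
# --- the data root, model choices and variable name are placeholders.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import BoundaryNorm
from mpl_toolkits.basemap import Basemap
from netCDF4 import Dataset as open_ncfile

home = '/path/to/runs'  # hypothetical root directory
fig = plt.figure(figsize=(14, 12))
for pos, (model, label) in enumerate([('TRENDY', 'TRENDY ensemble'),
                                      ('LPJ-GUESS', 'LPJ-GUESS')], start=1):
    plot(model, label, 'CP', pos, 'mgpp')
plt.show()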
Example 17
    # Data path
    indirh = '/data/ericglod/Density_binning/Prod_density_april15/Raw/mme_hist/mme/'
    fileh = 'cmip5.' + name + '.historical.ensm.an.ocn.Omon.density.ver-' + model['file_end'] + '.nc'

else:
    indirh = '/data/ericglod/Density_binning/Prod_density_april15/Raw/mme_hist/mme/'
    fileh = 'cmip5.multimodel_All.historical.ensm.an.ocn.Omon.density_3D.nc'
    # for hist - histNat, the mme is not ready so use a single-model ensemble mean for now
    #fileh = 'cmip5.GFDL-ESM2M.historical.ensm.an.ocn.Omon.density.ver-v20130226.nc'

    if name == 'mme_hist_histNat':
        indirhn = '/data/ericglod/Density_binning/Prod_density_april15/Raw/mme_histNat/mme/'
        filehn = 'cmip5.GFDL-ESM2M.historicalNat.ensm.an.ocn.Omon.density.ver-v20110601.nc'
        datahn = indirhn + filehn
        fhn = open_ncfile(datahn,'r')

datah = indirh + fileh
fh = open_ncfile(datah,'r')

# Read variables
lat = fh.variables['latitude'][:]
lon = fh.variables['longitude'][:]


# ------ Define work and variables ---------

varname = defVarmme('salinity'); v = 'S'
#varname = defVarmme('temp'); v = 'T'

density = np.array([19.0, 19.2, 19.4, 19.6, 19.8, 20.0, 20.2, 20.4, 20.6, 20.8, 21.0, 21.2,
    21.4, 21.6, 21.8, 22.0, 22.2, 22.4, 22.6, 22.8, 23.0, 23.2, 23.4, 23.6,
    23.8, 24.0, 24.2, 24.4, 24.6, 24.8, 25.0, 25.2, 25.4, 25.6, 25.8, 26.0,
    26.1, 26.2, 26.3, 26.4, 26.5, 26.6, 26.7, 26.8, 26.9, 27.0, 27.1, 27.2,
    27.3, 27.4, 27.5, 27.6, 27.7, 27.8, 27.9, 28.0, 28.1, 28.2, 28.3, 28.4, 28.5])
Example 18
# -------------------------------------------------------------------------------

# -- Define variable properties

var = varname['var']
minmax = varname['1dminmax']
clevsm = varname['clevsm']
legVar = varname['legVar']
unit = varname['unit']

if ToE_Method == 'agreehist':
    minmax=[-1.,1.]
#
# -- Open netcdf files
nc2dh = open_ncfile(inDirh + '/' + file2dh)
if ToE_Method == 'usehistNat':
    nc2dhn = open_ncfile(inDirhn + '/' + file2dhn)

# -- Read variables
if ToE_Method == 'agreehist':
    # Read model agreement variables
    tvaraa = nc2dh.variables[var + 'Agree'][:, 1, :, :].squeeze()
    tvarap = nc2dh.variables[var + 'Agree'][:, 2, :, :].squeeze()
    tvarai = nc2dh.variables[var + 'Agree'][:, 3, :, :].squeeze()
if ToE_Method == 'usehistNat':
    print('use histNat')
    # Read var in bowl for hist and histnat
    tvaraah = nc2dh.variables[var + 'Bowl'][:, 1, :, :].squeeze()
    tvaraph = nc2dh.variables[var + 'Bowl'][:, 2, :, :].squeeze()
    tvaraih = nc2dh.variables[var + 'Bowl'][:, 3, :, :].squeeze()
Example 19
#varname = defVar('persist')
#varname = defVar('heatcontent')

iniyear = 1860
finalyear = 2005
deltay = 10.

# -------------------------------------------------------------------------------
# file inits

os.chdir(inDirh)
listFiles = glob.glob('cmip5.*_zon2D.nc')
var = varname['var']

# find dimensions
fi = open_ncfile(inDirh+'/'+listFiles[0])
print(inDirh+'/'+listFiles[0])
isond0  = fi.variables['isondepth'] ; # Create variable handle
latN = isond0.shape[3]
levN = isond0.shape[2]
basN = isond0.shape[1]
timN = isond0.shape[0]

levr = fi.variables['lev'][:]
lats = fi.variables['latitude'][:]

# init cumulated number of members
nruns = 0
nrunmax = 200
# init arrays for ToE
toe1,toe2 = [np.ma.ones([nrunmax,basN,levN,latN], dtype='float32')*1. for _ in range(2)]
# -- Initialize varnoise hist and histNat containing std of all runs for each basin
varnoise_ha = np.ma.masked_all((nrunmax, len(domains)))
varnoise_hp = np.ma.masked_all((nrunmax, len(domains)))
varnoise_hi = np.ma.masked_all((nrunmax, len(domains)))
varnoise_hna = np.ma.masked_all((nrunmax, len(domains)))
varnoise_hnp = np.ma.masked_all((nrunmax, len(domains)))
varnoise_hni = np.ma.masked_all((nrunmax, len(domains)))

# -- Loop over models

for i, model in enumerate(models):

    # Read file
    file = 'cmip5.' + model['name'] + '.noise_domains_hist_histNat.nc'
    f = open_ncfile(indir_noise + file, 'r')

    # Read noise (members, basin, domain)
    varstdh = f.variables[var+'stdh'][:]
    varstdhn = f.variables[var+'stdhn'][:]
    nMembers_h[i] = varstdh.shape[0]
    nMembers_hn[i] = varstdhn.shape[0]
    nruns1_h = nruns_h + nMembers_h[i]
    nruns1_hn = nruns_hn + nMembers_hn[i]
    print('- Reading', model['name'], 'with', nMembers_h[i], 'hist members and', nMembers_hn[i], 'histNat members')
    print('')

    # Save noise
    varnoise_ha[nruns_h:nruns1_h,:] = varstdh[:,1,:]
    varnoise_hp[nruns_h:nruns1_h,:] = varstdh[:,2,:]
    varnoise_hi[nruns_h:nruns1_h,:] = varstdh[:,3,:]
if name == 'mme':
    file2d_1pctCO2 = 'cmip5.multimodel_piCtl.1pctCO2.ensm.an.ocn.Omon.density_zon2D.nc'
    file1d_1pctCO2 = 'cmip5.multimodel_piCtl.1pctCO2.ensm.an.ocn.Omon.density_zon1D.nc'
    file2d_piC = 'cmip5.multimodel_1pct.piControl.ensm.an.ocn.Omon.density_zon2D.nc'
    file1d_piC = 'cmip5.multimodel_1pct.piControl.ensm.an.ocn.Omon.density_zon1D.nc'

else :
    models = defModelsCO2piC()
    model = models[imodel] # Iterate

    file2d_1pctCO2 = 'cmip5.' + model['name'] + '.1pctCO2.ensm.an.ocn.Omon.density.ver-' + model['file_end_CO2'] + '_zon2D.nc'
    file1d_1pctCO2 = 'cmip5.' + model['name'] + '.1pctCO2.ensm.an.ocn.Omon.density.ver-' + model['file_end_CO2'] + '_zon1D.nc'
    file2d_piC = 'cmip5.' + model['name'] + '.piControl.ensm.an.ocn.Omon.density.ver-' + model['file_end_piC'] + '_zon2D.nc'
    file1d_piC = 'cmip5.' + model['name'] + '.piControl.ensm.an.ocn.Omon.density.ver-' + model['file_end_piC'] + '_zon1D.nc'

f2dCO2 = open_ncfile(indir_1pctCO2 + file2d_1pctCO2,'r')
f1dCO2 = open_ncfile(indir_1pctCO2 + file1d_1pctCO2,'r')
f2dpiC = open_ncfile(indir_piC + file2d_piC,'r')
f1dpiC = open_ncfile(indir_piC + file1d_piC,'r')


# ----- Work ------

varname = defVarmme('salinity'); v = 'S'
#varname = defVarmme('temp'); v = 'T'
#varname = defVarmme('depth'); v = 'Z'

multStd = 2. # detect ToE at multStd std dev of piControl

labBowl = ['piControl', '2*CO2']
Example 23
multStd = 2. # detect ToE at multStd std dev of piControl

labBowl = ['piControl', '1pctCO2']

valmask = 1.e20

iniyear = 0
finalyear = 140
deltay = 10.


# ----- Variables ------

# Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[0]['name'] + '.1pctCO2.ensm.an.ocn.Omon.density.ver-' + models[0]['file_end_CO2'] + '_zon2D.nc'
f = open_ncfile(indir_1pctCO2 + file,'r')

lat = f.variables['latitude'][:]; latN = lat.size
density = f.variables['lev'][:]; levN = density.size
time = f.variables['time'][:]; timN = time.size
var = varname['var_zonal']

# Define variable properties
legVar = varname['legVar']

# ----- Compute ToE for each model ------

# -- Initialize toe
toe_a = np.ma.ones((len(models), levN, latN))*1.
toe_p = np.ma.ones((len(models), levN, latN))*1.
toe_i = np.ma.ones((len(models), levN, latN))*1.
Example 24
"""

import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset as open_ncfile
from maps_matplot_lib import defVarmme, custom_div_cmap, averageDom
from modelsDef import defModelsCO2piC
from libToE import ToEdomain1pctCO2vsPiC

# ----- mme -----

# mme 1pct CO2
indir_1pctCO2 = '/data/ericglod/Density_binning/Prod_density_april15/mme_1pctCO2/'
file = 'cmip5.multimodel_piCtl.1pctCO2.ensm.an.ocn.Omon.density_zon2D.nc'
data = indir_1pctCO2 + file
fCO2mme = open_ncfile(data,'r')

# mme PiControl
indir_piC = '/data/ericglod/Density_binning/Prod_density_april15/mme_piControl/'
file = 'cmip5.multimodel_1pct.piControl.ensm.an.ocn.Omon.density_zon2D.nc'
data = indir_piC + file
fpiCmme = open_ncfile(data,'r')

# Read main variables
lat = fCO2mme.variables['latitude'][:]
time = fCO2mme.variables['time'][:]
varname = defVarmme('salinity'); v = 'S'
density = fCO2mme.variables['lev'][:]
var = varname['var_zonal_w/bowl']

models = defModelsCO2piC()
multStd = 2. # detect ToE at multStd std dev of histNat

labBowl = ['histNat', 'hist']

valmask = 1.e20

iniyear = 1860
finalyear = 2005
deltay = 10.


# ----- Variables ------

# Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[0]['name'] + '.historical.ensm.an.ocn.Omon.density.ver-' + models[0]['file_end_hist'] + '_zon2D.nc'
f = open_ncfile(indir_toe_1 + file,'r')

lat = f.variables['latitude'][:]; latN = lat.size
density = f.variables['lev'][:]; levN = density.size
time = f.variables['time'][:]; timN = time.size
var = varname['var_zonal_w/bowl']

# Define variable properties
legVar = varname['legVar']


# ----- Read ToE for each model ------
nruns = 0 # Initialize total number of runs
nrunmax = 100
nMembers = np.ma.empty(len(models)) # Initialize array for keeping nb of members per model
# -- Initialize ToE arrays containing ToE of all runs
Example 26
# density domain
domrho = [21., 26., 28.]  # min/mid/max
delrho = [.5, .2]
#
# -------------------------------------------------------------------------------

# -- Define variable properties

var = varname['var']
minmax = varname['minmax']
clevsm = varname['clevsm']
legVar = varname['legVar']
unit = varname['unit']

# -- Open netcdf files
nc1d = open_ncfile(indir + '/' + file1d)
nc2d = open_ncfile(indir + '/' + file2d)

# -- Read variables
# Restrict variables to bowl
if restrictBowl:
    varb = 'Bowl'
else:
    varb=''
tvara = nc2d.variables[var + varb][:, 1, :, :].squeeze()
tvarp = nc2d.variables[var + varb][:, 2, :, :].squeeze()
tvari = nc2d.variables[var + varb][:, 3, :, :].squeeze()
lev = nc2d.variables['lev'][:]
lat = nc2d.variables['latitude'][:]
# Read model agreement variables
if modelAgree:
Example 27
    ####################################################################################################################
    # To add a new dataset, nothing beyond this point needs to be altered.
    ####################################################################################################################

    # Reorder the data as needed
    if order != [0, 1, 2, 3, 4, 5]:
        files = [files[i] for i in order]
        names = [names[i] for i in order]
        colours = [colours[i] for i in order]
        authors = [authors[i] for i in order]
        links = [links[i] for i in order]

    # Build a list of dictionaries containing the datasets and their relevant data
    Datasets = []
    for index, file in enumerate(files):
        nc = open_ncfile(files[index])
        current_set = {
            'name': names[index],
            'file': file,
            'nc data': nc,
            'AOD': nc.variables['aod'][0][:][:],
            'GM_AOD': nc.variables['gm_aod'][0][:],
            'Latitude': nc.variables['latitude'][:],
            'Time': nc.variables['years'][:]
        }
        Datasets.append(current_set)
    # Convert list to a dictionary for easy name lookup later
    Datasets = {names[i]: Datasets[i] for i in range(len(Datasets))}

    # Build a list of the plot tabs we are creating
    tabs = []
Example 28
def read_gsat_rcp85(indir, listfiles, ignore):
    """
    Read GSAT array from different model files, save anomaly in one output array with one dimension being the total
    number of runs, the other being the time
    :param indir: directories where GSAT files are stored
           listfiles: list of ToE files to read (one file per model) to keep same order as when reading ToE
           ignore: list of models to ignore
    :return: GSAT anomaly array (years, nb of runs)

    """

    nruns = 0
    nrunmax = 100
    nmodels = len(listfiles)
    nMembers = np.ma.empty(nmodels)
    timN = 240

    # -- Initialize GSAT
    gsat_anom = np.ma.masked_all((timN, nrunmax))

    for i in range(nmodels):
        file_toe = listfiles[
            i]  # Read toe file in the same order to retrieve model name
        name = os.path.basename(file_toe).split('.')[1]
        ftoe = open_ncfile(file_toe, 'r')
        run_labels = ftoe.variables[
            'run_label'][:]  # Read run labels of model i
        if name != 'HadGEM2-ES':
            iystart = 11
        else:
            iystart = 2

        if name not in ignore:

            # Read GSAT
            file_gsat = glob.glob(indir + 'GSAT.*' + name +
                                  '*.nc')[0]  # GSAT File
            fgsat = open_ncfile(file_gsat, 'r')
            gsatread = fgsat.variables['GSAT'][:]
            run_labels_GSAT = fgsat.variables['members_name'][:]
            nMembers[i] = int(gsatread.shape[1])
            print('- Reading GSAT of %s with %d members' % (name, nMembers[i]))
            # print('  gsatread shape : ',gsatread.shape)
            nruns1 = int(nruns + nMembers[i])

            # Re-organize order of members so that it's the same as ToE array
            gsatread_cor = np.ma.masked_all_like(gsatread)
            for k in range(int(nMembers[i])):
                idx_cor = list(run_labels).index(run_labels_GSAT[k])
                gsatread_cor[:, idx_cor] = gsatread[:, k]
                #print('  ',k,run_labels_GSAT[k], run_labels[k], run_labels[idx_cor])

            # Save GSAT
            gsatread_anom = gsatread_cor - np.ma.average(
                gsatread_cor[0:50, :],
                axis=0)  # Anomaly relative to first 50 years
            gsat_anom[:, nruns:nruns1] = gsatread_anom[
                iystart:, :]  # Keep 1861-2005
            # print('  gsatanom shape : ',gsat_anom.shape)
            nruns = nruns1

    print('Total number of runs:', nruns)
    gsat_anom = gsat_anom[:, 0:nruns]

    return gsat_anom
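
# --- Usage sketch (added): a hypothetical call of read_gsat_rcp85(), reusing the same
# --- ToE file list as read_toe_rcp85() so that the run ordering matches; the
# --- directory paths are placeholders.
listfiles = sorted(glob.glob('/path/to/toe_rcp85_histNat/*.nc'))
gsat = read_gsat_rcp85('/path/to/GSAT_rcp85/', listfiles, ignore=['FGOALS-g2'])
print(gsat.shape)  # (240 years, total number of runs)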
method_noise = 'average_piC' # Average PiC in the specified domains then determine the std of this averaged value

domains = ['Southern ST', 'SO', 'Northern ST', 'North Atlantic', 'North Pacific']

multStd = 2. # detect ToE at multStd std dev of histNat

iniyear = 0
finalyear = 140
deltay = 10.


# ----- Variables ------

# -- Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[0]['name'] + '.1pctCO2.ensm.an.ocn.Omon.density.ver-' + models[0]['file_end_CO2'] + '_zon2D.nc'
f = open_ncfile(indir_1pctCO2 + file,'r')

lat = f.variables['latitude'][:]; latN = lat.size
density = f.variables['lev'][:]; levN = density.size
time = f.variables['time'][:]; timN = time.size
basinN = 4
var = varname['var_zonal']

# -- Define variable properties
legVar = varname['legVar']
unit = varname['unit']


# ----- Average signal and noise and compute ToE for each model ------

for i, model in enumerate(models):
import matplotlib.pyplot as plt
from netCDF4 import Dataset as open_ncfile
from maps_matplot_lib import defVarmme, zonal_2D, custom_div_cmap
from scipy import interpolate

# ----- Workspace ------

indir = '/data/ericglod/Density_binning/Prod_density_april15/mme_hist/'
file_2D = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon2D.nc'
file_1D = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon1D.nc'
name = 'mme_hist'

data_2D = indir + file_2D
data_1D = indir + file_1D

f2D = open_ncfile(data_2D,'r')
f1D = open_ncfile(data_1D,'r')

# ----- Variables ------

# -- Read variables
lat = f2D.variables['latitude'][:]
density = np.array([19.0, 19.2, 19.4, 19.6, 19.8, 20.0, 20.2, 20.4, 20.6, 20.8, 21.0, 21.2,
    21.4, 21.6, 21.8, 22.0, 22.2, 22.4, 22.6, 22.8, 23.0, 23.2, 23.4, 23.6,
    23.8, 24.0, 24.2, 24.4, 24.6, 24.8, 25.0, 25.2, 25.4, 25.6, 25.8, 26.0,
    26.1, 26.2, 26.3, 26.4, 26.5, 26.6, 26.7, 26.8, 26.9, 27.0, 27.1, 27.2,
    27.3, 27.4, 27.5, 27.6, 27.7, 27.8, 27.9, 28.0, 28.1, 28.2, 28.3, 28.4, 28.5])

varname = defVarmme('salinity')
#varname = defVarmme('temp')
#depth = defVarmme('depth')
domrho = [21., 26., 28.]  # min/mid/max
#
# -------------------------------------------------------------------------------

# -- Define variable properties

var = varname['var']
minmax = varname['minmax']
clevsm = varname['clevsm']
legVar = varname['legVar']
unit = varname['unit']

# -- Open netcdf files model 1
print()
print(file1H2d)
nchist2d = open_ncfile(indirHist + '/' + file1H2d)
nchist1d = open_ncfile(indirHist + '/' + file1H1d)
print(file1HN2d)
nchistn2d = open_ncfile(indirHistN + '/' + file1HN2d)
nchistn1d = open_ncfile(indirHistN + '/' + file1HN1d)
print()

agreelev = 0.6  # not used

# -- Read variables
# Restrict variables to bowl
tvar = nchist2d.variables[var]
tvarn = nchistn2d.variables[var]
lev = nchist2d.variables['lev'][:]
lat = nchist2d.variables['latitude'][:]
Example 32
varToEA = np.ma.masked_all((nrunmax, len(domains)))
varToEP = np.ma.masked_all((nrunmax, len(domains)))
varToEI = np.ma.masked_all((nrunmax, len(domains)))

# -- Loop over models
if use_piC == True:
    indir = indir_rcppiC
else:
    indir = indir_rcphn
listfiles = glob.glob(indir + method_noise_rcphn + '/*.nc')
nmodels = len(listfiles)

for i in range(nmodels):

    file_toe = listfiles[i]
    ftoe = open_ncfile(file_toe, 'r')
    name = os.path.basename(file_toe).split('.')[1]
    # Read ToE (members, basin, domain)
    toe2read = ftoe.variables[var + 'ToE2'][:]
    nMembers[i] = toe2read.shape[0]
    print('- Reading ToE of %s with %d members' % (name, nMembers[i]))
    nruns1 = nruns + nMembers[i]

    # Save ToE
    varToEA[nruns:nruns1, :] = toe2read[:, 1, :]
    varToEP[nruns:nruns1, :] = toe2read[:, 2, :]
    varToEI[nruns:nruns1, :] = toe2read[:, 3, :]

    nruns = nruns1

print('Total number of runs:', nruns)
varname = defVarmme('salinity'); v = 'S'

multStd = 2. # detect ToE at multStd std dev of histNat

use_piC = False # Over projection period, signal = RCP-average(histNat), noise = std(histNat)
# use_piC = True # Over projection period, signal = RCP-average(PiControl), noise = std(PiControl)

iniyear = 1860
finalyear = 2100
deltay = 10.

# Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[1]['name'] + '.historicalNat.ensm.an.ocn.Omon.density.ver-' + \
       models[1]['file_end_histNat'] + '_zon2D.nc'
f = open_ncfile(indir_histNat + file,'r')

lat = f.variables['latitude'][:]; latN = lat.size
density = f.variables['lev'][:]; levN = density.size
print(density)
timN = 240
var = varname['var_zonal_w/bowl']
basinN = 4

# Define variable properties
legVar = varname['legVar']

# ----- Compute zonal ToE for each simulation ------

nMembers = np.ma.zeros(len(models)) # Initialize array for keeping nb of members per model
"""
  PyEarthScience: PyNGL contour plot example
  
   - filled contour over map plot
   - rectilinear grid (lat/lon)
   - colorbar
   
   09.10.15  kmf
"""
from   mpl_toolkits.basemap import Basemap, cm
import matplotlib.pyplot as plt
from   netCDF4 import Dataset as open_ncfile
import numpy as np

#-- open netcdf file
nc = open_ncfile('/Users/k204045/NCL/general/data/new_data/rectilinear_grid_2D.nc')

#-- read variable
var = nc.variables['tsurf'][0,:,:]
lat = nc.variables['lat'][:]
lon = nc.variables['lon'][:]

#-- create figure and axes instances
fig = plt.figure(figsize=(8,8))
ax  = fig.add_axes([0.1,0.1,0.8,0.9])

#-- create map
map = Basemap(projection='cyl',llcrnrlat= -90.,urcrnrlat= 90.,\
              resolution='c',  llcrnrlon=-180.,urcrnrlon=180.)

#-- draw coastlines, state and country boundaries, edge of map
Example 35
"""

import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset as open_ncfile
from maps_matplot_lib import defVarDurack, zonal_2D, custom_div_cmap

# ----- Workspace ------

indir = '/data/ericglod/Density_binning/Obs_Prod_density_april16/'
file = 'DurackandWijffels_GlobalOceanChanges-NeutralDensity_1950-2000_120209_11_46_11_beta.nc'
name = 'Durack & Wijffels'

data = indir + file

f = open_ncfile(data,'r')

# ----- Variables ------

# -- Read variables
density = f.variables['density'][:]
lat = f.variables['latitude'][:]

varname = defVarDurack('salinity')
#varname = defVarDurack('temp')

# -- Define variable properties
minmax = varname['minmax_zonal']
clevsm = varname['clevsm_zonal']
clevsm_bold = varname['clevsm_bold']
legVar = varname['legVar']
Example 36
#name = 'Ishii'
name = 'mme'

if name == 'Ishii' :
    indir = '/data/ericglod/Density_binning/Obs_Prod_density_april16/'
    file = 'obs.Ishii.historical.r0i0p0.an.ocn.Omon.density.ver-1.latestXCorr.nc'
    ### Ishii : 1945 to 2013
else :
    indir = '/data/ericglod/Density_binning/Prod_density_april15/Raw/mme_hist/'
    file = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_2D.nc'
    ### mme : 1861 to 2006

data = indir + file

f = open_ncfile(data,'r')

# Read grid for coloring continents
f2 = open_ncfile('/home/ysilvy/Density_bining/Yona_analysis/data/140807_WOD13_masks.nc', 'r')


# ----- Variables ------

# Read variables
lat = f.variables['latitude'][:]
lon = f.variables['longitude'][:]
ptopsigma = f.variables['ptopsigmaxy'][:]
ptopsalinity = f.variables['ptopsoxy'][:]


landsea = f2.variables['landsea'][:]
Example 37
def proj_map_zonal_changes(kind, zonal_change, plt, ax1, ax2, minmax, clevsm, lat, lon, cmap, isopyc,
                           sliced_density, var1, var2 = None):


    isopyc_idx = np.argmin(np.abs(sliced_density - isopyc))

    if kind == 'model':
        var_hist = np.squeeze(var1[:,isopyc_idx,:])
        var_histNat = np.squeeze(var2[:,isopyc_idx,:])
        # Difference
        var_diff = np.ma.average(var_hist[-6:, :], axis=0) - np.ma.average(var_histNat[-6:, :], axis=0)
        # Climatology
        var_mean = np.ma.average(var_hist, axis=0)

    elif kind == 'Durack':
        var_diff = np.squeeze(var1[isopyc_idx,:])
        var_mean = np.squeeze(var2[isopyc_idx,:])

    else:
        var_obs = np.squeeze(var1[:,isopyc_idx,:])
        var_diff = np.ma.average(var_obs[-6:,:], axis=0) - np.ma.average(var_obs[0:5,:], axis=0)
        # Climatology
        var_mean = np.ma.average(var_obs, axis=0)

    # Build zonal mean(s) and remove from var_diff
    zonal_mean = np.ma.average(var_diff, axis=1)
    zonal_mean_2D = np.tile(zonal_mean,(len(lon),1))
    zonal_mean_2D = np.transpose(zonal_mean_2D)
    if zonal_change == 'global':
        var_diff = var_diff - zonal_mean_2D
    if zonal_change == 'basin' and kind == 'Durack' :
        # Read basin mask
        f3 = open_ncfile('/home/ysilvy/data/DurackandWijffels_GlobalOceanSurfaceChanges_1950-2000_mask.nc', 'r')
        basin_mask = f3.variables['basin_mask'][:]
        # Build zonal means for each basin
        mask_p = basin_mask != 1
        mask_a = basin_mask != 2
        mask_i = basin_mask != 3
        var_diff_p = np.ma.array(var_diff, mask=mask_p)
        var_diff_a = np.ma.array(var_diff, mask=mask_a)
        var_diff_i = np.ma.array(var_diff, mask=mask_i)
        zonal_mean_p = np.ma.average(var_diff_p, axis=1)
        zonal_mean_a = np.ma.average(var_diff_a, axis=1)
        zonal_mean_i = np.ma.average(var_diff_i, axis=1)
        zonal_mean_p = np.tile(zonal_mean_p, (len(lon),1)); zonal_mean_p = np.transpose(zonal_mean_p)
        zonal_mean_a = np.tile(zonal_mean_a, (len(lon), 1)); zonal_mean_a = np.transpose(zonal_mean_a)
        zonal_mean_i = np.tile(zonal_mean_i, (len(lon), 1)); zonal_mean_i = np.transpose(zonal_mean_i)
        var_diff[basin_mask==1] = var_diff[basin_mask==1] - zonal_mean_p[basin_mask==1]
        var_diff[basin_mask==2] = var_diff[basin_mask==2] - zonal_mean_a[basin_mask==2]
        var_diff[basin_mask==3] = var_diff[basin_mask==3] - zonal_mean_i[basin_mask==3]


    # Create meshgrid
    lon2d, lat2d = np.meshgrid(lon, lat)

    # Levels for shade plot
    #levels = MaxNLocator(nbins=minmax[2]).tick_values(minmax[0], minmax[1])
    levels = np.linspace(minmax[0], minmax[1], minmax[2])

    # Read grid for coloring continents
    f2 = open_ncfile('/home/ysilvy/Density_bining/Yona_analysis/data/140807_WOD13_masks.nc', 'r')
    landsea = f2.variables['landsea'][:]

    # Create mask
    sea_mask = landsea != 1
    landsea = np.ma.array(landsea, mask=sea_mask)
    landsea[landsea == 1] = 0.2

    # Basemap
    map = Basemap(projection='cyl', llcrnrlon=20, llcrnrlat=-70, urcrnrlon=380, urcrnrlat=70, ax=ax1)
    map.drawmapboundary(fill_color='1')
    map.drawparallels(np.arange(-60, 61, 20.), labels=[1, 1, 0, 0], linewidth=0.5)
    map.drawmeridians(np.arange(-180, 180, 60), labels=[0, 0, 0, 1], linewidth=0.5)

    # Draw filled contours of diff
    cnplot = map.contourf(lon2d, lat2d, var_diff, cmap=cmap, levels = levels, latlon=True, extend='both')

    if kind == 'Durack':
        map.fillcontinents(color='black')
    else :
        # Draw continents
        pc2 = map.pcolormesh(lon2d, lat2d, landsea, shading='flat', cmap=plt.cm.gray, latlon=True)

    # Indicate which isopycnal we're projecting on
    ax1.text(0.1, 0.85, r'$\gamma = %.1f$' %(isopyc,), transform=ax1.transAxes, fontweight='bold',color='w', fontsize=17)


    # Plot zonal mean
    ax2.plot(zonal_mean, lat, color='purple', linewidth=1.5)

    # Set axis parameters
    ax2.set_xlim(minmax[0], minmax[1])
    ax2.set_ylim(-70,70)
    ax2.tick_params(
        axis='both',      # changes apply to both axes
        which='both',     # both major and minor ticks are affected
        top=False,        # ticks along the top edge are off
        labelleft=False,
        #left=False,
        right=False,
        labelright=False)
    ax2.spines['left'].set_position('zero')
    ax2.spines['right'].set_color('none')
    ax2.spines['top'].set_color('none')
    ax2.set_yticks([-60,-40,-20,0,20,40,60])
    labels = ax2.get_xticklabels()
    plt.setp(labels, rotation=45, fontsize=10)
    ax2.grid(True)

    return cnplot, levels
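
# --- Usage sketch (added): proj_map_zonal_changes() draws the map on ax1 and the zonal
# --- mean on ax2; a hypothetical layout giving the map most of the width. var_change /
# --- var_clim stand for (density, lat, lon) arrays read elsewhere, with lat, lon and
# --- density loaded alongside them; levels and colormap are placeholders.
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5),
                               gridspec_kw={'width_ratios': [4, 1]})
cnplot, levels = proj_map_zonal_changes('Durack', 'global', plt, ax1, ax2,
                                        [-0.3, 0.3, 16], np.arange(33, 37, 0.5),
                                        lat, lon, plt.cm.RdBu_r, 25.0, density,
                                        var_change, var2=var_clim)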
# ----- Work ------

varname = defVarmme('salinity'); v = 'S'
# varname = defVarmme('temp'); v = 'T'

iniyear = 1860
finalyear = 2005


# ----- Variables ------

domains = ['Southern ST', 'SO', 'Northern ST', 'North Atlantic', 'North Pacific']

# Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[0]['name'] + '.historicalNat.r1i1p1.an.ocn.Omon.density.ver-' + models[0]['file_end_histNat'] + '_zon2D.nc'
f = open_ncfile(indir_histNat + file,'r')

lat = f.variables['latitude'][:]; latN = lat.size
density = f.variables['lev'][:]; levN = density.size
time = f.variables['time'][:]; timN = time.size
basinN = 4
var = varname['var_zonal_w/bowl']
legVar = varname['legVar']
unit = varname['unit']


# ----- Compute noise and average in domains for each run, then save in output file for each model -----

# == Historical and historicalNat + PiControl in histvshistNat boxes (for RCP8.5 ToE calculation) ==

nMembers_h = np.ma.empty(len(models)) # Initialize array for keeping nb of members per model
Example 39
indir_piC = '/data/ericglod/Density_binning/Prod_density_april15/mme_piControl/'

models = defModelsCO2piC()

# ----- Work ------

varname = defVarmme('salinity')
v = 'S'

multStd = 2.  # detect ToE at multStd std dev of histNat

# -- Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[0][
    'name'] + '.1pctCO2.ensm.an.ocn.Omon.density.ver-' + models[0][
        'file_end_CO2'] + '_zon2D.nc'
f = open_ncfile(indir_CO2 + file, 'r')

lat = f.variables['latitude'][:]
latN = lat.size
lev = f.variables['lev'][:]
levN = lev.size
time = f.variables['time'][:]
timN = time.size
var = varname['var_zonal_w/bowl']
basinN = 4

# Define variable properties
legVar = varname['legVar']
unit = varname['unit']

# ----- Compute zonal ToE for each model ------
Example 40
markers = ['o','v','^','8','s','p','h','D','d','*','>']

var = varname['var_zonal_w/bowl']

# ----- Read ToE and noise for each run ------

# ToEhn, ToEpiC, noisehn, noisepiC
listfiles_rcphn = glob.glob(indir_rcphn + method_noise + '/*.nc')
nmodels = len(listfiles_rcphn)
model_names = np.empty(nmodels).astype('S20')

for i in range(nmodels):

    # Read ToE RCP8.5 vs. histNat (members, basin, domain)
    filetoehn = listfiles_rcphn[i]
    ftoehn = open_ncfile(filetoehn)
    name = os.path.basename(filetoehn).split('.')[1]
    model_names[i] = name
    print(name)
    toehnread = ftoehn.variables[var + 'ToE2'][:]

    # Read noise histNat
    if method_noise == 'average_histNat':
        # Here we read the std of the averaged histNat for all runs, then take the max as our noise
        filenoise = 'cmip5.' + name + '.noise_domains_hist_histNat.std_of_average.nc'
        fnoise = open_ncfile(indir_noise + filenoise,'r')
        varstdhn = fnoise.variables[var+'stdhn'][:] # Already averaged in the domains (members,basin,domain)
        varnoisehn = np.ma.max(varstdhn,axis=0)
    else:
        # Read histNat ensemble mean
        filehn = glob.glob(indir_histNat + 'cmip5.' + name + '.'+'*.zon2D.nc')[0]
Example 41
domains = ['Southern ST', 'SO', 'Northern ST', 'North Atlantic', 'North Pacific']
domain_name = 'Southern ST'
idomain = 0
signal_domain = 'fresher'

ibasin = 1 ; basin_name = 'Atlantic' # Atlantic southern ST
# ibasin = 2 ; basin_name = 'Pacific' # Pacific southern ST

# use_piC = False # Over projection period, signal = RCP-average(histNat), noise = std(histNat)
use_piC = True # Over projection period, signal = RCP-average(PiControl), noise = std(PiControl)

# Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[1]['name'] + '.historicalNat.ensm.an.ocn.Omon.density.ver-' + \
       models[1]['file_end_histNat'] + '_zon2D.nc'
f = open_ncfile(indir_histNat + file,'r')

lat = f.variables['latitude'][:]; latN = lat.size
density = f.variables['lev'][:]; levN = density.size
timN = 240
time_label = np.arange(1860,2100)
var = varname['var_zonal_w/bowl']
basinN = 4

# Define variable properties
legVar = varname['legVar']


# ----- Initialize plot ------

if use_piC == True:
"""

import numpy as np
import matplotlib.pyplot as plt
import os, glob
from netCDF4 import Dataset as open_ncfile
import datetime

# ===
# === WORKSPACE AND PRE-REQUISITES ===
# ===

# Read EN4 file : will be our climatology
indir = '/home/ericglod/Density_bining/test/'
file = 'EN4.mon.ocean.Omon.1900_2017.density.nc'  #'obs.EN4.historical.r0i0p0.mo.ocn.Omon.density.ver-1.latestX_zon2D.nc'
f = open_ncfile(indir + file, 'r')

# Read variables
lat = f.variables['latitude'][:]
density = f.variables['lev'][:]
isonvol = np.ma.average(f.variables['isonvol'][:, :, :, :],
                        axis=0) * 1.e03  # Volume of isopycnals in km3 (climatological mean)

# Dimensions
basinN = 4
latN = len(lat)
densityN = len(density)

# Z grid for calculating ocean volume per depth level
#gridz = np.arange(0,5501,5)
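
# (Hedged sketch.) The commented-out grid above suggests the volumes are later spread onto
# a regular depth axis; as a minimal illustration the lines below only rebuild that axis
# and check the total water volume held in the binned climatology.
gridz = np.arange(0, 5501, 5)   # target depth levels (m), 5 m steps
# isonvol is (basin, density, latitude) after the time average; summing over density and
# latitude gives the total climatological volume captured in each basin (km3)
vol_per_basin = np.ma.sum(np.ma.sum(isonvol, axis=2), axis=1)
print('Total binned volume per basin (km3):', vol_per_basin)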
Example 43
    longName = 'historicalNat'
    dir = 'histNat_std/'
else:
    indir_z = indir_piC_remap
    indir = indir_piC
    longName = 'piControl'
    dir = 'piControl_std/'

# ==== Read remapped std from file ====

listfiles = sorted(glob.glob(indir_z+'*.nc'))
# -- Loop on models
for i in range(len(listfiles)):
    # i=0
    file = os.path.basename(listfiles[i])
    f = open_ncfile(indir_z+file,'r')
    name = file.split('.')[1] # Read model name

    print('Reading '+work+' for '+name)

    stdvar_z = f.variables[var+'Std'][:]
    lat = f.variables['latitude'][:]
    pseudo_depth = f.variables['pseudo_depth'][:]

    # Read bowl and take the average
    file2 = glob.glob(indir+'/*'+name+'*1D.nc')[0]
    f2 = open_ncfile(file2,'r')
    if work == 'histNat':
        bowl = f2.variables['ptopdepth'][:]
    else:
        bowl = f2.variables['ptopdepth'][-240:,:,:]
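
    # (Hedged continuation sketch: the excerpt is cut here.) The time average announced in
    # the comment above would reduce the bowl to a (basin, latitude) field:
    bowl_mean = np.ma.average(bowl, axis=0)
    # bowl_mean can then be used to mask the remapped std above the bowl position.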
Example 44
# varname = defVarmme('depth'); v = 'Z'

iniyear = 1860
finalyear = 2005

# ----- Variables ------

domains = ['Southern ST', 'SO', 'Northern ST', 'North Atlantic', 'North Pacific']

# Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[0]['name'] + '.historicalNat.r1i1p1.an.ocn.Omon.density.ver-' + \
       models[0]['file_end_histNat'] + '_zon2D.nc'
f = open_ncfile(indir_histNat + file, 'r')

lat = f.variables['latitude'][:]
latN = lat.size
density = f.variables['lev'][:]
levN = density.size
time = f.variables['time'][:]
timN = time.size
basinN = 4
var = varname['var_zonal_w/bowl']
legVar = varname['legVar']
unit = varname['unit']

# ----- Compute noise and average in domains for each run, then save in output file for each model -----

# == Historical and historicalNat + PiControl in RCP8.5vshistNat boxes (for RCP8.5 ToE calculation) ==
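
# (Hedged sketch: the excerpt stops at the comment above.) For each run, the noise is the
# temporal standard deviation of the zonal field, later averaged in the ToE domain boxes
# and written to a per-model output file. A minimal version of the reading/std step, for
# the histNat case only and assuming glob is imported as in the other scripts; the domain
# averaging and the output writing are omitted.
for i, model in enumerate(models):
    files_hn = sorted(glob.glob(indir_histNat + 'cmip5.' + model['name'] + '.*zon2D.nc'))
    for filepath in files_hn:
        fhn = open_ncfile(filepath, 'r')
        varhn = fhn.variables[var][:]            # (time, basin, density, latitude)
        varstdhn = np.ma.std(varhn, axis=0)      # noise of this run
        fhn.close()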
Example 45
# == Historical vs. historicalNat ==

nruns = 0 # Initialize total number of runs
nrunmax = 100
nMembers = np.ma.zeros(len(models)) # Initialize array for keeping number of members per model

# -- Initialize varToE containing ToE of all runs
varToEA = np.ma.masked_all((nrunmax, len(domains)))
varToEP = np.ma.masked_all((nrunmax, len(domains)))
varToEI = np.ma.masked_all((nrunmax, len(domains)))

# -- Loop over models
for i, model in enumerate(models):

    file_toe = 'cmip5.' + model['name'] + '.toe_histNat_method2_' + method_noise_hn + '.nc'
    ftoe = open_ncfile(indir_hhn + method_noise_hn + '/' + file_toe, 'r')

    # Read ToE (members, basin, domain)
    toe2read = ftoe.variables[var + 'ToE2'][:]

    if runs == 'all':
        nMembers[i] = toe2read.shape[0]
        print('- Reading ToE of',model['name'], 'with', nMembers[i], 'members')
        nruns1 = nruns + int(nMembers[i])  # cast to int so the count can be used for slicing

        # Save ToE
        varToEA[nruns:nruns1,:] = toe2read[:,1,:]
        varToEP[nruns:nruns1,:] = toe2read[:,2,:]
        varToEI[nruns:nruns1,:] = toe2read[:,3,:]

        nruns = nruns1
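
# (Hedged continuation sketch, outside the model loop.) Once every model has been read,
# the pre-allocated arrays are usually trimmed to the actual number of runs:
varToEA = varToEA[:nruns, :]
varToEP = varToEP[:nruns, :]
varToEI = varToEI[:nruns, :]
print('Total number of runs:', nruns)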
Example 46
unit = varname['unit']

iniyear = 1860
finalyear = 2005

plotName = 'cmip5_remap_test' + varname['var']

# -------------------------------------------------------------------------------
# file inits

os.chdir(inDir)
file = 'cmip5.multimodel_All.historical.ensm.an.ocn.Omon.density_zon2D.nc'
var = varname['var']

# find dimensions
fi = open_ncfile(inDir+'/'+file)
print(inDir + '/' + file)

fieldr = fi.variables[var][:]
depthr = fi.variables['isondepth'][:]
volumr = fi.variables['isonvol'][:]
lat = fi.variables['latitude'][:]

valmask = 1.e20

# Target grid

targetz = [0.,5.,15.,25.,35.,45.,55.,65.,75.,85.,95.,105.,116.,128.,142.,158.,181.,216.,272.,364.,511.,732.,1033.,1405.,1830.,2289.,2768.,3257.,3752.,4350.,4749.,5250.]

# Remap
fieldz = remapToZ(fieldr.data,depthr.data,volumr.data, valmask, targetz)
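
# (Hedged sketch.) Assuming remapToZ returns the field on (time, basin, target depth,
# latitude) like its density-space input, and that numpy (np) and matplotlib.pyplot (plt)
# are imported as in the other scripts, a quick look at the Atlantic section of the last
# time step could be:
fieldz_atl = np.ma.masked_values(fieldz[-1, 1, :, :], valmask)  # mask the fill value
fig, ax = plt.subplots()
cs = ax.contourf(lat, targetz, fieldz_atl, 30)
ax.invert_yaxis()                                 # depth increases downward
ax.set_xlabel('Latitude')
ax.set_ylabel('Pseudo-depth (m)')
ax.set_title(varname['legVar'] + ' remapped to depth (Atlantic)')
plt.colorbar(cs, ax=ax, label=unit)
plt.savefig(plotName + '.png')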
Example 47
multStd = 2. # detect ToE at multStd std dev of histNat (or PiControl)
# In fact now we save both 1 std and multStd=2 std

# use_piC = False # Over projection period, signal = RCP-average(histNat), noise = std(histNat)
use_piC = True # Over projection period, signal = RCP-average(PiControl), noise = std(PiControl)

iniyear = 1860
finalyear = 2100
deltay = 10.


# Choose random file to read only the basic variables and properties common to all files
file = 'cmip5.' + models[1]['name'] + '.historicalNat.ensm.an.ocn.Omon.density.ver-' + \
       models[1]['file_end_histNat'] + '_zon2D.nc'
f = open_ncfile(indir_histNat + file,'r')

lat = f.variables['latitude'][:]; latN = lat.size
density = f.variables['lev'][:]; levN = density.size
timN = 240
var = varname['var_zonal_w/bowl']
basinN = 4

# Define variable properties
legVar = varname['legVar']

# ----- Average signal and noise and compute ToE for each simulation ------

nMembers = np.ma.zeros(len(models)) # Initialize array for keeping nb of members per model

for i, model in enumerate(models):
Example 48
# use cdo yseasmean to compute seasonal climatology
run1 = 'CM61-LR-hist-03.2110'
file1  = run1+'_1950_2009_seasmean_transf_north40.nc'
run2 = 'CM6-pace-TSTr8fgT'
file2  = run2+'_1950_2009_seasmean_transf_north40.nc'
file2c  = run2+'_1950_2009_seasmean_transf_north40_corr.nc'
run3 = 'CM6-pace-TSTr8vg'
file3  = run3+'_1950_1999_seasmean_transf_north40.nc'
file3c  = run3+'_1950_1999_seasmean_transf_north40_corr.nc'
run4 = 'CM6-pace-TSTr8vgS0'
file4  = run4+'_1950_2009_seasmean_transf_north40.nc'
file4c  = run4+'_1950_2009_seasmean_transf_north40_corr.nc'

#
# -- Open netcdf files
nc1 = open_ncfile(inDir + run1 + '/' + file1)
nc2 = open_ncfile(inDir + run2 + '/' + file2)
nc3 = open_ncfile(inDir + run3 + '/' + file3)
nc4 = open_ncfile(inDir + run4 + '/' + file4)
nc2c = open_ncfile(inDir + run2 + '/' + file2c)
nc3c = open_ncfile(inDir + run3 + '/' + file3c)
nc4c = open_ncfile(inDir + run4 + '/' + file4c)

# -- Read variables for North Atl (annual mean from cdo yearmean on monthly data)

trfatltot1 = nc1.variables['trsftotAtl'][0,:].squeeze()
trfatlhef1 = nc1.variables['trsfhefAtl'][0,:].squeeze()
trfatlwfo1 = nc1.variables['trsfwfoAtl'][0,:].squeeze()

trfatltot2 = nc2.variables['trsftotAtl'][0,:].squeeze()
trfatlhef2 = nc2.variables['trsfhefAtl'][0,:].squeeze()
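
# (Hedged sketch.) A quick comparison of the transformation components for the first run;
# matplotlib.pyplot (plt) is assumed imported, and 'rhon' is only a guess at the name of
# the density coordinate in these files.
sigma = nc1.variables['rhon'][:]
fig, ax = plt.subplots()
ax.plot(sigma, trfatltot1, 'k', label=run1 + ' total')
ax.plot(sigma, trfatlhef1, 'r', label=run1 + ' heat flux')
ax.plot(sigma, trfatlwfo1, 'b', label=run1 + ' freshwater flux')
ax.axhline(0., color='grey', lw=0.5)
ax.set_xlabel('Potential density')
ax.set_ylabel('Transformation (Sv)')
ax.legend(loc='best', frameon=False)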
Example 49
# density domain
rhomin = 21
rhomid = 26
rhomax = 28
domrho = [rhomin, rhomid, rhomax]

# ----- Variables ------
var = varname['var_zonal_w/bowl']
legVar = varname['legVar']
unit = varname['unit']

# Read latitude and density from original file
fileh_2d = '/data/ericglod/Density_binning/Prod_density_april15/mme_hist/' \
       'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon2D.nc'
fh2d = open_ncfile(fileh_2d, 'r')
lat = fh2d.variables['latitude'][:]
latN = len(lat)
density = fh2d.variables['lev'][:]
levN = len(density)
basinN = 4

# ------------------------------------
# ----- Read ToE for each model ------
# ------------------------------------

# == Historical + RCP8.5 vs. historicalNat or vs. PiControl ==

nruns = 0  # Initialize total number of runs
nrunmax = 100
nMembers = np.ma.zeros(len(models))  # Initialize array for keeping number of members per model
Example 50
"""

import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset as open_ncfile
from maps_matplot_lib import proj_map, proj_map_zonal_changes, custom_div_cmap, defVarDurack

# ----- Workspace ------

indir = '/data/ericglod/Density_binning/Obs_Prod_density_april16/'
file = 'DurackandWijffels_GlobalOceanChanges-NeutralDensity_1950-2000_120209_11_46_11_beta.nc'
name = 'Durack & Wijffels'

data = indir + file

f = open_ncfile(data, 'r')

# ----- Variables ------

# Read variables
density = f.variables['density'][:]
lat = f.variables['latitude'][:]
lon = f.variables['longitude'][:]

varname = defVarDurack('temp')
v = 'T'
#varname = defVarDurack('salinity'); v='S'

# -- Look at zonal differences or not
zonal_change = 'No'
#zonal_change = 'global' # Zonal mean is global
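
# (Hedged sketch.) The flag above selects between mapping the full 50-year change and its
# deviation from the zonal mean; 'var_change' below is a hypothetical stand-in for the key
# that defVarDurack actually exposes for the change field, so the lines are left commented.
# var_change = f.variables[varname['var_change']][:]                 # (density, lat, lon)
# if zonal_change == 'global':
#     var_change = var_change - np.ma.average(var_change, axis=-1)[:, :, np.newaxis]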
Example 51
if name == 'mme_hist_histNat':
    indirh = '/data/ericglod/Density_binning/Prod_density_april15/mme_hist/'
    fileh_2d = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon2D.nc'
    # fileh_2d = 'cmip5.CCSM4.historical.ensm.an.ocn.Omon.density.ver-v20121128_zon2D.nc'
    fileh_1d = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon1D.nc'
    # fileh_1d = 'cmip5.CCSM4.historical.ensm.an.ocn.Omon.density.ver-v20121128_zon1D.nc'
    datah_2d = indirh + fileh_2d
    datah_1d = indirh + fileh_1d
    indirhn = '/data/ericglod/Density_binning/Prod_density_april15/mme_histNat/'
    filehn_2d = 'cmip5.multimodel_Nat.historicalNat.ensm.an.ocn.Omon.density_zon2D.nc'
    # filehn_2d = 'cmip5.CCSM4.historicalNat.ensm.an.ocn.Omon.density.ver-v20121128_zon2D.nc'
    filehn_1d = 'cmip5.multimodel_Nat.historicalNat.ensm.an.ocn.Omon.density_zon1D.nc'
    # filehn_1d = 'cmip5.CCSM4.historicalNat.ensm.an.ocn.Omon.density.ver-v20121128_zon1D.nc'
    datahn_2d = indirhn + filehn_2d
    datahn_1d = indirhn + filehn_1d
    fh2d = open_ncfile(datah_2d, 'r')
    fh1d = open_ncfile(datah_1d, 'r')
    fhn2d = open_ncfile(datahn_2d, 'r')
    fhn1d = open_ncfile(datahn_1d, 'r')

    # File for remapped density
    file = 'cmip5.multimodel_Nat.historical.remaptoz_density.zon2D.nc'
    fsigma = open_ncfile(indir_density + file, 'r')

if name == 'mme_hist':
    indir = '/data/ericglod/Density_binning/Prod_density_april15/mme_hist/'
    file_2d = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon2D.nc'
    file_1d = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon1D.nc'
    data_2d = indir + file_2d
    data_1d = indir + file_1d
    fh2d = open_ncfile(data_2d, 'r')
Example 52
"""

import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset as open_ncfile
from maps_matplot_lib import zonal_2D, defVarmme, averageDom
from modelsDef import defModels, defModelsCO2piC
from libToE import ToEdomainhistvshistNat

# ----- Workspace ------

indirh = '/data/ericglod/Density_binning/Prod_density_april15/mme_hist/'
fileh = 'cmip5.multimodel_Nat.historical.ensm.an.ocn.Omon.density_zon2D.nc'
indirhn = '/data/ericglod/Density_binning/Prod_density_april15/mme_histNat/'
filehn = 'cmip5.multimodel_All.historicalNat.ensm.an.ocn.Omon.density_zon2D.nc'
fh = open_ncfile(indirh + fileh,'r')
fhn = open_ncfile(indirhn + filehn,'r')


# ----- Work/Variables ------

domains = ['Southern ST', 'SO', 'Northern ST', 'North Atlantic', 'North Pacific']

multStd = 2. # detect ToE at multStd std dev of histNat

varname = defVarmme('salinity'); v = 'S'
#varname = defVarmme('temp'); v = 'T'
#varname= defVarmme('depth'); v = 'Z'

var = varname['var_zonal']
legVar = varname['legVar']
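
# (Hedged sketch.) With the two multi-model files open, the hist-minus-histNat signal and
# the histNat noise used with multStd can be formed as follows; the 20-year averaging
# window at the end of the records is an assumption, not taken from the original script.
varh = fh.variables[var][:]                     # (time, basin, density, latitude)
varhn = fhn.variables[var][:]
signal = np.ma.average(varh[-20:], axis=0) - np.ma.average(varhn[-20:], axis=0)
noise = np.ma.std(varhn, axis=0)                # temporal std of historicalNat
emerged = np.ma.absolute(signal) > multStd * noise  # where the mme signal exceeds the noise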