コード例 #1
0
ファイル: CESM_utils_mask.py プロジェクト: LABclimate/MT
def gen_mask_auxgrd_overlay_lat(lat_auxgrd, ncdat):
    ''' Boolean of size (nlatAUXgrid, nlatMODELgrid, nlonMODELgrid).
        It is True where latitudes of auxillary and model grid lie in the same box.
    '''
    print('> generating mask_auxgrd_overlay_lat')
    # plain numpy arrays instead of xarray objects to speed up the loops below
    lat_mgrdT, iter_lat_auxgrd, iter_lat_mgrdT, iter_lon_mgrdT = vars2speedup(lat_auxgrd, ncdat)
    lat_aux = np.array(lat_auxgrd)                              # ndarray for fast indexing
    overlay = np.zeros([len(lat_aux), len(ncdat.nlat), len(ncdat.nlon)], dtype=bool)  # pre-allocated as False
    for n in iter_lat_auxgrd[:-1]:
      utils_misc.ProgBar('step', step=n, nsteps=len(iter_lat_auxgrd), minbarlen=60)
      lo, hi = lat_aux[n], lat_aux[n+1]                         # bounds of aux-latitude bin n
      for j in iter_lat_mgrdT:
          # True wherever the model-grid latitude falls inside the half-open bin [lo, hi)
          in_bin = (lat_mgrdT[j,:] >= lo) & (lat_mgrdT[j,:] < hi)
          overlay[n,j,in_bin] = True
    utils_misc.ProgBar('done')

    return(overlay)
コード例 #2
0
ファイル: CESM_utils_MOC.py プロジェクト: LABclimate/MT
def calc_Mxint_auxgrd(lat_ax, zd_ax, M, fraction_mask, ATLiter):
    '''
    Zonally integrate the volume transport M along the auxiliary grid.

    Input:
     > lat_ax               : meridional axis of auxgrd | nparray
     > zd_ax                : vertical or density axis of auxgrd | nparray
     > M                    : volume transport (MW, MV, dMW or dMV) | nparray of shape [nz, nlat, nlon]
     > fraction_mask        : per aux-latitude n, array of rows [j, i, fraction] giving the
                              fraction of model cell (j, i) lying in aux-latitude bin n
                              (inferred from the column usage below -- confirm upstream)
     > ATLiter              : per model-latitude j, longitude indices of the Atlantic
    Output:
     > Mxint                : zonally integrated volume transport of shape [nz, nlat] | nparray
    Steps:
     > n-loop: over latitude on aux grid
     >   j-loop: over model-grid latitudes occurring in fraction_mask[n]
     >     i-loop: over longitudes where both ATLiter and fraction_mask are True
     >       zonal integration by summing up fraction-weighted transports M (vectorially over depth)
    '''
    # a few variables to speed up subsequent loops
    iter_lat_ax = np.arange(len(lat_ax))

    print('> zonal integration')
    Mxint = np.zeros([len(zd_ax), len(lat_ax)])  # pre-allocation with zeros
    for n in iter_lat_ax[:-1]:  #!! last aux-latitude bin is skipped
        utils_misc.ProgBar('step', step=n, nsteps=len(iter_lat_ax))
        fract_n = fraction_mask[n]
        # BUGFIX: a leftover debug_here() behind a bare except is removed; an
        # empty fraction mask (zero-size reduction) simply contributes nothing.
        try:
            j_range = np.arange(fract_n[:, 0].min(), fract_n[:, 0].max() + 1)
        except ValueError:
            continue
        for j in j_range:  # limit meridional loop to fraction_mask
            fract_nj = fract_n[fract_n[:, 0] == j]
            # limit zonal loop to ATLmask and fraction_mask
            for i in np.intersect1d(fract_nj[:, 1], ATLiter[j]):
                fract_nji = fract_nj[fract_nj[:, 1] == i, 2]    # cell fraction used as weight
                Mxint[:, n] = np.nansum([Mxint[:, n], M[:, j, i] * fract_nji],
                                        axis=0)                 # zonal integration (NaN-safe)
    utils_misc.ProgBar('done')

    return (Mxint)
コード例 #3
0
ファイル: CESM_utils_mask.py プロジェクト: LABclimate/MT
def gen_iter_maskcombo(lat_auxgrd, ncdat, mask_auxgrd_overlay_lat):
    ''' Array of size (nlatAUXgrid, nlatMODELgrid).
        Each element is a np.Array containing longitude-indices (i) where
        both, mask_auxgrd_overlay_lat and the region mask on the model grid are True.
    '''
    print('> generating iter_maskcombo')
    # plain numpy arrays to speed up the nested loops
    lat_mgrdT, iter_lat_auxgrd, iter_lat_mgrdT, iter_lon_mgrdT = utils_mask.vars2speedup(lat_auxgrd, ncdat)
    region_mask = ncdat.REGION_MASK.values
    # dtype=object: each cell holds an index array of arbitrary length
    combo = np.zeros([len(lat_auxgrd), len(ncdat.nlat)], dtype=object)

    for n in iter_lat_auxgrd:
      utils_misc.ProgBar('step', step=n, nsteps=len(iter_lat_auxgrd), minbarlen=60)
      overlay_n = mask_auxgrd_overlay_lat[n]
      for j in iter_lat_mgrdT:
        # longitudes where the grids overlay AND the region-mask value is >= 6
        combo[n,j] = np.where(overlay_n[j,:] & (region_mask[j,:] >= 6))[0]
    utils_misc.ProgBar('done')

    return(combo)

    ''' Run following lines for visual testing:
コード例 #4
0
ファイル: CESM_paths.py プロジェクト: LABclimate/MT
def get_path2figs(vartype, mkdir=False):
    ''' Return the figure directory for *vartype*, optionally creating it.

    Input:
     > vartype : 'misc' (miscellaneous test figures) or 'corr' (correlation indices etc.)
     > mkdir   : create the directory (and its root, if needed) | boolean
    Output:
     > dirname : relative path ending with '/' | string
    '''
    figs_root = '../figures/'
    # dispatch table instead of the if/elif chain
    subdirs = {
        'misc': 'figs_misc/',   # dump miscellaneous test figures here
        'corr': 'figs_corr/',   # correlation indices etc.
    }
    if vartype not in subdirs:
        # NOTE: the original had an unreachable `return ()` after sys.exit -- removed
        sys.exit('Invalid key for path2vars')
    dirname = figs_root + subdirs[vartype]

    # -----------------------------------
    # return dirname and create directory
    # -----------------------------------
    if mkdir:
        try:
            utils_misc.mkdir(dirname)
        except Exception:
            # parent is probably missing: create it first, then retry the target
            utils_misc.mkdir(figs_root)
            utils_misc.mkdir(dirname)
    return (dirname)
コード例 #5
0
ファイル: CESM_utils_mask.py プロジェクト: LABclimate/MT
def calc_H_auxgrd_xmax(lat_auxgrd, ncdat, TorUgrid):
    ''' Maximal depth along longitudes for each latitude of the auxiliary grid.

    Input:
     > lat_auxgrd : meridional auxiliary grid | nparray
     > ncdat      : netCDF data providing REGION_MASK and HT/HU
     > TorUgrid   : 'T' (use ncdat.HT) or 'U' (use ncdat.HU) | string
    Output:
     > H_auxgrd_xmax : nparray of shape [len(lat_auxgrd)]
    '''
    # a few variables to speed up subsequent loops
    iter_lat_auxgrd = np.arange(len(lat_auxgrd))
    overlayboolmask = utils_mask.gen_mask_auxgrd_overlay_lat(lat_auxgrd, ncdat)
    iter_lat_mgrd = [np.where(np.any(overlayboolmask[n,:,:], axis=1))[0] for n in iter_lat_auxgrd]
    ATLboolmask = utils_mask.get_ATLbools(ncdat.REGION_MASK)    # boolean mask
    mask_ATLiter = utils_mask.get_ATLiter(ATLboolmask)
    # select the depth field; fail fast on an invalid key instead of the
    # NameError the original raised further below
    if TorUgrid == 'T':
      H = ncdat.HT
    elif TorUgrid == 'U':
      H = ncdat.HU
    else:
      raise ValueError("TorUgrid must be 'T' or 'U', got {!r}".format(TorUgrid))

    # find maximal depth along longitudes
    H_auxgrd_xmax = np.zeros(len(iter_lat_auxgrd))             # pre-allocation with zeros
    for n in iter_lat_auxgrd:
      utils_misc.ProgBar('step', step=n, nsteps=len(iter_lat_auxgrd))
      for j in iter_lat_mgrd[n]:
        for i in mask_ATLiter[j]:
          H_auxgrd_xmax[n] = np.nanmax([H_auxgrd_xmax[n], H[j,i]])
    utils_misc.ProgBar('done')

    return(H_auxgrd_xmax)
コード例 #6
0
ファイル: CESM_utils_dMOC.py プロジェクト: LABclimate/MT
def calc_dMxint_auxgrd(lat_auxgrd, z_auxgrd, vel_comp, M, PD, PD_bins, ncdat, path_vars, savevar=True):
    '''
    Zonally integrate the volume transport M along the auxiliary grid while
    binning it in potential density.

    Input:
     > lat_auxgrd               : meridional auxillary grid | nparray
     > z_auxgrd                 : vertical auxillary grid | nparray (not used in this routine)
     > vel_comp                 : either 'W' or 'V' | string (selects the output file name)
     > M                        : volume transport (MW or MV) | nparray of shape [nz, nlat, nlon]
     > PD                       : potential density | nparray of shape [nz, nlat, nlon]
     > PD_bins                  : borders of PD-bins | nparray of shape [nPDbins+1]
     > ncdat                    : netCDFdata to load REGION_MASK
     > path_vars                : path for saving variables | string
     > savevar                  : save result to file | boolean
    Output:
     > dMxint                   : zonally integrated volume transport of shape [nPDbins, nlat] | nparray
    '''
    # a few variables to speed up subsequent loops
    iter_lat_auxgrd = np.arange(len(lat_auxgrd))
    iter_lat_M = np.arange(M.shape[1])
    iter_dens = np.arange(len(PD_bins)-1)
    # masks and iteration indices: load from file, recompute on any failure
    try:
        mask_auxgrd = utils_misc.loadvar(path_vars+'mask_auxgrd')
    except Exception:
        mask_auxgrd = utils_mask.gen_mask_grd_overlay_lat(lat_auxgrd, ncdat, path_vars)
    try:
        iter_maskcombo = utils_misc.loadvar(path_vars+'iter_maskcombo')
    except Exception:
        iter_maskcombo = utils_mask.gen_iter_maskcombo(lat_auxgrd, ncdat, mask_auxgrd, path_vars)
    # pre-allocation with zeros
    mask_PD_bins = np.zeros(shape=(len(PD_bins)-1, PD.shape[1]), dtype=object)
    dMxint = np.zeros(shape=mask_PD_bins.shape)
    # the density-bin masks do not depend on the aux-latitude n, so compute them
    # once here instead of once per n inside the main loop (pure speed-up)
    for l in iter_dens:
      for j in iter_lat_M:
        # BUGFIX: elementwise '&' instead of 'and' -- 'and' on arrays raises
        # "The truth value of an array ... is ambiguous"
        mask_PD_bins[l,j] = np.where((PD[:,j,:] > PD_bins[l]) & (PD[:,j,:] < PD_bins[l+1]))  # (depth, longitude) indices
    # zonal integration along auxgrid and conversion on density axis
    print('> zonal integration')
    for n in iter_lat_auxgrd:
      utils_misc.ProgBar('step', step=n, nsteps=len(iter_lat_auxgrd))
      for l in iter_dens:
        for j in iter_lat_M:
          for i in iter_maskcombo[n,j]:         # limit zonal integration to Atlantic and grid-overlay
            if i in mask_PD_bins[l,j][1]:       # combine both masks: maskcombo and density binning
              try:
                dMxint[l,n] = np.nansum([dMxint[l,n], M[mask_PD_bins[l,j][0],j,i]])
              except Exception:
                pass  # M[...] may be an empty array, which is not accepted by nansum
    utils_misc.ProgBar('done')

    if vel_comp == 'W':
      if savevar == True: utils_misc.savevar(dMxint, path_vars+'dMWxint_auxgrd')  # save to file
    elif vel_comp == 'V':
      if savevar == True: utils_misc.savevar(dMxint, path_vars+'dMVxint_auxgrd')  # save to file

    return(dMxint)
コード例 #7
0
ファイル: CESM_paths.py プロジェクト: LABclimate/MT
def get_path2vars(vartype, CESMversion, mkdir=False):
    ''' Return the variable-dump directory for *vartype*, optionally creating it.

    Input:
     > vartype     : one of the keys listed in `subdirs` below | string
     > CESMversion : appended to the root path as 'CESM_V<version>' | int or string
     > mkdir       : create the directory (and its root, if needed) | boolean
    Output:
     > dirname : relative path ending with '/' | string
    '''
    vars_root = '../variables/' + 'CESM_V' + str(CESMversion) + '/'

    # dispatch table instead of the long if/elif chain
    subdirs = {
        # variables fixed to grid like maxdepths etc.
        'mgrd': 'vars_mgrd/',
        # variables resampled on densityaxis
        'dens': 'vars_dens/',
        # correlation indices etc...
        'corr': 'vars_corr/',
        # lat: 170 equally spaced boxes from 80S to 90N | z: 60 boxes
        'lat170eq80S90N': 'vars_auxgrd/lat170eq80S90N/',
        # lat: 340 equally spaced boxes from 80S to 90N | z: 60 boxes
        'lat340eq80S90N': 'vars_auxgrd/lat340eq80S90N/',
        # lat: as in ncdat.lat_aux_grid | z: 60 boxes
        'lat395model': 'vars_auxgrd/lat395model/',
        # lat: as in ncdat.lat_aux_grid but only every other entry | z: 60 boxes
        'lat198model': 'vars_auxgrd/lat198model/',
        # lat: as in ncdat.lat_aux_grid but only every fourth entry (name suggests 99 bins)
        'lat99model': 'vars_auxgrd/lat99model/',
    }
    if vartype not in subdirs:
        # NOTE: the original had an unreachable `return ()` after sys.exit -- removed
        sys.exit('Invalid key for path2vars')
    dirname = vars_root + subdirs[vartype]

    # -----------------------------------
    # return dirname and create directory
    # -----------------------------------
    if mkdir:
        try:
            utils_misc.mkdir(dirname)
        except Exception:
            # parent is probably missing: create it first, then retry the target
            utils_misc.mkdir(vars_root)
            utils_misc.mkdir(dirname)
    return (dirname)
コード例 #8
0
ファイル: UTILS_misc.py プロジェクト: LABclimate/MT
def LGS(fun_to_get_var, path_to_var, str_varname='__', verbose=True, noload=False, nosave=False, format='nc'):
    '''
    Load-or-Generate-and-Save: try to load a variable from disk; on failure
    (or if noload=True) compute it via fun_to_get_var and save the result.

    Usage:     > LGS(lambda: fun_to_get_var(args), path_to_var)

    Input:
     > fun_to_get_var : zero-argument callable producing the variable, or the
                        variable itself (anything uncallable is returned as-is)
     > path_to_var    : file path used for loading / saving | string
     > str_varname    : variable name (used for the 'nc' format and messages) | string
     > verbose        : print progress messages | boolean
     > noload         : skip the load attempt and always recompute | boolean
     > nosave         : skip saving after recomputing | boolean
     > format         : 'nc' or 'pickle'
    '''
    try:
        if noload: raise ValueError()       # force the except branch (recompute)
        if verbose: print(' > trying to load {}\n    from {}...'.format(str_varname, path_to_var))
        if format=='nc':        var = utils_misc.loadxvar(path_to_var, str_varname, verbose=False)
        elif format=='pickle':  var = utils_misc.loadvar(path_to_var, verbose=False)
        if verbose: print(' > success!\n')
    except Exception:
        if noload and verbose: print(' > no load (note that an eventually stored variable will be overwritten)\n > calculating {}...'.format(str_varname))
        elif verbose: print(' > failed to load!\n > calculating {}...'.format(str_varname))
        # BUGFIX: use callable() instead of try/except, so an exception raised
        # *inside* fun_to_get_var propagates instead of silently returning the
        # function object itself.
        if callable(fun_to_get_var): var = fun_to_get_var()
        else:                        var = fun_to_get_var  # plain (uncallable) variable
        if not nosave:  # BUGFIX: the nosave flag was previously accepted but ignored
            if verbose: print(' > success!\n > saving {} to file...'.format(str_varname))
            if format=='nc':        utils_misc.savexvar(var, path_to_var, str_varname, verbose=False)
            elif format=='pickle':  utils_misc.savevar(var, path_to_var, verbose=False)
            if verbose: print(' > success!\n')
    return(var)
コード例 #9
0
ファイル: CESM_paths.py プロジェクト: LABclimate/MT
def get_path2vars(vartype, CESMversion, mkdir=False):
    ''' Return the variable-dump directory for *vartype*, optionally creating it.

    Input:
     > vartype     : one of the keys listed in `subdirs` below | string
     > CESMversion : appended to the root path as 'CESM_V<version>' | int or string
     > mkdir       : create the directory (and its root, if needed) | boolean
    Output:
     > dirname : relative path ending with '/' | string
    '''
    vars_root = '../variables/'+'CESM_V'+str(CESMversion)+'/'

    # dispatch table instead of the long if/elif chain
    subdirs = {
        # variables fixed to grid like maxdepths etc.
        'mgrd': 'vars_mgrd/',
        # variables resampled on densityaxis
        'dens': 'vars_dens/',
        # correlation indices etc...
        'corr': 'vars_corr/',
        # lat: 170 equally spaced boxes from 80S to 90N | z: 60 boxes
        'lat170eq80S90N': 'vars_auxgrd/lat170eq80S90N/',
        # lat: 340 equally spaced boxes from 80S to 90N | z: 60 boxes
        'lat340eq80S90N': 'vars_auxgrd/lat340eq80S90N/',
        # lat: as in ncdat.lat_aux_grid | z: 60 boxes
        'lat395model': 'vars_auxgrd/lat395model/',
        # lat: as in ncdat.lat_aux_grid but only every other entry | z: 60 boxes
        'lat198model': 'vars_auxgrd/lat198model/',
        # lat: as in ncdat.lat_aux_grid, further subsampled (name suggests 99 bins)
        'lat99model': 'vars_auxgrd/lat99model/',
    }
    if vartype not in subdirs:
        # NOTE: the original had an unreachable `return()` after sys.exit -- removed
        sys.exit('Invalid key for path2vars')
    dirname = vars_root + subdirs[vartype]

    # -----------------------------------
    # return dirname and create directory
    # -----------------------------------
    if mkdir:
        try:
            utils_misc.mkdir(dirname)
        except Exception:
            # parent is probably missing: create it first, then retry the target
            utils_misc.mkdir(vars_root)
            utils_misc.mkdir(dirname)
    return(dirname)
コード例 #10
0
def LGS(fun_to_get_var,
        path_to_var,
        str_varname='__',
        verbose=True,
        noload=False,
        nosave=False,
        format='nc'):
    '''
    Load-or-Generate-and-Save: try to load a variable from disk; on failure
    (or if noload=True) compute it via fun_to_get_var and save the result.

    Usage:     > LGS(lambda: fun_to_get_var(args), path_to_var)

    Input:
     > fun_to_get_var : zero-argument callable producing the variable, or the
                        variable itself (anything uncallable is returned as-is)
     > path_to_var    : file path used for loading / saving | string
     > str_varname    : variable name (used for the 'nc' format and messages) | string
     > verbose        : print progress messages | boolean
     > noload         : skip the load attempt and always recompute | boolean
     > nosave         : skip saving after recomputing | boolean
     > format         : 'nc' or 'pickle'
    '''
    try:
        if noload: raise ValueError()  # force the except branch (recompute)
        if verbose:
            print(' > trying to load {}\n    from {}...'.format(
                str_varname, path_to_var))
        if format == 'nc':
            var = utils_misc.loadxvar(path_to_var, str_varname, verbose=False)
        elif format == 'pickle':
            var = utils_misc.loadvar(path_to_var, verbose=False)
        if verbose: print(' > success!\n')
    except Exception:
        if noload and verbose:
            print(
                ' > no load (note that an eventually stored variable will be overwritten)\n > calculating {}...'
                .format(str_varname))
        elif verbose:
            print(
                ' > failed to load!\n > calculating {}...'.format(str_varname))
        # BUGFIX: use callable() instead of try/except, so an exception raised
        # *inside* fun_to_get_var propagates instead of silently returning the
        # function object itself.
        if callable(fun_to_get_var):
            var = fun_to_get_var()
        else:
            var = fun_to_get_var  # plain (uncallable) variable
        if not nosave:  # BUGFIX: the nosave flag was previously accepted but ignored
            if verbose:
                print(' > success!\n > saving {} to file...'.format(str_varname))
            if format == 'nc':
                utils_misc.savexvar(var, path_to_var, str_varname, verbose=False)
            elif format == 'pickle':
                utils_misc.savevar(var, path_to_var, verbose=False)
            if verbose: print(' > success!\n')
    return (var)
コード例 #11
0
ファイル: CESM_paths.py プロジェクト: LABclimate/MT
def get_path2figs(vartype, mkdir=False):
    
    figs_root = '../figures/'

    # dump miscellenian test figures here
    if vartype == 'misc':
        dirname = figs_root+'figs_misc/'
    # correlation indices etc...
    elif vartype == 'corr':
        dirname = figs_root+'figs_corr/'
    # Raise error for invalid key
    else: 
        sys.exit('Invalid key for path2vars')
        return()
    
    # -----------------------------------
    # return dirname and create directory
    # -----------------------------------
    if mkdir == True:
        try:    utils_misc.mkdir(dirname)
        except: utils_misc.mkdir(figs_root); utils_misc.mkdir(dirname)
    return(dirname)
    
コード例 #12
0
ファイル: _main_CESM_create.py プロジェクト: LABclimate/MT
# Run selection: version and run name are used to build the variable-dump path below.
CESMversion = 4
CESMrun = 'lm_1deg'
# Directory of annual-mean data for the chosen run.
fpath = paths.get_path2data('lm_1deg', 'anndat')
# One POP history file per model year 850..1850 (inclusive).
fnames = ['b40.lm850-1850.1deg.001.pop.h.{:04d}.ann.4.cdf'.format(i) for i in np.arange(850, 1851)]
# -----------------------------------------------------------------------------
# Dumping paths for variables
# basic path for variables - please choose correct CESMversion, CESMrun above
path_var = '../variables/CESM{}_{}/.'.format(CESMversion, CESMrun)


# ==========================================================================================================================================================
#  Gridding parameters
# ==========================================================================================================================================================
# -----------------------------------------------------------------------------
# alternative paths for local use:
ncdat = utils_misc.loadxvar(path_var+'/any_ncdat', None)                        # get ncdata

# Save the static grid fields to individual files under <path_var>/grid.
folder_grid = '/grid'
utils_misc.savexvar(ncdat.REGION_MASK, path_var+folder_grid+'/REGION_MASK', 'REGION_MASK')
utils_misc.savexvar(ncdat.TLAT, path_var+folder_grid+'/TLAT', 'TLAT')
utils_misc.savexvar(ncdat.TLONG, path_var+folder_grid+'/TLONG', 'TLONG')
utils_misc.savexvar(ncdat.lat_aux_grid, path_var+folder_grid+'/lat_auxgrd', 'lat')
utils_misc.savexvar(ncdat.z_t, path_var+folder_grid+'/zT', 'zT')

# ==========================================================================================================================================================
#  Stack ncdata in time (e.g. BSF, MOC, ...)
# ==========================================================================================================================================================
''' Q: good choice of transport_reg=1 #??
'''
# Accumulators for the time-stacking loop (presumably appended to further below -- not visible here).
BSF = []
MOC = []
コード例 #13
0
ファイル: CESM_utils_dMOC.py プロジェクト: LABclimate/MT
def calc_dMOC_auxgrd(lat_auxgrd, dens_auxgrd, vel_comp, dMxint, ncdat, path_vars, do_norm=True, savevar=True):
    '''
    Compute the density-space meridional overturning circulation (dMOC) on the
    auxiliary grid by cumulatively summing the zonally integrated transport.

    Input:
     > lat_auxgrd   : meridional auxillary grid | nparray
     > dens_auxgrd  : density auxillary grid | nparray
     > vel_comp     : either 'W' or 'V' | string
                      NOTE(review): any other value leaves fname/xrname undefined
                      and raises a NameError at the save step below -- confirm
                      callers only ever pass 'W' or 'V'.
     > dMxint       : zonally integrated volume transport | nparray [ndens, nlat]
     > ncdat        : netCDFdata to load REGION_MASK
     > path_vars    : path for saving variables | string
     > do_norm      : do normalisation relative to northern boundary | boolean
     > savevar      : save result to file | boolean
    Output:
     > dMOC         : dMOC of shape [nPDbins, nlat] | nparray
    Steps:
     > calculate dMOC by cumulative integration of dMxint along the aux grid:
       over latitude for vel_comp='W', over density bins for vel_comp='V'
     > normalisation relative to northern boundary: at every point subtract the
       northernmost value at the same depth, so the streamfunction closes at NP.
    '''
    # a few variables to speed up subsequent loops
    iter_lat_auxgrd = np.arange(len(lat_auxgrd))
    iter_dens_auxgrd = np.arange(len(dens_auxgrd))

    # preallocation of dMOC as np-array; start from dMxint and accumulate in place
    dMOC = np.copy(dMxint) # start with dMxint, which subsequently will be summed up

    if vel_comp == 'W':
      # meridional integration along aux grid: running sum over latitude index n
      print('> meridional integration')
      for n in iter_lat_auxgrd[1:]:
        utils_misc.ProgBar('step', step=n, nsteps=len(iter_lat_auxgrd), forceinit=True, minbarlen=60)
        dMOC[:,n] = np.nansum([dMOC[:,n], dMOC[:,n-1]], axis=0)         # add the column to the south (NaN-safe)
      utils_misc.ProgBar('done')

      xrname = 'dMOC on auxillary grid calculated from WVEL'             # name of xarray
      fname = 'dMOC_auxgrd_W'                                            # name for saving

    elif vel_comp == 'V':
      # vertical integration along aux grid: running sum over density index k
      # (the original comment said "meridional integration" here, which was wrong)
      print('> vertical integration')
      for k in iter_dens_auxgrd[1:]:
        utils_misc.ProgBar('step', step=k, nsteps=len(iter_dens_auxgrd), forceinit=True, minbarlen=60)
        dMOC[k,:] = np.nansum([dMOC[k,:], dMOC[k-1,:]], axis=0)         # add the bin above (NaN-safe)
      utils_misc.ProgBar('done')

      xrname = 'dMOC on auxillary grid calculated from VVEL'             # name of xarray
      fname = 'dMOC_auxgrd_V'                                            # name for saving

    '''
    # write to xarray
    dMOC = xr.DataArray(dMOC,
		attrs={'units':u'Sv'},
                coords=[np.arange(len(dens_auxgrd)), np.arange(len(lat_auxgrd))],
		dims=['dens', 'nlat'],
                name=xrname)
    '''
    # normalisation relative to North (shift values such that zero at northern boundary)
    if do_norm == True:
      dMOC = dMOC - dMOC[:,-1]

    # save to file
    if savevar == True:
      utils_misc.savevar(dMOC, path_vars + fname)

    return(dMOC)
コード例 #14
0
    CT = gsw.CT_from_pt(SA, PT)                 # conservative temperature
    sig2 = gsw.sigma2(SA, CT)                   # potential density anomaly referenced to 2000dbar
    
    # - conversion T-->U
    densU = np.zeros_like(sig2)
    foo1 = utils_ana.canonical_cumsum(sig2, 2, axis=-1)
    densU[:,:-1,:-1] = .25*utils_ana.canonical_cumsum(foo1, 2, axis=-2)
    densU[:,-1,:-1] = .5*utils_ana.canonical_cumsum(sig2, 2, axis=-1)[:,-1,:]
    densU[:,:-1,-1] = .5*utils_ana.canonical_cumsum(sig2, 2, axis=-2)[:,:,-1]
    densU[:,-1,-1] = sig2[:,-1,-1]
    
    # density bins:  border-values (=dbb), center-values (=dbc) and thickness (=ddb)
    dbb = np.concatenate((np.linspace(dbsetup[0,0],dbsetup[0,1],dbsetup[0,2]), np.linspace(dbsetup[1,0],dbsetup[1,1],dbsetup[1,2]), np.linspace(dbsetup[2,0],dbsetup[2,1],dbsetup[2,2]), np.linspace(dbsetup[3,0],dbsetup[3,1],dbsetup[3,2]), np.linspace(dbsetup[4,0],dbsetup[4,1],dbsetup[4,2])))
    dbc = np.convolve(dbb, np.array([.5,.5]))[1:-1]
    
    # depth of isopycnals (zdbbc) calculated as z(dbc) (=zdbc) and as c(zdbb) (=zdbbc)
    z_t_3d = utils_conv.exp_k_to_kji(ncdat.z_t, densU.shape[-2], densU.shape[-1])
    zdbc = utils_conv.resample_colwise(z_t_3d, densU, dbc, 'lin', fill_value=np.nan, mask = ATLboolmask, mono_method='sort')
    zdbb = utils_conv.resample_colwise(z_t_3d, densU, dbb, 'lin', fill_value=np.nan, mask = ATLboolmask, mono_method='sort')
    zdbbc = np.ones_like(zdbc) * np.nan
    for j in np.arange(zdbb.shape[-2]):
        for i in np.arange(zdbb.shape[-1]):
            zdbbc[:,j,i] = np.convolve(zdbb[:,j,i], np.array([.5,.5]))[1:-1] # centre-values
    
    # save variables
    utils_misc.savevar(sig2, path_sig2)
    utils_misc.savevar(densU, path_densU)
    utils_misc.savevar(zdbc, path_zdbc)
    utils_misc.savevar(zdbb, path_zdbb)
    utils_misc.savevar(zdbbc, path_zdbbc)
コード例 #15
0
                                       sig2U,
                                       DBc,
                                       method='dMV',
                                       fill_value=np.nan,
                                       mask=ATLboolmask,
                                       mono_method='force')
    dDB3d = utils_conv.exp_k_to_kji(dDB, dMVf.shape[-2], dMVf.shape[-1])
    dMVfc = utils_ana.nancumsum(dMVf * dDB3d, axis=0)
    dMVfcp = utils_conv.project_on_auxgrd(dMVfc, ncdat.ANGLE.values)
    dMOC = np.nansum(dMVfcp, axis=-1)

    # -----------------------------------------------------------------------------
    # PART III // SAVE SOME VARIABLES
    # -----------------------------------------------------------------------------
    # Note: the part [:-4] removes ".cdf"

    # create folder with choice of densitybinning
    utils_misc.mkdir(dir_vars)

    utils_misc.savevar(sig2T, dir_vars + 'sig2T_' + fnames[ii][:-4], 'sig2T')
    utils_misc.savevar(sig2U, dir_vars + 'sig2U_' + fnames[ii][:-4])

    #utils_misc.savevar(zDBc, dir_vars+'zDBc_'+fnames[ii][:-4], 'zDBc')
    #utils_misc.savevar(zDBb, dir_vars+'zDBb_'+fnames[ii][:-4], 'zDBb')
    #utils_misc.savevar(zDBbc, dir_vars+'zDBbc_'+fnames[ii][:-4], 'zDBbc')
    utils_misc.savevar(dMOC, dir_vars + 'dMOC_' + fnames[ii][:-4], 'dMOC')

    # -----------------------------------------------------------------------------
    # CLOSE NC-FILE
    # -----------------------------------------------------------------------------
    ncdat.close()
コード例 #16
0
ファイル: CESM_utils_mask.py プロジェクト: LABclimate/MT
def gen_fraction_mask(auxLAT, ncdat):
    ''' Boolean of size (nlatAUXgrid, nlatMODELgrid, nlonMODELgrid)
        It is True where latitudes of auxillary and model grid lie in the same box. 
        
        Important Note: check all border values! sometimes I cropped the loops 
                        in order not to get missing values, e.g. as U-grid and 
                        T-grid have the same shape
    '''
    #auxLAT = np.arange(-90,90, .1)
    
    print('> generating mask_auxgrd_overlay_lat')
    # -----------------------------------------------------------------
    # - Pre-allocation of mask
    fraction_mask = np.zeros([len(auxLAT)], dtype=object) # second dimension is going to be expanded below if needed
    for ii in np.arange(len(auxLAT)):
        fraction_mask[ii] = np.nan*np.ones(3) # dummies, which will be deleted afterwards
    stats = dict()
    stats['cases'] = np.zeros(10)
    stats['nLATbtw'] = np.nan * np.ones_like(ncdat.TLAT)
    stats['area'] = np.zeros([len(ncdat.nlat), len(ncdat.nlon), 3])
    stats['area'][:,:,0] = ncdat.TAREA
    # -----------------------------------------------------------------
    # - LATITUDES and LONGITUES
    TLAT = np.array(ncdat.TLAT)                 # mgrd
    TLONG = np.array(ncdat.TLONG)               # mgrd
    ULAT = np.array(ncdat.ULAT)                 # mgrd
    ULONG = np.array(ncdat.ULONG)               # mgrd
    COSULAT = np.cos(np.deg2rad(ULAT))          # cos of ULAT
    SINULAT = np.sin(np.deg2rad(ULAT))          # sin of ULAT
    COSauxLAT = np.cos(np.deg2rad(auxLAT))      # cos of auxLAT
    SINauxLAT = np.sin(np.deg2rad(auxLAT))      # sin of auxLAT
    
    # -----------------------------------------------------------------
    # - FUNCTIONS
    def dist_flat(COSLAT, SINULAT, LONG):
        ''' formula found on: https://www.math.ksu.edu/~dbski/writings/haversine.pdf'''
        ''' Radius_Earth is mean of Wikipedia-Values for R_pol and R_eq'''
        R = np.mean([6356752, 6371008]) # mean Earth radius in [m]
        return(R*np.sqrt(2-2*np.prod(COSLAT)*np.cos(np.deg2rad(np.diff(LONG)))-2*np.prod(SINLAT)))
    
    def area_heron(d):
        ''' formula found on: https://de.wikipedia.org/wiki/Dreiecksfl%C3%A4che'''
        s = np.sum(d)/2
        return(np.sqrt(s*(s-d[0])*(s-d[1])*(s-d[2])))
        
        
    def get_ji(idxji, j, i):
        out = np.zeros(len(idxji), dtype=object) # pre-allocation of output
        for ii in np.arange(len(idxji)):
            if idxji[ii] == 0: out[ii] = tuple([j-1, i-1])
            if idxji[ii] == 1: out[ii] = tuple([j-1, i])
            if idxji[ii] == 2: out[ii] = tuple([j, i-1])
            if idxji[ii] == 3: out[ii] = tuple([j, i])
          
        return(out)
        
    def argsort_WtoE(idx):
        return(np.argsort([ULONG[idx[ii]] for ii in np.arange(len(idx))]))
        
    def area_1_triangle(COSLAT, SINLAT, LONG):
        d = np.zeros(3)     # pre-allocation of distances
        # legs in m
        d[0] = dist_flat(COSLAT[[0,1]], SINLAT[[0,1]], LONG[[0,1]])
        d[1] = dist_flat(COSLAT[[1,2]], SINLAT[[1,2]], LONG[[1,2]])
        d[2] = dist_flat(COSLAT[[2,0]], SINLAT[[2,0]], LONG[[2,0]])
        # area
        return(area_heron(d))
        
    def area_2_triangles(COSLAT, SINLAT, LONG):
        d = np.zeros(5)     # pre-allocation of distances
        # legs in m       
        d[0] = dist_flat(COSLAT[[0,1]], SINLAT[[0,1]], LONG[[0,1]])
        d[1] = dist_flat(COSLAT[[1,2]], SINLAT[[1,2]], LONG[[1,2]])
        d[2] = dist_flat(COSLAT[[2,0]], SINLAT[[2,0]], LONG[[2,0]])        
        d[3] = dist_flat(COSLAT[[2,3]], SINLAT[[2,3]], LONG[[2,3]])
        d[4] = dist_flat(COSLAT[[0,3]], SINLAT[[0,3]], LONG[[0,3]])
        # areas
        AREAa = area_heron(d[[0,1,2]])
        AREAb = area_heron(d[[2,3,4]])
        return(AREAa + AREAb)
        
    def area_3_triangles(COSLAT, SINLAT, LONG):
        d = np.zeros(7)     # pre-allocation of distances
        # legs in m
        d[0] = dist_flat(COSLAT[[0,1]], SINLAT[[0,1]], LONG[[0,1]])
        d[1] = dist_flat(COSLAT[[1,2]], SINLAT[[1,2]], LONG[[1,2]])
        d[2] = dist_flat(COSLAT[[2,0]], SINLAT[[2,0]], LONG[[2,0]])        
        d[3] = dist_flat(COSLAT[[2,3]], SINLAT[[2,3]], LONG[[2,3]])
        d[4] = dist_flat(COSLAT[[0,3]], SINLAT[[0,3]], LONG[[0,3]])
        d[5] = dist_flat(COSLAT[[3,4]], SINLAT[[3,4]], LONG[[3,4]])
        d[6] = dist_flat(COSLAT[[0,4]], SINLAT[[0,4]], LONG[[0,4]])
        # areas
        AREAa = area_heron(d[[0,1,2]])
        AREAb = area_heron(d[[2,3,4]])
        AREAc = area_heron(d[[4,5,6]])
        return(AREAa + AREAb + AREAc)
        
    # -----------------------------------------------------------------
    # - LOOP over mgrd        
    for j in np.arange(1,len(ncdat.nlat)):
        utils_misc.ProgBar('step', step=j, nsteps=len(ncdat.nlat), minbarlen=60, forceinit = True)
        for i in np.arange(1,len(ncdat.nlon)):
            ULATji = ULAT[j-1:j+1, i-1:i+1].flatten()   # flat and thus allocatable with [0,1,2,3]
            ULONGji = ULONG[j-1:j+1, i-1:i+1].flatten() # flat and thus allocatable with [0,1,2,3]
            MAXLAT = np.max(ULATji)
            MINLAT = np.min(ULATji)
            idxLATbtw = np.where((auxLAT<MAXLAT) & (auxLAT>MINLAT))[-1] # indices of auxLAT between min, max corner of ULATji
            LATbtw = auxLAT[idxLATbtw]
            nLATbtw = len(idxLATbtw)
            try: LATbtwplusone = auxLAT[idxLATbtw+1]
            except: LATbtwplusone = np.hstack((auxLAT[idxLATbtw], 100)) #! buffer value for overshoot at upper border of lat_auxgrd
            AREA = np.zeros(nLATbtw+1) # pre-allocation of AREA
            
            stats['nLATbtw'][j,i] = nLATbtw

            ## --- only in one LATbin -----------------------------------------
            if nLATbtw == 0:
                stats['cases'][0] += 1
                idx = np.where(auxLAT<MINLAT)[-1][-1]
                fraction_mask[idx] = np.vstack((fraction_mask[idx], np.array([j,i,1])))
                # -----------------------------------------------------------------
                # - calculation of areas for check
                COSLAT, SINLAT, LONG, = np.zeros(4), np.zeros(4), np.zeros(4)   # pre-allocation for points
                COSLAT[0], SINLAT[0], LONG[0] = COSULAT[j-1,i-1], SINULAT[j-1,i-1], ULONG[j-1,i-1] # SW corner
                COSLAT[1], SINLAT[1], LONG[1] = COSULAT[j-1,i], SINULAT[j-1,i], ULONG[j-1,i] # SE corner
                COSLAT[2], SINLAT[2], LONG[2] = COSULAT[j,i], SINULAT[j,i], ULONG[j,i] # NE corner
                COSLAT[3], SINLAT[3], LONG[3] = COSULAT[j,i-1], SINULAT[j,i-1], ULONG[j,i-1] # NW corner
                A = area_2_triangles(COSLAT, SINLAT, LONG)
                # - Comparison with TAREA from ncdat
                stats['area'][j,i,1] = A
                stats['area'][j,i,2] = np.diff([A, ncdat.TAREA[j,i]]) 
                
                continue

            
            ## --- multiple LATbins -------------------------------------------
            # -----------------------------------------------------------------
            # - LONGITUDES of intersects  #!THCHK
            LONGbtwW, LONGbtwE = np.zeros(nLATbtw), np.zeros(nLATbtw)
            # lowest values
            nLATfloor = np.sum(ULATji<LATbtw[0]) # number of corners below lowest LATbtw
            if nLATfloor == 1 and ULATji[0]<LATbtw[0]: # W corner is lowest
                LONGbtwW[0] = ULONGji[0] + np.diff(ULONGji[[0,2]])/np.diff(ULATji[[0,2]]) * (LATbtw[0]-ULATji[0])
                LONGbtwE[0] = ULONGji[0] + np.diff(ULONGji[[0,1]])/np.diff(ULATji[[0,1]]) * (LATbtw[0]-ULATji[0])
            elif nLATfloor == 1 and ULATji[1]<LATbtw[0]: # E corner is lowest
                LONGbtwW[0] = ULONGji[1] + np.diff(ULONGji[[1,0]])/np.diff(ULATji[[1,0]]) * (LATbtw[0]-ULATji[1])
                LONGbtwE[0] = ULONGji[1] + np.diff(ULONGji[[1,3]])/np.diff(ULATji[[1,3]]) * (LATbtw[0]-ULATji[1])
            elif nLATfloor == 2:
                LONGbtwW[0] = ULONGji[0] + np.diff(ULONGji[[0,2]])/np.diff(ULATji[[0,2]]) * (LATbtw[0]-ULATji[0])
                LONGbtwE[0] = ULONGji[1] + np.diff(ULONGji[[1,3]])/np.diff(ULATji[[1,3]]) * (LATbtw[0]-ULATji[1])
            # highest values
            nLATtop = np.sum(ULATji>LATbtw[-1]) # number of corners above highest LATbtw
            if nLATtop == 1 and ULATji[2]>LATbtw[-1]: # W corner is highest
                LONGbtwW[-1] = ULONGji[2] + np.diff(ULONGji[[2,0]])/np.diff(ULATji[[2,0]]) * (LATbtw[-1]-ULATji[2])
                LONGbtwE[-1] = ULONGji[2] + np.diff(ULONGji[[2,3]])/np.diff(ULATji[[2,3]]) * (LATbtw[-1]-ULATji[2])
            elif nLATtop == 1 and ULATji[3]>LATbtw[-1]: # E corner is highest
                LONGbtwW[-1] = ULONGji[3] + np.diff(ULONGji[[3,2]])/np.diff(ULATji[[3,2]]) * (LATbtw[-1]-ULATji[3])
                LONGbtwE[-1] = ULONGji[3] + np.diff(ULONGji[[3,1]])/np.diff(ULATji[[3,1]]) * (LATbtw[-1]-ULATji[3])
            elif nLATtop == 2:
                LONGbtwW[-1] = ULONGji[2] + np.diff(ULONGji[[2,0]])/np.diff(ULATji[[2,0]]) * (LATbtw[-1]-ULATji[2])
                LONGbtwE[-1] = ULONGji[3] + np.diff(ULONGji[[3,1]])/np.diff(ULATji[[3,1]]) * (LATbtw[-1]-ULATji[3])
            # eventual middle values
            if nLATbtw >=3:
                LONGbtwW[1:-1] = ULONGji[0] + np.diff(ULONGji[[0,2]])/np.diff(ULATji[[0,2]]) * (LATbtw[1:-1]-ULATji[0])
                LONGbtwE[1:-1] = ULONGji[1] + np.diff(ULONGji[[1,3]])/np.diff(ULATji[[1,3]]) * (LATbtw[1:-1]-ULATji[1])           
            
            # -----------------------------------------------------------------
            # - AREA calculation...
            # ... of lower part
            idxLATbelow = get_ji(np.where(ULATji<LATbtw[0])[-1], j, i)
            nbelow = len(idxLATbelow)
            if nbelow == 0:
                print('nbelow = 0!! What next?')
                raise ValueError('Error')
            elif nbelow == 1: # Case 7
                stats['cases'][7] += 1
                COSLAT, SINLAT, LONG= np.zeros(3), np.zeros(3), np.zeros(3)     # pre-allocation for points
                COSLAT[0], SINLAT[0], LONG[0] = COSULAT[idxLATbelow[0]], SINULAT[idxLATbelow[0]], ULONG[idxLATbelow[0]] # corner
                COSLAT[1], SINLAT[1], LONG[1] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwE[0]           # E intersect
                COSLAT[2], SINLAT[2], LONG[2] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwW[0]           # W intersect
                AREA[0] = area_1_triangle(COSLAT, SINLAT, LONG)
            elif nbelow == 2: # Case 8
                stats['cases'][8] += 1
                COSLAT, SINLAT, LONG, = np.zeros(4), np.zeros(4), np.zeros(4)   # pre-allocation for points
                COSLAT[0], SINLAT[0], LONG[0] = COSULAT[idxLATbelow[0]], SINULAT[idxLATbelow[0]], ULONG[idxLATbelow[0]] # W corner
                COSLAT[1], SINLAT[1], LONG[1] = COSULAT[idxLATbelow[1]], SINULAT[idxLATbelow[1]], ULONG[idxLATbelow[1]] # E corner
                COSLAT[2], SINLAT[2], LONG[2] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwE[0]           # E intersect
                COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwW[0]           # W intersect
                AREA[0] = area_2_triangles(COSLAT, SINLAT, LONG)
            elif nbelow == 3: # Case 9
                stats['cases'][9] += 1
                COSLAT, SINLAT, LONG, = np.zeros(5), np.zeros(5), np.zeros(5)   # pre-allocation for points
                idx = argsort_WtoE(idxLATbelow) # indices of indices of Lower corners, sorted from W to E
                COSLAT[0], SINLAT[0], LONG[0] = COSULAT[idxLATbelow[idx[0]]], SINULAT[idxLATbelow[idx[0]]], ULONG[idxLATbelow[idx[0]]] # W corner
                COSLAT[1], SINLAT[1], LONG[1] = COSULAT[idxLATbelow[idx[1]]], SINULAT[idxLATbelow[idx[1]]], ULONG[idxLATbelow[idx[1]]] # C corner
                COSLAT[2], SINLAT[2], LONG[2] = COSULAT[idxLATbelow[idx[2]]], SINULAT[idxLATbelow[idx[2]]], ULONG[idxLATbelow[idx[2]]] # E corner
                COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwE[0]           # E intersect
                COSLAT[4], SINLAT[4], LONG[4] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwW[0]           # W intersect
                AREA[0] = area_3_triangles(COSLAT, SINLAT, LONG)
   
            # ... of eventual centre part(s)      
            if nLATbtw>=2:
                for bb in np.arange(1,nLATbtw):
                    idxLATabove = get_ji(np.where((ULATji>LATbtw[bb-1]) & (ULATji<=LATbtwplusone[bb-1]))[-1], j, i)
                    nabove = len(idxLATabove)
                    if nabove == 0: # Case 4
                        stats['cases'][4] += 1
                        COSLAT, SINLAT, LONG, = np.zeros(4), np.zeros(4), np.zeros(4)   # pre-allocation for points
                        COSLAT[0], SINLAT[0], LONG[0] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwW[bb]  # NW intersect
                        COSLAT[1], SINLAT[1], LONG[1] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwE[bb]  # NE intersect
                        COSLAT[2], SINLAT[2], LONG[2] = COSauxLAT[idxLATbtw[bb-1]], SINauxLAT[idxLATbtw[bb-1]], LONGbtwW[bb-1]        # SW intersect
                        COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[bb-1]], SINauxLAT[idxLATbtw[bb-1]], LONGbtwE[bb-1]         # SE intersect
                        AREA[bb] = area_2_triangles(COSLAT, SINLAT, LONG)
                    elif nabove == 1 and ULONG[idxLATabove[0]]<LONGbtwW[bb-1]: # Case 5a
                        stats['cases'][5] += 1
                        COSLAT, SINLAT, LONG, = np.zeros(5), np.zeros(5), np.zeros(5)   # pre-allocation for points
                        COSLAT[0], SINLAT[0], LONG[0] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwE[bb]  # NE intersect
                        COSLAT[1], SINLAT[1], LONG[1] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwW[bb]  # NW intersect
                        COSLAT[2], SINLAT[2], LONG[2] = COSULAT[idxLATabove[0]], SINULAT[idxLATabove[0]], ULONG[idxLATabove[0]] # corner
                        COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[bb-1]], SINauxLAT[idxLATbtw[bb-1]], LONGbtwW[bb-1]        # SW intersect
                        COSLAT[4], SINLAT[4], LONG[4] = COSauxLAT[idxLATbtw[bb-1]], SINauxLAT[idxLATbtw[bb-1]], LONGbtwE[bb-1]         # SE intersect
                        AREA[bb] = area_3_triangles(COSLAT, SINLAT, LONG)
                    elif nabove == 1 and ULONG[idxLATabove[0]]>LONGbtwE[bb-1]: # Case 5b
                        stats['cases'][5] += 1
                        COSLAT, SINLAT, LONG, = np.zeros(5), np.zeros(5), np.zeros(5)   # pre-allocation for points
                        COSLAT[0], SINLAT[0], LONG[0] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwE[bb]  # NE intersect
                        COSLAT[1], SINLAT[1], LONG[1] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwW[bb]  # NW intersect
                        COSLAT[2], SINLAT[2], LONG[2] = COSauxLAT[idxLATbtw[bb-1]], SINauxLAT[idxLATbtw[bb-1]], LONGbtwW[bb-1]        # SW intersect
                        COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[bb-1]], SINauxLAT[idxLATbtw[bb-1]], LONGbtwE[bb-1]         # SE intersect
                        COSLAT[4], SINLAT[4], LONG[4] = COSULAT[idxLATabove[0]], SINULAT[idxLATabove[0]], ULONG[idxLATabove[0]] # corner
                        AREA[bb] = area_3_triangles(COSLAT, SINLAT, LONG)                        
                    elif nabove == 2: # Case 6
                        stats['cases'][6] += 1
                        idx = argsort_WtoE(idxLATabove) # indices of indices of Upper corners, sorted from W to E
                        COSLAT, SINLAT, LONG, = np.zeros(6), np.zeros(6), np.zeros(6)   # pre-allocation for points
                        COSLAT[0], SINLAT[0], LONG[0] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwE[bb]  # NE intersect
                        COSLAT[1], SINLAT[1], LONG[1] = COSauxLAT[idxLATbtw[bb]], SINauxLAT[idxLATbtw[bb]], LONGbtwW[bb]  # NW intersect
                        COSLAT[2], SINLAT[2], LONG[2] = COSULAT[idxLATabove[idx[0]]], SINULAT[idxLATabove[idx[0]]], ULONG[idxLATabove[idx[0]]] # W corner
                        COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[bb-1]], SINauxLAT[idxLATbtw[bb-1]], LONGbtwW[bb-1]        # SW intersect
                        COSLAT[4], SINLAT[4], LONG[4] = COSauxLAT[idxLATbtw[bb-1]]  , SINauxLAT[idxLATbtw[bb-1]], LONGbtwE[bb-1]         # SE intersect
                        COSLAT[5], SINLAT[5], LONG[5] = COSULAT[idxLATabove[idx[1]]], SINULAT[idxLATabove[idx[1]]], ULONG[idxLATabove[idx[1]]] # E corner
                        AREA[bb] = area_3_triangles(COSLAT, SINLAT, LONG)

            # ... of upper part
            idxLATabove = get_ji(np.where(ULATji>LATbtw[-1])[-1], j, i)
            nabove = len(idxLATabove)
            if nabove == 0:
                print('nbelow = 0!! What next?')
                raise ValueError('Error')
            elif nabove == 1: # Case 1
                stats['cases'][1] += 1                
                COSLAT, SINLAT, LONG= np.zeros(3), np.zeros(3), np.zeros(3)     # pre-allocation for points
                COSLAT[0], SINLAT[0], LONG[0] = COSULAT[idxLATabove[0]], SINULAT[idxLATabove[0]], ULONG[idxLATabove[0]] # corner
                COSLAT[1], SINLAT[1], LONG[1] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwW[0]           # W intersect
                COSLAT[2], SINLAT[2], LONG[2] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwE[0]           # E intersect
                AREA[-1] = area_1_triangle(COSLAT, SINLAT, LONG)
            elif nabove == 2: # Case 2
                stats['cases'][2] += 1    
                COSLAT, SINLAT, LONG, = np.zeros(4), np.zeros(4), np.zeros(4)   # pre-allocation for points
                COSLAT[0], SINLAT[0], LONG[0] = COSULAT[idxLATabove[1]], SINULAT[idxLATabove[1]], ULONG[idxLATabove[1]] # E corner
                COSLAT[1], SINLAT[1], LONG[1] = COSULAT[idxLATabove[0]], SINULAT[idxLATabove[0]], ULONG[idxLATabove[0]] # W corner
                COSLAT[2], SINLAT[2], LONG[2] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwW[0]           # W intersect
                COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwE[0]           # E intersect
                AREA[-1] = area_2_triangles(COSLAT, SINLAT, LONG)
            elif nabove == 3: # Case 3
                stats['cases'][3] += 1       
                COSLAT, SINLAT, LONG, = np.zeros(5), np.zeros(5), np.zeros(5)   # pre-allocation for points
                idx = argsort_WtoE(idxLATabove) # indices of indices of Upper corners, sorted from W to E
                COSLAT[0], SINLAT[0], LONG[0] = COSULAT[idxLATabove[idx[2]]], SINULAT[idxLATabove[idx[2]]], ULONG[idxLATabove[idx[2]]] # Upper corner West
                COSLAT[1], SINLAT[1], LONG[1] = COSULAT[idxLATabove[idx[1]]], SINULAT[idxLATabove[idx[1]]], ULONG[idxLATabove[idx[1]]] # Upper corner centre
                COSLAT[2], SINLAT[2], LONG[2] = COSULAT[idxLATabove[idx[0]]], SINULAT[idxLATabove[idx[0]]], ULONG[idxLATabove[idx[0]]] # Upper corner East
                COSLAT[3], SINLAT[3], LONG[3] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwW[0]           # West intersect
                COSLAT[4], SINLAT[4], LONG[4] = COSauxLAT[idxLATbtw[0]], SINauxLAT[idxLATbtw[0]], LONGbtwE[0]           # East intersect
                AREA[-1] = area_3_triangles(COSLAT, SINLAT, LONG)
            
            # -----------------------------------------------------------------
            # - Fraction of areas
            fracAREA = AREA/np.sum(AREA)
            # - Comparison with TAREA from ncdat
            stats['area'][j,i,1] = np.sum(AREA)
            stats['area'][j,i,2] = np.diff([np.sum(AREA), ncdat.TAREA[j,i]])            
            # -----------------------------------------------------------------
            # - Write to fraction_mask
            for ii in np.arange(nLATbtw+1):
                if ii == 0: idx = idxLATbtw[0]-1
                else:       idx = idxLATbtw[ii-1]
                try: fraction_mask[idx] = np.vstack((fraction_mask[idx], np.array([j,i,fracAREA[ii]])))
                except: debug_here()
    
    # ---------------------------------------------------------------------             
    # delete dummy arrays
    for ii in np.arange(len(auxLAT)):
        fraction_mask[ii] = np.delete(fraction_mask[ii], 0,0)
    print('statistics cases: \n{} '.format(stats['cases']))
    # return(fraction_mask, stats)
    return(fraction_mask)
コード例 #17
0
ファイル: CESM_utils_dMOC.py プロジェクト: LABclimate/MT
def calc_dMOC_auxgrd(lat_auxgrd,
                     dens_auxgrd,
                     vel_comp,
                     dMxint,
                     ncdat,
                     path_vars,
                     do_norm=True,
                     savevar=True):
    '''
    Compute the meridional overturning circulation in density space (dMOC) on
    the auxiliary grid by cumulative summation of the zonally integrated
    transport dMxint.

    Input:
     > lat_auxgrd               : meridional auxillary grid | nparray
     > dens_auxgrd              : density auxillary grid | nparray
     > vel_comp         	: either 'W' or 'V' | string
     > dMxint                   : zonally integrated volume transport | nparray of shape [ndens, nlat]
     > ncdat                    : netCDFdata (kept for interface compatibility; not used here)
     > path_vars                : path for saving variables | string
     > do_norm                  : do normalisation relative to northern boundary | boolean
     > savevar                  : save result to file | boolean
    Output:
     > dMOC                     : dMOC of shape [ndens, nlat] | nparray
    Raises:
     > ValueError               : if vel_comp is neither 'W' nor 'V'
    Steps:
     > calculate dMOC by cumulative integration of dMxint along the aux grid
     > normalisation relative to northern boundary: at every point subtract northernmost
       value at same density, such that streamfunction closes at NP.
    '''
    # a few variables to speed up subsequent loops
    iter_lat_auxgrd = np.arange(len(lat_auxgrd))
    iter_dens_auxgrd = np.arange(len(dens_auxgrd))

    # preallocation of dMOC as np-array:
    # start with dMxint, which subsequently will be summed up in place
    dMOC = np.copy(dMxint)

    if vel_comp == 'W':
        # meridional cumulative integration along aux grid
        print('> meridional integration')
        for n in iter_lat_auxgrd[1:]:
            utils_misc.ProgBar('step',
                               step=n,
                               nsteps=len(iter_lat_auxgrd),
                               forceinit=True,
                               minbarlen=60)
            # nansum treats NaN (land / missing cells) as zero in the running sum
            dMOC[:, n] = np.nansum([dMOC[:, n], dMOC[:, n - 1]], axis=0)
        utils_misc.ProgBar('done')

        xrname = 'dMOC on auxillary grid calculated from WVEL'  # name of xarray
        fname = 'dMOC_auxgrd_W'  # name for saving

    elif vel_comp == 'V':
        # cumulative integration along the density axis of the aux grid
        print('> vertical integration')
        for k in iter_dens_auxgrd[1:]:
            utils_misc.ProgBar('step',
                               step=k,
                               nsteps=len(iter_dens_auxgrd),
                               forceinit=True,
                               minbarlen=60)
            dMOC[k, :] = np.nansum([dMOC[k, :], dMOC[k - 1, :]], axis=0)
        utils_misc.ProgBar('done')

        xrname = 'dMOC on auxillary grid calculated from VVEL'  # name of xarray
        fname = 'dMOC_auxgrd_V'  # name for saving

    else:
        # bugfix: an unknown vel_comp previously fell through and raised a
        # NameError on 'fname' at the save step below -- fail early and clearly
        raise ValueError("vel_comp must be 'W' or 'V', got {!r}".format(vel_comp))

    # normalisation relative to North (shift values such that zero at northern boundary)
    if do_norm:
        # bugfix: keep the column dimension (dMOC[:, -1:] has shape [ndens, 1]) so the
        # northernmost profile broadcasts across latitude; the former dMOC[:, -1]
        # (shape [ndens]) only broadcast correctly when ndens happened to equal nlat
        dMOC = dMOC - dMOC[:, -1:]

    # save to file
    if savevar:
        utils_misc.savevar(dMOC, path_vars + fname)

    return (dMOC)
コード例 #18
0
ファイル: main_CESM_controls.py プロジェクト: LABclimate/MT
# build the auxiliary grid: meridional axis plus T-level and W-level depth axes
lat_auxgrd, zT_auxgrd, z_w_auxgrd = utils_mask.gen_auxgrd(ncdat, auxgrd_name)
lat_mgrd = ncdat.TLAT.isel(
    nlon=0)  # TLAT of the first longitude column taken as the latitude of each j #! very inappropriate
# ---------------------------------------------------------------------------------------
# - Potential density and binning
#   NOTE(review): SALT/TEMP of the first time slice are fed to gsw as absolute
#   salinity / potential temperature without unit conversion -- confirm the model
#   output units match what gsw expects.
SA = ncdat.SALT[0, :, :, :].values  # absolute salinity (first time slice)
PT = ncdat.TEMP[0, :, :, :].values  # potential temperature (first time slice)
CT = gsw.CT_from_pt(SA, PT)  # conservative temperature
sig2 = gsw.sigma2(SA, CT)  # potential density anomaly referenced to 2000dbar
RHO = ncdat.RHO[
    0, :, :, :].values * 1000 - 1000  # in-situ density anomaly, converted to kg/m^3 minus 1000
PD_bins = np.linspace(27, 38, 200)  # density-bin borders; alternative: np.linspace(1.004,1.036,65)
# ---------------------------------------------------------------------------------------
# - Paths
path_auxgrd = paths.get_path2var(auxgrd_name)
utils_misc.checkdir(path_auxgrd)  # presumably ensures the directory exists -- verify in utils_misc
path_mgrd = 'vars_mgrd/'
path_dens = 'vars_dens/'
# string identifying the chosen binning, used to tag saved-variable filenames
varname_binning = 'eqbins_{}to{}in{}steps'.format(int(PD_bins.min()),
                                                  int(PD_bins.max()),
                                                  int(len(PD_bins)))

# =======================================================================================
#  Variables contained in model output
# =======================================================================================
# - temperature -------------------------------------------------------------------------
T = ncdat.TEMP.mean(dim='time').isel(z_t=0)  # time-mean temperature at the top level
T = utils_mask.mask_ATLANTIC(T, ncdat.REGION_MASK)  # restrict to the Atlantic basin
# - in-situ density ---------------------------------------------------------------------
rho = utils_mask.mask_ATLANTIC(ncdat.RHO.mean(dim='time'), ncdat.REGION_MASK)
rho = rho.mean(dim='nlon')  # zonal mean
コード例 #19
0
ファイル: CESM_utils_dMOC.py プロジェクト: LABclimate/MT
def calc_dMxint_auxgrd(lat_auxgrd,
                       z_auxgrd,
                       vel_comp,
                       M,
                       PD,
                       PD_bins,
                       ncdat,
                       path_vars,
                       savevar=True):
    '''
    Zonally integrate the volume transport M along the auxiliary grid while
    binning it onto the potential-density axis.

    Input:
     > lat_auxgrd               : meridional auxillary grid | nparray
     > z_auxgrd                 : vertical auxillary grid | nparray (kept for interface compatibility)
     > vel_comp         	: either 'W' or 'V' | string (only controls the save filename)
     > M 			: volume transport (MW or MV) | nparray of shape [nz, nlat, nlon]
     > PD                       : potential density | nparray of shape [nz, nlat, nlon]
     > PD_bins                  : borders of PD-bins | nparray of shape [nPDbins+1]
     > ncdat                    : netCDFdata to load REGION_MASK (via the mask helpers)
     > path_vars                : path for loading/saving variables | string
     > savevar 		        : boolean
    Output:
     > dMxint                   : zonally integrated volume transport of shape [nPDbins, nlat] | nparray
    '''
    # a few variables to speed up subsequent loops
    iter_lat_auxgrd = np.arange(len(lat_auxgrd))
    iter_lat_M = np.arange(M.shape[1])
    iter_dens = np.arange(len(PD_bins) - 1)
    # get masks and iteration-indices to speed up subsequent loops
    # (re-calculate if loading from file fails)
    try:
        mask_auxgrd = utils_misc.loadvar(path_vars + 'mask_auxgrd')
    except Exception:
        mask_auxgrd = utils_mask.gen_mask_grd_overlay_lat(
            lat_auxgrd, ncdat, path_vars)
    try:
        iter_maskcombo = utils_misc.loadvar(path_vars + 'iter_maskcombo')
    except Exception:
        iter_maskcombo = utils_mask.gen_iter_maskcombo(lat_auxgrd, ncdat,
                                                       mask_auxgrd, path_vars)
    # pre-allocation with zeros
    mask_PD_bins = np.zeros(shape=(len(PD_bins) - 1, PD.shape[1]),
                            dtype=object)
    dMxint = np.zeros(shape=mask_PD_bins.shape)
    # density-bin membership does not depend on the auxiliary latitude n, so it is
    # computed once up front instead of being re-derived inside the n-loop.
    # bugfix: the elementwise '&' is required here -- the original 'and' raises
    # "ValueError: the truth value of an array ... is ambiguous" on numpy arrays.
    for l in iter_dens:
        for j in iter_lat_M:
            mask_PD_bins[l, j] = np.where(
                (PD[:, j, :] > PD_bins[l])
                & (PD[:, j, :] < PD_bins[l + 1]))  # (depth indices, longitude indices)
    # zonal integration along auxgrid and conversion on density axis
    print('> zonal integration')
    for n in iter_lat_auxgrd:
        utils_misc.ProgBar('step', step=n, nsteps=len(iter_lat_auxgrd))
        for l in iter_dens:
            for j in iter_lat_M:
                for i in iter_maskcombo[
                        n,
                        j]:  # limit zonal integration to Atlantic and grid-overlay
                    if i in mask_PD_bins[l, j][
                            1]:  # combine both masks: for maskcombo and for densitybinning
                        try:
                            dMxint[l, n] = np.nansum(
                                [dMxint[l, n], M[mask_PD_bins[l, j][0], j, i]])
                        except Exception:
                            pass  # M[...] may be an empty array, which is not accepted by nansum
    utils_misc.ProgBar('done')

    # save to file; the filename encodes the velocity component
    if savevar:
        if vel_comp == 'W':
            utils_misc.savevar(dMxint, path_vars + 'dMWxint_auxgrd')
        elif vel_comp == 'V':
            utils_misc.savevar(dMxint, path_vars + 'dMVxint_auxgrd')

    return (dMxint)
コード例 #20
0
# =======================================================================================
#  Streamfunctions
# =======================================================================================
# ---------------------------------------------------------------------------------------
# - Volume transports (in Sv)
MW_mgrd = utils_transp.calc_MW(ncdat)                                           # on model grid
MV_mgrd = utils_transp.calc_MV(ncdat)                                          # on model grid
#MV_projauxgrd = utils_conv.project_on_auxgrd(MV_mgrd, ncdat.ANGLE.values)      # projected on auxiliary grid (same shape as on model grid)

# - conversion on density axis
#try:    dMW = utils_misc.loadvar(path_dens+fname_MWdens)                    # load from file
#except:
#print(' > loading failed!')
# resample MW_mgrd from the w-levels (z_w_top) onto the T-grid cell centres (z_t)
MW_z_t = utils_conv.resample_colwise(MW_mgrd.values, MW_mgrd.z_w_top.values, ncdat.z_t.values, method='lin', mask = ATLboolmask, mono_method='sort')
utils_misc.savevar(MW_z_t, path_dens+fname_MWzt)                           # save to file
# resample MW_z_t onto the density axis (still pointing in the vertical direction)
# NOTE(review): zdb, ATLboolmask and the fname_* strings are defined earlier in this
# script -- confirm their definitions before reusing this snippet in isolation.
dMW = utils_conv.resample_colwise(MW_z_t, ncdat.z_t.values, zdb, method='dMW_zdb', fill_value=np.nan, mask = ATLboolmask, mono_method='force')
#dMW = utils_conv.resample_colwise(MW_z_t, sig2, db, method='dMW_db', fill_value=np.nan, mask = ATLboolmask, mono_method='force')

utils_misc.savevar(dMW, path_dens+fname_MWdens)                         # save to file

# ---------------------------------------------------------------------------------------
# - Streamfunctions (in Sv)...

# barotropic streamfunction on the model grid (also returns the zonally integrated MV)
BSF_mgrd, MVzint = utils_BSF.calc_BSF_mgrd(MV_mgrd, dump_MVzint=True)

# meridional overturning from WVEL: on the model grid, then on the auxiliary grid
MOC_mgrd_W, MWxint_mgrd = utils_MOC.calc_MOC_mgrd('W', MW_mgrd, do_norm=True, dump_Mxint=True)
MWxint_auxgrd = utils_MOC.calc_Mxint_auxgrd(lat_auxgrd, z_w_auxgrd, 'W', MW_mgrd.values, ncdat, path_auxgrd)
MOC_auxgrd_W, MOC_auxgrd_W_norm = utils_MOC.calc_MOC_auxgrd(lat_auxgrd, z_w_auxgrd, 'W', MWxint_auxgrd, 'forward', path_auxgrd)
コード例 #21
0
ファイル: CESM_utils_conv.py プロジェクト: LABclimate/MT
def resample_colwise(odat,
                     ogrd,
                     ngrd,
                     method,
                     fill_value=np.nan,
                     mask='none',
                     mono_method='filter'):
    '''
    Resample data column by column from an old grid onto a new grid.

    Uses:
     > utils_conv.resample_1dim_lininterp()
    Input:
     > odat:        array of dim 1 or 3 | data on model grid
     > ogrd:        array of dim 1 or 3 | old grid
     > ngrd:        new grid
     > method:      string | 'lin'      (linear interpolation),
                             'dMW_db'   (for dMW calculation with density as reference for weighting)
                             'dMW_zdb'  (for dMW calculation with isopycnal depth as reference for weighting)
                             'dMV'      (for dMV calculation)
                             'sum'      (sum over all datapoints within bin on new grid) #! broken, see below
     > fill_value:  float or np.nan | value used to fill in for requested points outside the range of ogrd.
     > mask:        boolean mask of shape [j, i], or the string 'none' (default: mask nothing)
     > mono_method: string | 'filter' or 'sort' (sort ogrd (and odat) such that ogrd is monotonically
                              increasing (not necessarily in a strict sense))
                             'force'  (manipulate ogrd such that it is strictly monotonically
                              increasing (brute method, not recommended))
    Output:
     > ndat:        resampled data on new grid
    Comments:
     > add warning if negative gradients occur.
     > #!! for discrepancies at the low- and high-density borders see the comment
          in resample_1dim_weightedmean().
    '''

    print(' > columnwise resampling')

    # infer the number of columns from the shape of the data-array
    if len(odat.shape) == 3:
        len_j = odat.shape[-2]  # assumed to be the second last entry
        len_i = odat.shape[-1]  # assumed to be the last entry
    elif len(odat.shape) == 2:
        len_j = 1  # set to one, such that j-loop is executed only once.
        len_i = odat.shape[-1]  # assumed to be the last entry
    elif len(odat.shape) == 1:
        len_j = 1  # set to one, such that j-loop is executed only once.
        len_i = 1  # set to one, such that i-loop is executed only once.

    # expand shape of ngrd, ogrd and odat to 3 dimensions
    #   note: singleton-dimensions are intended depending on original shape of odat
    def expand_shape(varin):
        if len(varin.shape) == 1:  # 1dim --> 3dim
            return (utils_conv.exp_k_to_kji(varin, len_j, len_i))
        elif len(varin.shape) == 2:  # 2dim --> 3dim
            sys.exit('case of two-dimensional ogrd is not implemented yet!')
        elif len(varin.shape) == 3:  # already 3dim
            return (varin)  # no expansion needed.

    ngrd = expand_shape(ngrd)
    ogrd = expand_shape(ogrd)
    odat = expand_shape(odat)

    # get default for regional mask (i.e. do not mask anything)
    # bugfix: test via isinstance -- "mask == 'none'" on an ndarray is an
    # elementwise comparison and must not decide a plain if-statement
    if isinstance(mask, str) and mask == 'none':
        mask = np.ones(shape=[len_j, len_i], dtype=bool)

    # pre-allocation of ndat
    ndat = fill_value * np.ones_like(ngrd)

    # helper making grd monotonically increasing (dat is kept aligned with grd)
    def make_mono(grd, dat, mono_method):
        if mono_method in ('filter', 'sort'):
            # sort grd and dat relative to grd
            # bugfix: the documented default 'filter' previously fell through
            # unhandled and silently did nothing
            idx_sort = np.argsort(grd)
            grd, dat = grd[idx_sort], dat[idx_sort]
        elif mono_method == 'force':
            # increase dropping values by a tiny epsilon until strictly increasing
            # bugfix: previously operated on the enclosing ogrd_ji/ogrd via the
            # closure instead of on its own arguments
            for k in np.arange(1, grd.shape[0]):
                if grd[k] <= grd[k - 1]:
                    grd[k] = grd[k - 1] + 1e-10
        else:
            raise ValueError(
                'unexpected mono_method passed to resample_colwise.')
        return (grd, dat)

    # loop over columns
    for j in np.arange(len_j):
        utils_misc.ProgBar('step', step=j, nsteps=len_j)
        for i in np.arange(len_i):

            # skip masked [j,i]-tuples
            if not mask[j, i]:
                continue

            # reduce ngrd, ogrd and odat to current column
            ngrd_ji = ngrd[:, j, i]
            ogrd_ji = ogrd[:, j, i]
            odat_ji = odat[:, j, i]

            # detect disjunct ogrd and ngrd and continue with next column
            if (np.nanmax(ogrd_ji) < np.nanmin(ngrd_ji)) or (
                    np.nanmax(ngrd_ji) < np.nanmin(ogrd_ji)):
                print(
                    'disjunct ogrd and ngrd at (j,i)=({}, {}). (please check conservation of integrated flux!)'
                    .format(j, i))
                continue

            # make ogrd monotonically increasing where needed ('lin' only, as before)
            if method == 'lin' and any(np.diff(ogrd_ji) <= 0):
                ogrd_ji, odat_ji = make_mono(ogrd_ji, odat_ji, mono_method)

            # first index on the new grid that lies inside the range of ogrd
            # (shared by every method below; np.where(...)[0][0] raises
            # IndexError on an empty match)
            try:
                idxn_start = np.where(ngrd_ji > np.nanmin(ogrd_ji))[0][0]
            except IndexError:
                idxn_start = 0  #!

            # resampling
            if method == 'lin':  # simple linear interpolation
                ndat[:, j,
                     i], gaps_border, gaps_center = utils_conv.resample_1dim_lininterp(
                         odat_ji, ogrd_ji, ngrd_ji, idxn_start, fill_value)

            elif method == 'dMW_db':  # for dMW calculation with density as reference for weighting
                ndat[:, j,
                     i], gaps_border, gaps_center = utils_conv.resample_1dim_lininterp(
                         odat_ji, ogrd_ji, ngrd_ji, idxn_start, fill_value=0)

            elif method == 'dMW_zdb':  # for dMW calculation with isopycnal depth as reference for weighting
                ndat[:, j,
                     i], gaps_border, gaps_center = utils_conv.resample_1dim_lininterp(
                         odat_ji, ogrd_ji, ngrd_ji, idxn_start, fill_value=0)
                ndat[gaps_border, j, i] = fill_value

            elif method == 'dMV':  # for dMV calculation
                ndat[:, j,
                     i], gaps_border, gaps_center = utils_conv.resample_1dim_lininterp(
                         odat_ji, ogrd_ji, ngrd_ji, idxn_start, fill_value=0)

            elif method == 'sum':  #! doesn't work right now!
                # NOTE(review): resample_1dim_sum and fill_gaps are referenced
                # without a module prefix -- likely NameErrors; confirm before
                # enabling this branch
                ndat[:, j, i], gaps_border, gaps_center = resample_1dim_sum(
                    odat_ji, ogrd_ji, ngrd, idxn_start, fill_value)
                ndat[:, j, i] = fill_gaps(ndat[:, j, i], gaps_border,
                                          gaps_center, fill_value)
            else:
                raise ValueError(
                    'unexpected method passed to resample_colwise.')
    utils_misc.ProgBar('done')

    return (np.squeeze(ndat)
            )  # remove singleton dimensions (i.e. (1d --> 3d) --> back to 1d)
コード例 #22
0
ファイル: PREP_dMOC.py プロジェクト: LABclimate/MT
    dMVfc       = utils_ana.nancumsum(dMVf*dzDBc, axis=0) #! changed dDB to dzDBc
    dMVfcp      = utils_conv.project_on_auxgrd(dMVfc, ncdat.ANGLE.values)
    dMOC        = np.nansum(dMVfcp, axis=-1)
    '''

    dMVf        = utils_conv.resample_colwise(MVf, sig2U, DBc, method='dMV', fill_value=np.nan, mask = ATLboolmask, mono_method='force')
    dDB3d       = utils_conv.exp_k_to_kji(dDB, dMVf.shape[-2], dMVf.shape[-1])
    dMVfc       = utils_ana.nancumsum(dMVf*dDB3d, axis=0)
    dMVfcp      = utils_conv.project_on_auxgrd(dMVfc, ncdat.ANGLE.values)
    dMOC        = np.nansum(dMVfcp, axis=-1)

# -----------------------------------------------------------------------------
# PART III // SAVE SOME VARIABLES
# -----------------------------------------------------------------------------
    # Note: the part [:-4] removes ".cdf"

    # create folder with choice of densitybinning
    utils_misc.mkdir(dir_vars)

    utils_misc.savevar(sig2T, dir_vars+'sig2T_'+fnames[ii][:-4], 'sig2T')
    utils_misc.savevar(sig2U, dir_vars+'sig2U_'+fnames[ii][:-4])
    
    #utils_misc.savevar(zDBc, dir_vars+'zDBc_'+fnames[ii][:-4], 'zDBc')
    #utils_misc.savevar(zDBb, dir_vars+'zDBb_'+fnames[ii][:-4], 'zDBb')
    #utils_misc.savevar(zDBbc, dir_vars+'zDBbc_'+fnames[ii][:-4], 'zDBbc')
    utils_misc.savevar(dMOC, dir_vars+'dMOC_'+fnames[ii][:-4], 'dMOC')

# -----------------------------------------------------------------------------
# CLOSE NC-FILE
# -----------------------------------------------------------------------------
    ncdat.close()
コード例 #23
0
# CESM POP history files for model years 0850-1499
# (presumably annual means, judging from the '.ann.' tag -- verify)
fnames = [
    'b40.lm850-1850.1deg.001.pop.h.{:04d}.ann.4.cdf'.format(i)
    for i in np.arange(850, 1500)
]

# #############################################################################
#  Load variables
# #############################################################################
# -----------------------------------------------------------------------------
#  Load time independent variables
#  (the first file is sufficient, as the grid geometry does not change in time)
ncdat = xr.open_dataset(fpath + fnames[0], decode_times=False)
TAREA = ncdat.TAREA  # area of the T-grid cells
# -----------------------------------------------------------------------------
#  Load BSF and MOC (try from pickled file, otherwise load from ncdata)
try:
    BSF_mod = utils_misc.loadvar(path_corr + 'BSF_mod')
    MOC_mod = utils_misc.loadvar(path_corr + 'MOC_mod')
except:
    BSF_mod = []
    MOC_mod = []
    for t in np.arange(len(fnames)):
        print t
        try:
            # load netcdf file
            ncdat = xr.open_dataset(fpath + fnames[t], decode_times=False)
            # write streamfunctions to variables (in Sv)
            BSF_mod = utils_time.concat(
                BSF_mod, utils_mask.mask_ATLANTIC(ncdat.BSF,
                                                  ncdat.REGION_MASK))
            MOC_mod = utils_time.concat(
                MOC_mod, ncdat.MOC.isel(transport_reg=1, moc_comp=0))
コード例 #24
0
                print('\b' * (2 + barlen)),  # set the cursor back to start
                sys.stdout.flush()
            else:
                print('\b.'),  #! do NOT delete the pending comma!!
                sys.stdout.flush(),
    elif stage == 'done':
        print('\b]  Done!')


def Counter(step, nsteps, stepname='Step', mod=1):
    """Print an in-place progress counter of the form '<stepname> <step> / <nsteps>'.

    The output starts with a carriage return so that successive calls
    overwrite one another on the same terminal line.

    Parameters
    ----------
    step : int
        Current iteration number.
    nsteps : int
        Total number of iterations (display only).
    stepname : str, optional
        Label printed before the counter (default 'Step').
    mod : int, optional
        Only print when ``step`` is a multiple of ``mod`` (default 1,
        i.e. print on every call).
    """
    if not np.mod(step, mod):
        # sys.stdout.write instead of the Python-2-only `print(...),`
        # trailing-comma statement: under Python 3 that form builds the
        # tuple `(None,)` and appends a newline, which breaks the
        # carriage-return overwrite.  write() gives identical,
        # newline-free output under both Python 2 and 3.
        sys.stdout.write('\r{} {} / {}'.format(stepname, step, nsteps))
        sys.stdout.flush()


'''
# code for testing
import time
for i in np.arange(21):
    time.sleep(.01)
    ProgBar('step', step = i, nsteps = 21)
utils_misc.ProgBar('done')
'''


# --- load variable using pickle
def loadvar(path_to_var, str_varname='__', verbose=True):
    if verbose:
        print(' > loading {}\n    from {}'.format(str_varname, path_to_var))
    sys.stdout.flush()
    with open(path_to_var, 'rb') as f:
コード例 #25
0
ファイル: _main_CESM_dMOCVtest.py プロジェクト: LABclimate/MT
# =======================================================================================
#  Settings and Directories
# =======================================================================================
# Auxiliary (meridional) grid: index [2] selects 'lat99model'.
auxgrd_name = ['lat395model', 'lat198model', 'lat99model', 'lat170eq80S90N', 'lat340eq80S90N'][2] # choose aux grid
# Earlier density-bin setups kept for reference (#1-#3); the active one follows.
 #1 DBsetup = np.array([[20, 33, 14], [33.1, 36, 11], [36.05, 37.5, 11], [38, 43, 11]]) # setup for density bins [lower, upper, steps]
 #2 DBsetup = np.array([[12, 22, 3], [25,30, 3], [31,35,6], [35, 38, 40], [38.5,42, 3]]) # setup for density bins [lower, upper, steps]
 #3 DBsetup = np.array([[11, 30, 6], [30,35,6], [35, 36.5, 51], [36.5,37, 51], [37,43, 7]]) # setup for density bins [lower, upper, steps]
DBsetup     = np.array([[11, 30, 6], [30,35,6], [35, 36.5, 51], [36.5,38, 101], [38,43, 8]]) # setup for density bins (border values) [lower, upper, steps]
boolLGSnoload = True    # boolean | False: normal loadgetsave procedure | True: noload-mode > will get and save (overwrite!)
# ---------------------------------------------------------------------------------------
# automatical generation of directory names
dir_mgrd        = paths.get_path2vars('mgrd',CESMversion=CESMversion, mkdir=True)
dir_dens        = paths.get_path2vars('dens', CESMversion=CESMversion, mkdir=True)
# Directory name encodes the five [lower, upper, steps] bin segments via %g.
dir_DB          = 'DB_%gto%gin%g_%gto%gin%g_%gto%gin%g_%gto%gin%g_%gto%gin%g/' % tuple(DBsetup.flatten())
dir_auxgrd      = paths.get_path2vars(auxgrd_name, CESMversion=CESMversion, mkdir=True)
utils_misc.mkdir(dir_dens+dir_DB)
# paths (directory + filenames) to temp variables
# Trailing underscore on dMV/dMVf: these are filename PREFIXES that get a
# per-file suffix appended later -- presumably; verify at the call sites.
path_dMV       = dir_dens+dir_DB+'dMV_'
path_dMVf      = dir_dens+dir_DB+'dMVf_'
path_zDBb       = dir_dens+dir_DB+'zDBb'
path_zDBc       = dir_dens+dir_DB+'zDBc'
path_zDBbc      = dir_dens+dir_DB+'zDBbc'
path_HT_auxgrd_xmax = dir_auxgrd+'HT_auxgrd_xmax'
path_HT_mgrd_xmax   = dir_mgrd+'HT_mgrd_xmax'
path_HU_auxgrd_xmax = dir_auxgrd+'HU_auxgrd_xmax'
path_HU_mgrd_xmax   = dir_mgrd+'HU_mgrd_xmax'
path_lat_auxgrd     = '../variables/CESM_gen/lat_auxgrd_'+auxgrd_name
path_fraction_mask = dir_auxgrd+'fraction_mask'

# =======================================================================================
#  Transformation on different grids (Density, Spatial auxiliary grid)
コード例 #26
0
ファイル: main_CESM_controls.py プロジェクト: LABclimate/MT
# - Spatial auxiliary grid
# Index [0] selects 'lat395model'.
auxgrd_name = ['lat395model', 'lat198model', 'lat170eq80S90N', 'lat340eq80S90N'][0]       # choose aux grid
lat_auxgrd, zT_auxgrd, z_w_auxgrd = utils_mask.gen_auxgrd(ncdat, auxgrd_name)
lat_mgrd = ncdat.TLAT.isel(nlon=0)          # mean of LAT for each j #! very inappropriate
# ---------------------------------------------------------------------------------------
# - Potential density and binning
# Only the first time slice ([0,...]) is used here.
# NOTE(review): CESM SALT is commonly practical salinity, not the absolute
# salinity that gsw expects -- confirm before trusting sig2 quantitatively.
SA = ncdat.SALT[0,:,:,:].values             # absolute salinity
PT = ncdat.TEMP[0,:,:,:].values             # potential temperature
CT = gsw.CT_from_pt(SA, PT)                 # conservative temperature
sig2 = gsw.sigma2(SA, CT)                   # potential density anomaly referenced to 2000dbar
# *1000-1000: convert density to SI units and subtract 1000 to form an
# anomaly -- presumably RHO is stored in g/cm^3; confirm file metadata.
RHO = ncdat.RHO[0,:,:,:].values*1000-1000   # in-situ density anomaly [SI]
PD_bins = np.linspace(27,38,200)            # PD_bins = np.linspace(1.004,1.036,65)
# ---------------------------------------------------------------------------------------
# - Paths
path_auxgrd = paths.get_path2var(auxgrd_name)
utils_misc.checkdir(path_auxgrd)
path_mgrd = 'vars_mgrd/'
path_dens = 'vars_dens/'
# Binning tag used in dump filenames, e.g. 'eqbins_27to38in200steps'.
varname_binning = 'eqbins_{}to{}in{}steps'.format(int(PD_bins.min()), int(PD_bins.max()), int(len(PD_bins)))

# =======================================================================================
#  Variables contained in model output
# =======================================================================================
# - temperature -------------------------------------------------------------------------
# Time-mean surface (z_t=0) temperature, restricted to the Atlantic.
T = ncdat.TEMP.mean(dim='time').isel(z_t=0)
T = utils_mask.mask_ATLANTIC(T, ncdat.REGION_MASK)
# - in-situ density ---------------------------------------------------------------------
# Time-mean density, Atlantic-masked, then zonally averaged over nlon.
rho = utils_mask.mask_ATLANTIC(ncdat.RHO.mean(dim='time'), ncdat.REGION_MASK)
rho = rho.mean(dim='nlon')

# =======================================================================================
コード例 #27
0
ファイル: _main_CESM_create.py プロジェクト: LABclimate/MT
fpath = paths.get_path2data('lm_1deg', 'anndat')
# Annual-mean CESM POP history files for model years 850-1850
# ({:04d} = zero-padded model year).
fnames = [
    'b40.lm850-1850.1deg.001.pop.h.{:04d}.ann.4.cdf'.format(i)
    for i in np.arange(850, 1851)
]
# -----------------------------------------------------------------------------
# Dumping paths for variables
# basic path for variables - please choose correct CESMversion, CESMrun above
path_var = '../variables/CESM{}_{}/.'.format(CESMversion, CESMrun)

# ==========================================================================================================================================================
#  Gridding parameters
# ==========================================================================================================================================================
# -----------------------------------------------------------------------------
# alternative paths for local use:
ncdat = utils_misc.loadxvar(path_var + '/any_ncdat', None)  # get ncdata
# Dump the time-independent grid variables individually so later scripts can
# load them without reopening a full model-output file.
folder_grid = '/grid'
utils_misc.savexvar(ncdat.REGION_MASK, path_var + folder_grid + '/REGION_MASK',
                    'REGION_MASK')
utils_misc.savexvar(ncdat.TLAT, path_var + folder_grid + '/TLAT', 'TLAT')
utils_misc.savexvar(ncdat.TLONG, path_var + folder_grid + '/TLONG', 'TLONG')
utils_misc.savexvar(ncdat.lat_aux_grid, path_var + folder_grid + '/lat_auxgrd',
                    'lat')
utils_misc.savexvar(ncdat.z_t, path_var + folder_grid + '/zT', 'zT')

# ==========================================================================================================================================================
#  Stack ncdata in time (e.g. BSF, MOC, ...)
# ==========================================================================================================================================================
''' Q: good choice of transport_reg=1 #??
'''
コード例 #28
0
fpath = paths.get_path2data('lm_1deg', 'anndat')
#fpath = '../data'
# Annual-mean CESM POP history files, one per model year 850-1499.
fnames = ['b40.lm850-1850.1deg.001.pop.h.{:04d}.ann.4.cdf'.format(i) for i in np.arange(850, 1500)]

# #############################################################################
#  Load variables
# #############################################################################
# -----------------------------------------------------------------------------
#  Load time independent variables
# decode_times=False -- presumably the paleo time axis is not CF-decodable;
# NOTE(review): confirm against the file's time metadata.
ncdat = xr.open_dataset(fpath+fnames[0], decode_times=False)
TAREA = ncdat.TAREA  # T-cell area (units as stored in the file)
# -----------------------------------------------------------------------------
#  Load BSF and MOC (try from pickled file, otherwise load from ncdata)
# Narrowed from bare `except:` so KeyboardInterrupt/SystemExit still propagate.
try:
    BSF_mod = utils_misc.loadvar(path_corr+'BSF_mod')
    MOC_mod = utils_misc.loadvar(path_corr+'MOC_mod')
except Exception:
    # Pickled variables not available: rebuild them year by year from netCDF.
    BSF_mod = []
    MOC_mod = []
    for t in np.arange(len(fnames)):
        # `print(t)` instead of the Python-2-only `print t` statement:
        # identical output under Python 2, also valid under Python 3 and
        # consistent with the parenthesized print calls used elsewhere.
        print(t)
        try:
            # load netcdf file
            ncdat = xr.open_dataset(fpath+fnames[t], decode_times=False)
            # write streamfunctions to variables (in Sv)
            BSF_mod = utils_time.concat(BSF_mod, utils_mask.mask_ATLANTIC(ncdat.BSF, ncdat.REGION_MASK))
            MOC_mod = utils_time.concat(MOC_mod, ncdat.MOC.isel(transport_reg=1, moc_comp=0))
        except Exception:
            # Best-effort: report the broken file and continue with the rest.
            print('ERROR with file ' + fpath+fnames[t] + ' (t = {:d})'.format(t))
    # save to file