Code example #1
def calculateRegions(time_lrp,lats1,lons1):
    ### Southeast Asia
    lataq = np.where((lats1 >= 10) & (lats1 <= 40))[0]
    lata1 = lats1[lataq]
    lonaq = np.where((lons1 >= 105) & (lons1 <= 120))[0]
    lona1 = lons1[lonaq]
    time_lrpA1 = time_lrp[:,lataq,:]
    time_lrpA = time_lrpA1[:,:,lonaq]
    lona2,lata2 = np.meshgrid(lona1,lata1)
    mean_lrpA = UT.calc_weightedAve(time_lrpA,lata2)
    
    
    ### India
    latiq = np.where((lats1 >= 15) & (lats1 <= 40))[0]
    lati1 = lats1[latiq]
    loniq = np.where((lons1 >= 70) & (lons1 <= 105))[0]
    loni1 = lons1[loniq]
    time_lrpI1 = time_lrp[:,latiq,:]
    time_lrpI = time_lrpI1[:,:,loniq]
    loni2,lati2 = np.meshgrid(loni1,lati1)
    mean_lrpI = UT.calc_weightedAve(time_lrpI,lati2)
    
    ### North Atlantic Warming Hole
    latwq = np.where((lats1 >= 50) & (lats1 <= 60))[0]
    latw1 = lats1[latwq]
    lonwq = np.where((lons1 >= 315) & (lons1 <= 340))[0]
    lonw1 = lons1[lonwq]
    time_lrpW1 = time_lrp[:,latwq,:]
    time_lrpW = time_lrpW1[:,:,lonwq]
    lonw2,latw2 = np.meshgrid(lonw1,latw1)
    mean_lrpW = UT.calc_weightedAve(time_lrpW,latw2)
    
    ### Sahara
    latdq = np.where((lats1 >= 0) & (lats1 <= 15))[0]
    latd1 = lats1[latdq]
    londq1 = np.where((lons1 >= 0) & (lons1 <= 45))[0]
    londq2 = np.where((lons1 >= 350) & (lons1 <= 360))[0]
    londq = np.append(londq1 ,londq2)
    lond1 = lons1[londq]
    time_lrpD1 = time_lrp[:,latdq,:]
    time_lrpD = time_lrpD1[:,:,londq]
    lond2,latd2 = np.meshgrid(lond1,latd1)
    mean_lrpD = UT.calc_weightedAve(time_lrpD,latd2)
    
    ### Southern Ocean Section
    latsoq = np.where((lats1 >= -66) & (lats1 <= -40))[0]
    latso1 = lats1[latsoq]
    lonsoq = np.where((lons1 >= 5) & (lons1 <= 70))[0]
    # lonsoq2 = np.where((lons1 >= 330) & (lons1 <= 360))[0]
    # lonsoq = np.append(lonsoq1 ,lonsoq2)
    lonso1 = lons1[lonsoq]
    time_lrpSO1 = time_lrp[:,latsoq,:]
    time_lrpSO = time_lrpSO1[:,:,lonsoq]
    lonso2,latso2 = np.meshgrid(lonso1,latso1) 
    mean_lrpSO = UT.calc_weightedAve(time_lrpSO,latso2)
    
    ### Add together
    regions = [mean_lrpA,mean_lrpI,mean_lrpW,mean_lrpD,mean_lrpSO]
    
    return regions
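Every snippet in this listing leans on UT.calc_weightedAve, whose implementation is not shown. Below is a minimal, self-contained sketch of a cosine-latitude weighted spatial mean, assuming that is roughly what the utility computes; the name weighted_ave_sketch and its NaN handling are illustrative only, and the real calc_Utilities routine may differ.

import numpy as np

def weighted_ave_sketch(data, lat2):
    """Cosine-latitude weighted mean over the last two axes (lat, lon); lat2 is in degrees."""
    weights = np.cos(np.deg2rad(lat2))                      # area weights ~ cos(latitude)
    masked_weights = np.where(np.isnan(data), 0., weights)  # drop weights at missing points
    return (np.nansum(data * weights, axis=(-2, -1)) /
            np.nansum(masked_weights, axis=(-2, -1)))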
Code example #2
def readZ30(varnames):
    lat,lon,time,lev,tashit = DO.readMeanExperiAll('%s' % varnames,
                                                'HIT','surface')
    lat,lon,time,lev,tasfict = DO.readMeanExperiAll('%s' % varnames,
                                                'FICT','surface')
    
    ### Create 2d array of latitude and longitude
    lon2,lat2 = np.meshgrid(lon,lat)
    
    ### Read in QBO phases 
    filenamehitp = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitn = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    filenamehitp2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitn2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    pos_hit = np.append(np.genfromtxt(filenamehitp,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamehitp2,unpack=True,usecols=[0],dtype='int')+100)
    neg_hit = np.append(np.genfromtxt(filenamehitn,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamehitn2,unpack=True,usecols=[0],dtype='int')+100)    
    
    filenamefictp = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[0]
    filenamefictn = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[2]
    filenamefictp2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[0]
    filenamefictn2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[2]
    pos_fict = np.append(np.genfromtxt(filenamefictp,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamefictp2,unpack=True,usecols=[0],dtype='int')+100)
    neg_fict = np.append(np.genfromtxt(filenamefictn,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamefictn2,unpack=True,usecols=[0],dtype='int')+100)

    tas_mo = [tashit,tasfict]

    ### Composite by QBO phase    
    tas_mohitpos = tas_mo[0][pos_hit,:,:,:]
    tas_mofictpos = tas_mo[1][pos_fict,:,:,:]
    
    tas_mohitneg = tas_mo[0][neg_hit,:,:,:]
    tas_mofictneg = tas_mo[1][neg_fict,:,:,:]
    
    ### Compute comparisons for months - select region
    if varnames == 'Z30':
        latq = np.where((lat >=65) & (lat <=90))[0]
        fictpos = tas_mofictpos
        fictneg = tas_mofictneg
        fictpos = fictpos[:,:,latq]
        fictneg = fictneg[:,:,latq]
        lat2s = lat2[latq,:]
        fictpos = UT.calc_weightedAve(fictpos,lat2s)
        fictneg = UT.calc_weightedAve(fictneg,lat2s)
        
        hitpos = tas_mohitpos
        hitneg = tas_mohitneg
        hitpos = hitpos[:,:,latq]
        hitneg = hitneg[:,:,latq]
        hitpos = UT.calc_weightedAve(hitpos,lat2s)
        hitneg = UT.calc_weightedAve(hitneg,lat2s)
        
    ### Note: fictpos/fictneg/hitpos/hitneg are only defined when varnames == 'Z30'
    diffruns = [fictpos.squeeze(),fictneg.squeeze(),hitpos.squeeze(),hitneg.squeeze()]
    
    return diffruns
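A hedged sketch of the compositing step above: the QBO-phase text files hold ensemble-member indices, which are used for fancy indexing along the first axis, and the +100 offset appears to shift indices from the second directory so both 100-member sets share one axis. All arrays and index values here are synthetic.

import numpy as np
tas = np.random.randn(200, 12, 96, 144)   # synthetic [members, months, lat, lon]
pos = np.array([0, 3, 7])                 # indices as read from the first set of files
pos2 = np.array([1, 4]) + 100             # second set, offset into members 100-199
pos_all = np.append(pos, pos2)
tas_pos = tas[pos_all, :, :, :]           # composite of positive-phase members
print(tas_pos.shape)                      # (5, 12, 96, 144)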
Code example #3
def readVar(varnames,monthperiod):
    ### Call function to read in ERA-Interim
    lat,lon,time,lev,era = MOR.readDataR(varnames,'surface',False,True)
    
    ### Call functions to read in WACCM data
    models = np.empty((len(runnamesm),ensembles,era.shape[0],era.shape[1],
                       era.shape[2],era.shape[3]))
    for i in range(len(runnamesm)):
        lat,lon,time,lev,models[i] = MOM.readDataM(varnames,runnamesm[i],
                                                   'surface',False,True)
        
    ### Retrieve time period of interest
    if monthperiod == 'DJF':
        modq = np.empty((len(runnamesm),ensembles,era.shape[0]-1,era.shape[2],
                           era.shape[3]))
        for i in range(len(runnamesm)):
            for j in range(ensembles):
                modq[i,j,:,:,:] = UT.calcDecJanFeb(models[i,j,:,:,:],
                                                    lat,lon,'surface',1)
        eraq = UT.calcDecJanFeb(era,lat,lon,'surface',1)
    elif monthperiod == 'Annual':
        modq = np.nanmean(models[:,:,:,:,:,:],axis=3)
        eraq = np.nanmean(era[:,:,:,:],axis=1)
    
    ### Take ensemble mean
    modmean = np.nanmean(modq,axis=1)
    
    ### Slice over region of interest (40-65N, 50-130E)
    latslicemin = 40
    latslicemax = 65
    lonslicemin = 50
    lonslicemax = 130
    latq = np.where((lat >= latslicemin) & (lat <= latslicemax))[0]
    lat = lat[latq]
    lonq = np.where((lon >= lonslicemin) & (lon <= lonslicemax))[0]
    lon = lon[lonq]
    eraq = eraq[:,latq,:]
    eraq = eraq[:,:,lonq]
    modmean = modmean[:,:,latq,:]
    modmean = modmean[:,:,:,lonq]
    
    ### Meshgrid for lat/lon
    lon2,lat2 = np.meshgrid(lon,lat)
    
    eramean = UT.calc_weightedAve(eraq,lat2)
    mmean = UT.calc_weightedAve(modmean,lat2)
    
    ### Create climo over time series
    eraiave = np.nanmean(eramean)
    modelave = np.nanmean(mmean,axis=1)
    
    ### Calculate anomalies
    eraanom = eramean - eraiave
    modelanom = (mmean.transpose() - modelave).transpose()
    
    return eraanom, modelanom
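The transpose trick in the last lines subtracts each model's time mean from a [model, year] array. A hedged, synthetic-shape check that an np.newaxis broadcast gives the same result:

import numpy as np
mmean = np.random.randn(6, 37)                      # synthetic [model, year] series
modelave = np.nanmean(mmean, axis=1)                # per-model climatology
anom_t = (mmean.transpose() - modelave).transpose()
anom_b = mmean - modelave[:, np.newaxis]            # equivalent broadcast form
print(np.allclose(anom_t, anom_b))                  # True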
Code example #4
def calcOBS_UBI(var, lat, lon):
    """
    Script calculates the Ural Blocking Index for reanalysis
    """

    ### Import modules
    import numpy as np
    import calc_Utilities as UT

    ### Meshgrid for lat,lon
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Calculate UBI
    lonq1 = np.where((lon >= 0) & (lon <= 90))[0]
    lonq2 = np.where((lon >= 330) & (lon <= 360))[0]
    lonq = np.append(lonq1, lonq2)
    latq = np.where((lat >= 45) & (lat <= 80))[0]
    varlon = var[:, :, lonq]
    varu = varlon[:, latq, :]
    lat2uq = lat2[latq, :]
    lat2u = lat2uq[:, lonq]
    varubi = UT.calc_weightedAve(varu, lat2u)

    print('\n========Calculated *OBS* Ural Blocking Index=======\n')
    return varubi
Code example #5
def remove_annual_mean(data,data_obs,lats,lons,lats_obs,lons_obs):
    """
    Removes annual mean from data set
    """
    
    ### Import modules
    import numpy as np
    import calc_Utilities as UT
    
    ### Create 2d grid
    lons2,lats2 = np.meshgrid(lons,lats)
    lons2_obs,lats2_obs = np.meshgrid(lons_obs,lats_obs)
    
    ### Calculate weighted average and remove mean
    data = data - UT.calc_weightedAve(data,lats2)[:,:,np.newaxis,np.newaxis]
    data_obs = data_obs - UT.calc_weightedAve(data_obs,lats2_obs)[:,np.newaxis,np.newaxis]
    
    return data,data_obs
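The np.newaxis indexing above implies data is [ensemble, year, lat, lon] and data_obs is [year, lat, lon], so the weighted global mean must get its trailing lat/lon axes restored before subtraction. A hedged shape check with synthetic arrays (the global mean is faked here instead of calling calc_weightedAve):

import numpy as np
ens, yrs, nlat, nlon = 5, 40, 96, 144
data = np.random.randn(ens, yrs, nlat, nlon)
global_mean = np.random.randn(ens, yrs)                  # stand-in for UT.calc_weightedAve(data, lats2)
anom = data - global_mean[:, :, np.newaxis, np.newaxis]  # broadcasts back over lat/lon
print(anom.shape)                                        # (5, 40, 96, 144)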
Code example #6
def aveSST(sst, lat, lon, mask, period):
    if mask == 'polar_cap':
        latpolar = 67.
        latq = np.where((lat >= latpolar))[0]
        sstnew = sst[:, :, latq, :]
        latt = lat[latq]
        lonn = lon
    elif mask == 'north_barents':
        latq = np.where((lat >= 76.4) & (lat <= 79.4))[0]
        lonq = np.where((lon >= 38) & (lon <= 60))[0]
        latt = lat[latq]
        lonn = lon[lonq]
        sstnew1 = sst[:, :, latq, :]
        sstnew = sstnew1[:, :, :, lonq]
    elif mask == 'chukchi':
        latq = np.where((lat >= 68) & (lat <= 74))[0]
        lonq = np.where((lon >= 180) & (lon <= 200))[0]
        latt = lat[latq]
        lonn = lon[lonq]
        sstnew1 = sst[:, :, latq, :]
        sstnew = sstnew1[:, :, :, lonq]
    elif mask == 'bering':
        latq = np.where((lat >= 54) & (lat <= 64))[0]
        lonq = np.where((lon >= 180) & (lon <= 200))[0]
        latt = lat[latq]
        lonn = lon[lonq]
        sstnew1 = sst[:, :, latq, :]
        sstnew = sstnew1[:, :, :, lonq]
    elif mask == 'global':
        sstnew = sst
        latt = lat
        lonn = lon

    ### Mask lat/lon for 2d
    lon2, lat2 = np.meshgrid(lonn, latt)

    ### Calculated weighted average
    sstave = UT.calc_weightedAve(sstnew, lat2)
    ### Quick-look diagnostic plot of the sliced field (plt assumed imported)
    plt.contourf(sstnew[0, 0, :, :])
    plt.show()

    ### Calculate temporal period
    if period == 'annual':
        sstperiod = np.nanmean(sstave, axis=1)
    elif period == 'aug':
        sstperiod = sstave[:, 7]
    elif period == 'sep':
        sstperiod = sstave[:, 8]

    ### Calculate anomalies using 1982-2010 baseline
    years = np.arange(1982, 2019 + 1, 1)
    yearq = np.where((years >= 1982) & (years <= 2010))[0]

    climo = np.nanmean(sstperiod[yearq])
    anomperiod = sstperiod - climo

    return sstave, anomperiod, latt, lonn
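A self-contained sketch of the baseline-anomaly step at the end of aveSST: the 1982-2010 mean is removed from the full 1982-2019 record. The series here is synthetic.

import numpy as np
years = np.arange(1982, 2019 + 1)
series = np.random.randn(years.size) + 0.02 * (years - 1982)  # synthetic annual series with a trend
yearq = np.where((years >= 1982) & (years <= 2010))[0]
anom = series - np.nanmean(series[yearq])                     # anomalies relative to 1982-2010
print(round(float(np.nanmean(anom[yearq])), 6))               # ~0 by construction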
Code example #7
def calcOBS_SHI(var, lat, lon):
    """
    Script calculates the Siberian High Index for reanalysis
    """

    ### Import modules
    import numpy as np
    import calc_Utilities as UT

    ### Meshgrid for lat,lon
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Calculate SHI
    lonq = np.where((lon >= 80) & (lon <= 120))[0]
    latq = np.where((lat >= 40) & (lat <= 65))[0]
    varlon = var[:, :, lonq]
    anoms = varlon[:, latq, :]
    lat2sq = lat2[latq, :]
    lat2s = lat2sq[:, lonq]
    varshi = UT.calc_weightedAve(anoms, lat2s)

    print('\n========Calculated *OBS* Siberian High Index========\n')
    return varshi
Code example #8
def calcOBS_PolarCap(var, lat, lon, latpolar):
    """
    Script calculates the polar cap average for reanalysis
    """
    ### Import modules
    import numpy as np
    import calc_Utilities as UT

    ### Meshgrid for lat,lon
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Calculate polar cap average
    latq = np.where((lat >= latpolar))[0]
    if var.ndim == 3:
        varp = var[:, latq, :]
        lat2p = lat2[latq, :]
    elif var.ndim == 4:
        varp = var[:, :, latq, :]
        lat2p = lat2[latq, :]
    varave = UT.calc_weightedAve(varp, lat2p)

    print('\n========Calculated *OBS* Polar Cap Average========\n')
    return varave
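A hedged usage sketch for calcOBS_PolarCap with synthetic data; it assumes calc_Utilities (and its calc_weightedAve) is importable as in the snippet above, and the printed shape is only the expected result if that utility averages over the lat/lon axes.

import numpy as np
lat = np.linspace(-90, 90, 96)
lon = np.arange(0, 360, 2.5)
z500 = np.random.randn(40, 12, lat.size, lon.size)      # synthetic [years, months, lat, lon]
capave = calcOBS_PolarCap(z500, lat, lon, latpolar=65)   # takes the 4-D branch above
print(capave.shape)                                      # expected (40, 12)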
Code example #9
regionsall = [globe,trop,nh,sh,io,enso,na,a,africa,so]

### Calculate regional averages for histograms
regions_average = []
for i in range(len(regionsall)):
    latq = np.where((lats >= latallmin[i]) & (lats <= latallmax[i]))[0]
    lonq = np.where((lons >= lonallmin[i]) & (lons <= lonallmax[i]))[0]
    latnew = lattall[i][latq]
    lonnew = lonnall[i][lonq]
    lonnew2,latnew2 = np.meshgrid(lonnew,latnew)
    
    regiongrid1 = regionsall[i][:,:,latq,:]
    regiongrid = regiongrid1[:,:,:,lonq]
    
    ave = UT.calc_weightedAve(regiongrid,latnew2)
    regions_average.append(ave)
    
### Calculate PDFs
num_bins = np.arange(-0.4,0.401,0.005)
pdfregions = np.empty((len(regions_average),meancomp.shape[0],len(num_bins)))
for rrr in range(len(regions_average)):
    for hist in range(meancomp.shape[0]):
        m,s = sts.norm.fit(regions_average[rrr][hist].ravel())
        pdfregions[rrr,hist,:] = sts.norm.pdf(num_bins,m,s)

###############################################################################
###############################################################################
###############################################################################    
### Create graph 
plt.rc('text',usetex=True)
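A self-contained illustration of the PDF step above: fit a normal distribution to a sample and evaluate its density over the histogram bins. The sample is synthetic.

import numpy as np
import scipy.stats as sts
num_bins = np.arange(-0.4, 0.401, 0.005)
sample = np.random.normal(loc=0.05, scale=0.1, size=500)
m, s = sts.norm.fit(sample)             # maximum-likelihood mean and standard deviation
pdf = sts.norm.pdf(num_bins, m, s)      # density evaluated at each bin value
print(round(m, 3), round(s, 3), pdf.shape)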
Code example #10
File: calc_PolarCap.py Project: muskanmahajan37/AA
def PolarCap(simu,vari,level,latpolar,period):
    """
    Script calculates average over the polar cap (set latitude)
    """
    ### Import modules
    import numpy as np
    import calc_Utilities as UT
    import read_CTLNQ as CONT
    import read_ExpMonthly as NUDG
    import read_ShortCoupled as COUP
    import read_SIT as THICK
    import read_SIC as CONC
    import read_SIT_E3SM as E3SIT
    import read_SIC_E3SM as E3SIC
    import read_OldIceExperi as OLD
    import read_LongCoupled as LC
    
    if any([vari=='T700',vari=='T500']):
        varia = 'TEMP'
        level = 'profile'
    elif vari == 'U700':
        varia = 'U'
        level = 'profile'
    else:
        varia = vari
    
    ############################################################################### 
    ############################################################################### 
    ############################################################################### 
    if simu == 'AA-2030':
        lat,lon,lev,future = NUDG.readExperi(varia,'AA','2030',level,'none')
        lat,lon,lev,historical = CONT.readControl(varia,level,'none')
    elif simu == 'AA-2060':
        lat,lon,lev,future = NUDG.readExperi(varia,'AA','2060',level,'none')
        lat,lon,lev,historical = CONT.readControl(varia,level,'none')
    elif simu == 'AA-2090':
        lat,lon,lev,future = NUDG.readExperi(varia,'AA','2090',level,'none')
        lat,lon,lev,historical = CONT.readControl(varia,level,'none')
    ############################################################################### 
    elif simu == 'coupled_Pd':
        lat,lon,lev,future = COUP.readCOUPs(varia,'C_Fu',level)
        lat,lon,lev,historical = COUP.readCOUPs(varia,'C_Pd',level)      
    ############################################################################### 
    elif simu == 'coupled_Pi':
        lat,lon,lev,future = COUP.readCOUPs(varia,'C_Fu',level)
        lat,lon,lev,historical = COUP.readCOUPs(varia,'C_Pi',level)      
    ###############################################################################        
    elif simu == 'SIT':
        lat,lon,lev,future = THICK.readSIT(varia,'SIT_Fu',level)
        lat,lon,lev,historical = THICK.readSIT(varia,'SIT_Pd',level)
    ############################################################################### 
    elif simu == 'SIC_Pd':
        lat,lon,lev,future = CONC.readSIC(varia,'Fu',level)
        lat,lon,lev,historical = CONC.readSIC(varia,'Pd',level)
    ############################################################################### 
    elif simu == 'SIC_Pi':
        lat,lon,lev,future = CONC.readSIC(varia,'Fu',level)
        lat,lon,lev,historical = CONC.readSIC(varia,'Pi',level)
    ############################################################################### 
    elif simu == 'E3SIT':
        lat,lon,lev,future = E3SIT.readE3SM_SIT(varia,'ESIT_Fu',level)
        lat,lon,lev,historical = E3SIT.readE3SM_SIT(varia,'ESIT_Pd_B',level)
    ############################################################################### 
    elif simu == 'E3SIC_Pd':
        lat,lon,lev,future = E3SIC.readE3SM_SIC(varia,'ESIC_Fu',level)
        lat,lon,lev,historical = E3SIC.readE3SM_SIC(varia,'ESIC_Pd',level)
    elif simu == 'E3SIC_Pi':
        lat,lon,lev,future = E3SIC.readE3SM_SIC(varia,'ESIC_Fu',level)
        lat,lon,lev,historical = E3SIC.readE3SM_SIC(varia,'ESIC_Pi',level)
    ############################################################################### 
    elif simu == 'OLD':
        lat,lon,lev,future = OLD.readOldIceExperi(varia,'FICT',level)
        lat,lon,lev,historical = OLD.readOldIceExperi(varia,'HIT',level)
    ############################################################################### 
    elif simu == 'LONG':
        lat,lon,lev,future = LC.readLong(varia,'Long_Fu',level)
        lat,lon,lev,historical = LC.readLong(varia,'Long_Pd',level)
    ############################################################################### 
    ############################################################################### 
    ############################################################################### 
    ### Check for missing data [ensembles,months,lat,lon]
    future[np.where(future <= -1e10)] = np.nan
    historical[np.where(historical <= -1e10)] = np.nan
    
    ### Check for 4D field
    if vari == 'T700':
        levq = np.where(lev == 700)[0]
        future = future[:,:,levq,:,:].squeeze()
        historical = historical[:,:,levq,:,:].squeeze()
    elif vari == 'T500':
        levq = np.where(lev == 500)[0]
        future = future[:,:,levq,:,:].squeeze()
        historical = historical[:,:,levq,:,:].squeeze()
    elif vari == 'U700':
        levq = np.where(lev == 700)[0]
        future = future[:,:,levq,:,:].squeeze()
        historical = historical[:,:,levq,:,:].squeeze()
    
    ############################################################################### 
    ############################################################################### 
    ############################################################################### 
    ### Calculate over period
    if period == 'OND':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,-3:,:,:],axis=1)
        historicalm = np.nanmean(historical[:,-3:,:,:],axis=1)
    elif period == 'D':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,-1:,:,:],axis=1)
        historicalm = np.nanmean(historical[:,-1:,:,:],axis=1)
    elif period == 'DJF':
        print('Calculating over %s months!' % period)
        runs = [future,historical]
        var_mo = np.empty((2,historical.shape[0]-1,historical.shape[2],historical.shape[3]))
        for i in range(len(runs)):
            var_mo[i,:,:,:] = UT.calcDecJanFeb(runs[i],runs[i],lat,lon,'surface',1) 
        futurem = var_mo[0]
        historicalm = var_mo[1]
    elif period == 'JFM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,0:3,:,:],axis=1)
        historicalm = np.nanmean(historical[:,0:3,:,:],axis=1)
    elif period == 'JF':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,0:2,:,:],axis=1)
        historicalm = np.nanmean(historical[:,0:2,:,:],axis=1)
    elif period == 'FMA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,1:4,:,:],axis=1)
        historicalm = np.nanmean(historical[:,1:4,:,:],axis=1)
    elif period == 'FM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,1:3,:,:],axis=1)
        historicalm = np.nanmean(historical[:,1:3,:,:],axis=1)
    elif period == 'J':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,0:1,:,:],axis=1)
        historicalm = np.nanmean(historical[:,0:1,:,:],axis=1)
    elif period == 'F':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,1:2,:,:],axis=1)
        historicalm = np.nanmean(historical[:,1:2,:,:],axis=1)
    elif period == 'M':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,2:3,:,:],axis=1)
        historicalm = np.nanmean(historical[:,2:3,:,:],axis=1)
    elif period == 'MA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,2:4,:,:],axis=1)
        historicalm = np.nanmean(historical[:,2:4,:,:],axis=1)
    elif period == 'NONE':
        futurem = future
        historicalm = historical
    else:
        print(ValueError('Selected wrong month period!'))

    ############################################################################### 
    ############################################################################### 
    ############################################################################### 
    ### Calculate anomalies
    anom = futurem - historicalm
    
    ### Meshgrid for lat,lon
    lon2,lat2 = np.meshgrid(lon,lat)
    
    ### Calculate polar cap average
    if period == 'NONE':
        latq = np.where((lat >= latpolar))[0]
        anomp = anom[:,:,latq,:]
        lat2p = lat2[latq,:]
        polarave = UT.calc_weightedAve(anomp,lat2p)
    else:
        latq = np.where((lat >= latpolar))[0]
        anomp = anom[:,latq,:]
        lat2p = lat2[latq,:]
        polarave = UT.calc_weightedAve(anomp,lat2p)
    
    print('\n========Calculated Polar Cap Average========\n')
    return polarave

### Test functions (do not use!)
#ave = PolarCap('AA-2030','TEMP','profile',65,'DJF')

#import matplotlib.pyplot as plt
#import numpy as np
#plt.figure(figsize=(11,4))
#plt.title('Monthly SIC Anomalies')
#plt.plot(ave.ravel())
#plt.savefig('/home/zlabe/Desktop/' + 'monthly_SIC_anom.png',dpi=300)
Code example #11
    nmin = MENS.computeMinEns(future, climo, 0.05)

    ### Calculate only for north of 30N
    latq = np.where(lat > 30)[0]
    lat = lat[latq]
    nmin = nmin[latq, :]

    ### Meshgrid of lat and lon
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Mask values that are not significant
    nminmask = nmin.copy()
    nminmask[np.isnan(nminmask)] = 0.
    nminmask[np.where(nminmask > 0.)] = 1.

    ### Calculated weighted average for only significant points
    if weighted == True:
        avenmin = UT.calc_weightedAve(nmin, lat2)
    elif weighted == False:
        avenmin = np.nanmean(nmin[:, :])

    ### Save to look at all values
    allnmin[v] = avenmin

#    ### Save individual files
#    np.savetxt(directorydataout + '%s_minens95_DJF_%s.txt' % (simu,
#               varnames[v]),np.array([avenmin]),delimiter=',',fmt='%3.1f',
#               footer='\n Minimum number of ensembles needed to get' \
#               '\n statistical significance at the 95% confidence\n' \
#               ' level for DJF',newline='\n\n')
Code example #12
flxpd, lat, lon = readHeatFluxData('ANT_Cu', variableq)
flxpi, lat, lon = readHeatFluxData('ANT_Pi', variableq)

### Create meshgrid for lat/lon
lon2, lat2 = np.meshgrid(lon, lat)

### Mask where historical sea ice is present to calculate heat flux
heatanompd = (flxf - flxpd) * sicpd
heatanompi = (flxf - flxpi) * sicpi

### Only calculate where heat flux is changing over sea ice areas
heatanompd[np.where(heatanompd == 0.)] = np.nan
heatanompi[np.where(heatanompi == 0.)] = np.nan

### Calculate weighted average
avepd = UT.calc_weightedAve(heatanompd, lat2)
avepi = UT.calc_weightedAve(heatanompi, lat2)

print('Completed: Data processed!')
###############################################################################
###############################################################################
###############################################################################
### Create subplots of sea ice anomalies
plt.rc('text', usetex=True)
plt.rc('font', **{'family': 'sans-serif', 'sans-serif': ['Avant Garde']})


def adjust_spines(ax, spines):
    for loc, spine in ax.spines.items():
        if loc in spines:
            spine.set_position(('outward', 5))
Code example #13
    diff_fithit = np.nanmean(varmo_fit - varmo_hit,axis=0)
    diff_ficcit = np.nanmean(varmo_fic - varmo_cit,axis=0)
    
    def calc_iceRatio(varx,vary):
        """
        Compute relative % difference
        """
        print('\n>>> Using calc_iceRatio function!')
        
        diff = varx-vary
        percchange = (diff/vary)*100.0
        
        print('*Completed: Finished calc_iceRatio function!')
        return percchange
    
    meanfithit = UT.calc_weightedAve(diff_fithit[:,latq,:],latnew)
    meanficcit = UT.calc_weightedAve(diff_ficcit[:,latq,:],latnew)
    
    meanratiofithit.append(meanfithit)
    meanratioficcit.append(meanficcit)
meanratiofithit = np.asarray(meanratiofithit)
meanratioficcit = np.asarray(meanratioficcit)

### Calculate ratio
ratio = calc_iceRatio(meanratiofithit,meanratioficcit)
    
### Save file
np.savetxt(directorydata2 + 'meanratio.txt',ratio.transpose(),delimiter=',',
           fmt='%3.2f',header='  '.join(varnames)+'\n',
           footer='\n File contains pearsonr correlation coefficients' \
           '\n between FIT-HIT and FIC-CIT to get the relative \n' \
Code example #14
 print(dataset,'= Model!')
 print(dataset_obs,'= Observations!\n')
 print(rm_annual_mean,'= rm_annual_mean') 
 print(rm_standard_dev,'= rm_standard_dev') 
 print(rm_merid_mean,'= rm_merid_mean') 
 print(rm_ensemble_mean,'= rm_ensemble_mean') 
 print(land_only,'= land_only')
 print(ocean_only,'= ocean_only')
 
 ## Variables for plotting
 lons2,lats2 = np.meshgrid(lons,lats) 
 observations = data_obs
 modeldata = data
 modeldatamean = np.nanmean(modeldata,axis=0)
 
 spatialmean_obs = UT.calc_weightedAve(observations,lats2)
 spatialmean_mod = UT.calc_weightedAve(modeldata,lats2)
 spatialmean_modmean = np.nanmean(spatialmean_mod,axis=0)
 
 ##############################################################################
 ##############################################################################
 ##############################################################################
 ## Visualizing through LRP
 summaryDT = LRP.deepTaylorAnalysis(model,
                                         np.append(XtrainS,XtestS,axis=0),
                                         np.append(Ytrain,Ytest,axis=0),
                                         biasBool,annType,num_of_class,
                                         yearlabels)
 
 # for training data only
 summaryDTTrain = LRP.deepTaylorAnalysis(
Code example #15
### Read in data
lat_bounds, lon_bounds = UT.regions(reg_name)
ghg, lat1, lon1 = read_primary_dataset(variq, datasetsingle[0], lat_bounds,
                                       lon_bounds, monthlychoice)
aer, lat1, lon1 = read_primary_dataset(variq, datasetsingle[1], lat_bounds,
                                       lon_bounds, monthlychoice)
lens, lat1, lon1 = read_primary_dataset(variq, datasetsingle[2], lat_bounds,
                                        lon_bounds, monthlychoice)
obs, lat1, lon1 = read_obs_dataset(variq, datasetsingle[3], lat_bounds,
                                   lon_bounds, monthlychoice)
obs2, lat1, lon1 = read_obs_dataset(variq, datasetsingle[4], lat_bounds,
                                    lon_bounds, monthlychoice)

### Calculate global average
lon2, lat2 = np.meshgrid(lon1, lat1)
globe_ghg = UT.calc_weightedAve(ghg, lat2)
globe_aer = UT.calc_weightedAve(aer, lat2)
globe_lens = UT.calc_weightedAve(lens, lat2)
globe_obs = UT.calc_weightedAve(obs, lat2)
globe_obs2 = UT.calc_weightedAve(obs2, lat2)

### Calculate ensemble means
meanghg = np.nanmean(globe_ghg, axis=0)
meanaer = np.nanmean(globe_aer, axis=0)
meanlens = np.nanmean(globe_lens, axis=0)

###############################################################################
###############################################################################
###############################################################################
### Create time series
plt.rc('text', usetex=True)
Code example #16
    latn.append(latqq)

    dataallmask = dataall[i]
    dataallmask[np.where(dataallmask > 100)] = np.nan
    dataallmask[np.where(dataallmask < 1)] = np.nan
    newsic = dataallmask
    newsicn = newsic[:, latq, :]
    datan.append(newsicn)

### Average over Arctic
meann = []
lat2 = []
lon2 = []
for i in range(len(datan)):
    lon2n, lat2n = np.meshgrid(lonall[i], latn[i])
    meanq = UT.calc_weightedAve(datan[i], lat2n)
    meann.append(meanq)
    lat2.append(lat2n)
    lon2.append(lon2n)

################################################################################
################################################################################
################################################################################
### Create subplots of sea ice anomalies
plt.rc('text', usetex=True)
plt.rc('font', **{'family': 'sans-serif', 'sans-serif': ['Avant Garde']})


def adjust_spines(ax, spines):
    for loc, spine in ax.spines.items():
        if loc in spines:
Code example #17
File: calc_SHI.py Project: muskanmahajan37/AA
def SHI(simu, period):
    """
    Script calculates the Siberian High Index
    """
    ### Import modules
    import numpy as np
    import calc_Utilities as UT
    import read_CTLNQ as CONT
    import read_ExpMonthly as NUDG
    import read_ShortCoupled as COUP
    import read_SIT as THICK
    import read_SIC as CONC
    import read_SIT_E3SM as E3SIT
    import read_SIC_E3SM as E3SIC
    import read_OldIceExperi as OLD
    import read_LongCoupled as LC

    ### Select variable
    varia = 'SLP'
    level = 'surface'

    ###############################################################################
    ###############################################################################
    ###############################################################################
    if simu == 'AA-2030':
        lat, lon, lev, future = NUDG.readExperi(varia, 'AA', '2030', level,
                                                'none')
        lat, lon, lev, historical = CONT.readControl(varia, level, 'none')
    elif simu == 'AA-2060':
        lat, lon, lev, future = NUDG.readExperi(varia, 'AA', '2060', level,
                                                'none')
        lat, lon, lev, historical = CONT.readControl(varia, level, 'none')
    elif simu == 'AA-2090':
        lat, lon, lev, future = NUDG.readExperi(varia, 'AA', '2090', level,
                                                'none')
        lat, lon, lev, historical = CONT.readControl(varia, level, 'none')
    ###############################################################################
    elif simu == 'coupled_Pd':
        lat, lon, lev, future = COUP.readCOUPs(varia, 'C_Fu', level)
        lat, lon, lev, historical = COUP.readCOUPs(varia, 'C_Pd', level)
    ###############################################################################
    elif simu == 'coupled_Pi':
        lat, lon, lev, future = COUP.readCOUPs(varia, 'C_Fu', level)
        lat, lon, lev, historical = COUP.readCOUPs(varia, 'C_Pi', level)
    ###############################################################################
    elif simu == 'SIT':
        lat, lon, lev, future = THICK.readSIT(varia, 'SIT_Fu', level)
        lat, lon, lev, historical = THICK.readSIT(varia, 'SIT_Pd', level)
    ###############################################################################
    elif simu == 'SIC_Pd':
        lat, lon, lev, future = CONC.readSIC(varia, 'Fu', level)
        lat, lon, lev, historical = CONC.readSIC(varia, 'Pd', level)
    ###############################################################################
    elif simu == 'SIC_Pi':
        lat, lon, lev, future = CONC.readSIC(varia, 'Fu', level)
        lat, lon, lev, historical = CONC.readSIC(varia, 'Pi', level)
    ###############################################################################
    elif simu == 'E3SIT':
        lat, lon, lev, future = E3SIT.readE3SM_SIT(varia, 'ESIT_Fu', level)
        lat, lon, lev, historical = E3SIT.readE3SM_SIT(varia, 'ESIT_Pd_B',
                                                       level)
    ###############################################################################
    elif simu == 'E3SIC_Pd':
        lat, lon, lev, future = E3SIC.readE3SM_SIC(varia, 'ESIC_Fu', level)
        lat, lon, lev, historical = E3SIC.readE3SM_SIC(varia, 'ESIC_Pd', level)
    elif simu == 'E3SIC_Pi':
        lat, lon, lev, future = E3SIC.readE3SM_SIC(varia, 'ESIC_Fu', level)
        lat, lon, lev, historical = E3SIC.readE3SM_SIC(varia, 'ESIC_Pi', level)
    ###############################################################################
    elif simu == 'OLD':
        lat, lon, lev, future = OLD.readOldIceExperi(varia, 'FICT', level)
        lat, lon, lev, historical = OLD.readOldIceExperi(varia, 'HIT', level)
    ###############################################################################
    elif simu == 'LONG':
        lat, lon, lev, future = LC.readLong(varia, 'Long_Fu', level)
        lat, lon, lev, historical = LC.readLong(varia, 'Long_Pd', level)
    ###############################################################################
    ###############################################################################
    ###############################################################################
    ### Check for missing data [ensembles,months,lat,lon]
    future[np.where(future <= -1e10)] = np.nan
    historical[np.where(historical <= -1e10)] = np.nan

    ###############################################################################
    ###############################################################################
    ###############################################################################
    ### Calculate over period
    if period == 'OND':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, -3:, :, :], axis=1)
        historicalm = np.nanmean(historical[:, -3:, :, :], axis=1)
    elif period == 'D':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, -1:, :, :], axis=1)
        historicalm = np.nanmean(historical[:, -1:, :, :], axis=1)
    elif period == 'DJF':
        print('Calculating over %s months!' % period)
        runs = [future, historical]
        var_mo = np.empty((2, historical.shape[0] - 1, historical.shape[2],
                           historical.shape[3]))
        for i in range(len(runs)):
            var_mo[i, :, :, :] = UT.calcDecJanFeb(runs[i], runs[i], lat, lon,
                                                  'surface', 1)
        futurem = var_mo[0]
        historicalm = var_mo[1]
    elif period == 'JFM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 0:3, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 0:3, :, :], axis=1)
    elif period == 'JF':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 0:2, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 0:2, :, :], axis=1)
    elif period == 'FMA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 1:4, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 1:4, :, :], axis=1)
    elif period == 'FM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 1:3, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 1:3, :, :], axis=1)
    elif period == 'J':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 0:1, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 0:1, :, :], axis=1)
    elif period == 'F':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 1:2, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 1:2, :, :], axis=1)
    elif period == 'M':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 2:3, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 2:3, :, :], axis=1)
    elif period == 'MA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 2:4, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 2:4, :, :], axis=1)
    else:
        print(ValueError('Selected wrong month period!'))

    ###############################################################################
    ###############################################################################
    ###############################################################################
    ### Calculate anomalies
    anom = futurem - historicalm

    ### Meshgrid for lat,lon
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Calculate SHI
    lonq = np.where((lon >= 80) & (lon <= 120))[0]
    latq = np.where((lat >= 40) & (lat <= 65))[0]
    anomlon = anom[:, :, lonq]
    anoms = anomlon[:, latq, :]
    lat2sq = lat2[latq, :]
    lat2s = lat2sq[:, lonq]
    shi = UT.calc_weightedAve(anoms, lat2s)

    print('\n========Calculated Siberian High Index========\n')
    return shi


### Test functions (do not use!)
#shi = SHI('AA-2090','DJF')
Code example #18
def PolarCapVert(simu, varia, level, latpolar, period, levelVert):
    ###############################################################################
    ###############################################################################
    ###############################################################################
    if simu == 'AA-2030':
        lat, lon, lev, future = NUDG.readExperi(varia, 'AA', '2030', level,
                                                'none')
        lat, lon, lev, historical = CONT.readControl(varia, level, 'none')
    elif simu == 'AA-2060':
        lat, lon, lev, future = NUDG.readExperi(varia, 'AA', '2060', level,
                                                'none')
        lat, lon, lev, historical = CONT.readControl(varia, level, 'none')
    elif simu == 'AA-2090':
        lat, lon, lev, future = NUDG.readExperi(varia, 'AA', '2090', level,
                                                'none')
        lat, lon, lev, historical = CONT.readControl(varia, level, 'none')
    ###############################################################################
    elif simu == 'coupled_Pd':
        lat, lon, lev, future = COUP.readCOUPs(varia, 'C_Fu', level)
        lat, lon, lev, historical = COUP.readCOUPs(varia, 'C_Pd', level)
    ###############################################################################
    elif simu == 'coupled_Pi':
        lat, lon, lev, future = COUP.readCOUPs(varia, 'C_Fu', level)
        lat, lon, lev, historical = COUP.readCOUPs(varia, 'C_Pi', level)
    ###############################################################################
    elif simu == 'SIT':
        lat, lon, lev, future = THICK.readSIT(varia, 'SIT_Fu', level)
        lat, lon, lev, historical = THICK.readSIT(varia, 'SIT_Pd', level)
    ###############################################################################
    elif simu == 'SIC_Pd':
        lat, lon, lev, future = CONC.readSIC(varia, 'Fu', level)
        lat, lon, lev, historical = CONC.readSIC(varia, 'Pd', level)
    ###############################################################################
    elif simu == 'SIC_Pi':
        lat, lon, lev, future = CONC.readSIC(varia, 'Fu', level)
        lat, lon, lev, historical = CONC.readSIC(varia, 'Pi', level)
    ###############################################################################
    elif simu == 'E3SIT':
        lat, lon, lev, future = E3SIT.readE3SM_SIT(varia, 'ESIT_Fu', level)
        lat, lon, lev, historical = E3SIT.readE3SM_SIT(varia, 'ESIT_Pd_B',
                                                       level)
    ###############################################################################
    elif simu == 'E3SIC_Pd':
        lat, lon, lev, future = E3SIC.readE3SM_SIC(varia, 'ESIC_Fu', level)
        lat, lon, lev, historical = E3SIC.readE3SM_SIC(varia, 'ESIC_Pd', level)
    elif simu == 'E3SIC_Pi':
        lat, lon, lev, future = E3SIC.readE3SM_SIC(varia, 'ESIC_Fu', level)
        lat, lon, lev, historical = E3SIC.readE3SM_SIC(varia, 'ESIC_Pi', level)
    ###############################################################################
    elif simu == 'OLD':
        lat, lon, lev, future = OLD.readOldIceExperi(varia, 'FICT', level)
        lat, lon, lev, historical = OLD.readOldIceExperi(varia, 'HIT', level)
    ###############################################################################
    elif simu == 'LONG':
        lat, lon, lev, future = LC.readLong(varia, 'Long_Fu', level)
        lat, lon, lev, historical = LC.readLong(varia, 'Long_Pd', level)
    ###############################################################################
    ###############################################################################
    ###############################################################################
    ### Check for missing data [ensembles,months,lat,lon]
    future[np.where(future <= -1e10)] = np.nan
    historical[np.where(historical <= -1e10)] = np.nan

    ###############################################################################
    ###############################################################################
    ###############################################################################
    ### Calculate over period
    if period == 'OND':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, -3:, :, :], axis=1)
        historicalm = np.nanmean(historical[:, -3:, :, :], axis=1)
    elif period == 'D':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, -1:, :, :], axis=1)
        historicalm = np.nanmean(historical[:, -1:, :, :], axis=1)
    elif period == 'DJF':
        print('Calculating over %s months!' % period)
        runs = [future, historical]
        var_mo = np.empty((2, historical.shape[0] - 1, historical.shape[2],
                           historical.shape[3], historical.shape[4]))
        for i in range(len(runs)):
            var_mo[i, :, :, :, :] = UT.calcDecJanFeb(runs[i], runs[i], lat,
                                                     lon, level, 17)
        futurem = var_mo[0]
        historicalm = var_mo[1]
    elif period == 'JFM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 0:3, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 0:3, :, :], axis=1)
    elif period == 'JF':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 0:2, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 0:2, :, :], axis=1)
    elif period == 'FMA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 1:4, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 1:4, :, :], axis=1)
    elif period == 'FM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 1:3, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 1:3, :, :], axis=1)
    elif period == 'J':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 0:1, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 0:1, :, :], axis=1)
    elif period == 'F':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 1:2, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 1:2, :, :], axis=1)
    elif period == 'M':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 2:3, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 2:3, :, :], axis=1)
    elif period == 'MA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:, 2:4, :, :], axis=1)
        historicalm = np.nanmean(historical[:, 2:4, :, :], axis=1)
    elif period == 'NONE':
        futurem = future
        historicalm = historical
    else:
        print(ValueError('Selected wrong month period!'))

    ###############################################################################
    ###############################################################################
    ###############################################################################
    ### Calculate anomalies
    anom = futurem - historicalm

    ### Meshgrid for lat,lon
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Calculate polar cap average
    latq = np.where((lat >= latpolar))[0]
    anomp = anom[:, :, latq, :]
    lat2p = lat2[latq, :]
    polarave = UT.calc_weightedAve(anomp, lat2p)

    ### Calculate ensemble mean
    polaraveMean = np.nanmean(polarave, axis=0)

    ###############################################################################
    ###############################################################################
    ###############################################################################
    ### Calculate vertical levels and save file
    levqq = np.where((lev >= levelVert))[0]
    levvv = lev[levqq]
    polaraveMeanvvv = polaraveMean[levqq]

    ### Save file
    np.savetxt(directorydata + '%s_1000-%s_%s.txt' % (simu, levelVert, varia),
               polaraveMeanvvv,
               delimiter=',',
               fmt='%.3f')
    np.savetxt(directorydata + 'Levels_1000-%s_%s.txt' % (levelVert, varia),
               levvv,
               delimiter=',',
               fmt='%.1f')

    print('\n========Calculated Polar Cap Average========\n')
    return polaraveMeanvvv, levvv


###############################################################################
###############################################################################
###############################################################################
### Test functions (do not use!)

#aveAA30,lev = PolarCapVert('AA-2030','TEMP','profile',65,'DJF',500)
#aveAA60,lev = PolarCapVert('AA-2060','TEMP','profile',65,'DJF',500)
#aveAA90,lev = PolarCapVert('AA-2090','TEMP','profile',65,'DJF',500)
################################################################################
#ave_coupPd,lev = PolarCapVert('coupled_Pd','TEMP','profile',65,'DJF',500)
#ave_coupPi,lev = PolarCapVert('coupled_Pi','TEMP','profile',65,'DJF',500)
################################################################################
#ave_SIT,lev = PolarCapVert('SIT','TEMP','profile',65,'DJF',500)
#ave_SICPd,lev = PolarCapVert('SIC_Pd','TEMP','profile',65,'DJF',500)
#ave_SICPi,lev = PolarCapVert('SIC_Pi','TEMP','profile',65,'DJF',500)
###############################################################################
#ave_NET,lev = PolarCapVert('OLD','TEMP','profile',65,'DJF',500)
###############################################################################
#ave_ESICPd,lev = PolarCapVert('E3SIC_Pd','TEMP','profile',65,'DJF',500)
#ave_ESICPi,lev = PolarCapVert('E3SIC_Pi','TEMP','profile',65,'DJF',500)
Code example #19
def readVariables(varnames):
    lat, lon, time, lev, tashit = DO.readMeanExperiAll('%s' % varnames, 'HIT',
                                                       'surface')
    lat, lon, time, lev, tasFIT = DO.readMeanExperiAll('%s' % varnames, 'FIT',
                                                       'surface')

    ### Create 2d array of latitude and longitude
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Read in QBO phases
    filenamehitp = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitn = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    filenamehitp2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitn2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    pos_hit = np.append(
        np.genfromtxt(filenamehitp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitp2, unpack=True, usecols=[0], dtype='int') +
        100)
    neg_hit = np.append(
        np.genfromtxt(filenamehitn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitn2, unpack=True, usecols=[0], dtype='int') +
        100)

    filenameFITp = directorydata + 'FIT/monthly/QBO_%s_FIT.txt' % qbophase[0]
    filenameFITn = directorydata + 'FIT/monthly/QBO_%s_FIT.txt' % qbophase[2]
    filenameFITp2 = directorydata2 + 'FIT/monthly/QBO_%s_FIT.txt' % qbophase[0]
    filenameFITn2 = directorydata2 + 'FIT/monthly/QBO_%s_FIT.txt' % qbophase[2]
    pos_FIT = np.append(
        np.genfromtxt(filenameFITp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenameFITp2, unpack=True, usecols=[0], dtype='int') +
        100)
    neg_FIT = np.append(
        np.genfromtxt(filenameFITn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenameFITn2, unpack=True, usecols=[0], dtype='int') +
        100)

    tas_mo = [tashit, tasFIT]

    ### Composite by QBO phase
    tas_mohitpos = tas_mo[0][pos_hit, :, :, :]
    tas_moFITpos = tas_mo[1][pos_FIT, :, :, :]

    tas_mohitneg = tas_mo[0][neg_hit, :, :, :]
    tas_moFITneg = tas_mo[1][neg_FIT, :, :, :]

    ### Compute comparisons for months - select region
    if varnames == 'SLP':
        lonq = np.where((lon >= 80) & (lon <= 120))[0]
        FITpos = tas_moFITpos[:, :, :, lonq]
        FITneg = tas_moFITneg[:, :, :, lonq]
        latq = np.where((lat >= 40) & (lat <= 65))[0]
        FITpos = FITpos[:, :, latq]
        FITneg = FITneg[:, :, latq]
        lat2sq = lat2[latq, :]
        lat2s = lat2sq[:, lonq]
        FITpos = UT.calc_weightedAve(FITpos, lat2s)
        FITneg = UT.calc_weightedAve(FITneg, lat2s)

        hitpos = tas_mohitpos[:, :, :, lonq]
        hitneg = tas_mohitneg[:, :, :, lonq]
        hitpos = hitpos[:, :, latq]
        hitneg = hitneg[:, :, latq]
        hitpos = UT.calc_weightedAve(hitpos, lat2s)
        hitneg = UT.calc_weightedAve(hitneg, lat2s)
    elif varnames == 'T1000':
        lonq = np.where((lon >= 70) & (lon <= 140))[0]
        FITpos = tas_moFITpos[:, :, :, lonq]
        FITneg = tas_moFITneg[:, :, :, lonq]
        latq = np.where((lat >= 35) & (lat <= 60))[0]
        FITpos = FITpos[:, :, latq]
        FITneg = FITneg[:, :, latq]
        lat2sq = lat2[latq, :]
        lat2s = lat2sq[:, lonq]
        FITpos = UT.calc_weightedAve(FITpos, lat2s) - 273.15
        FITneg = UT.calc_weightedAve(FITneg, lat2s) - 273.15

        hitpos = tas_mohitpos[:, :, :, lonq]
        hitneg = tas_mohitneg[:, :, :, lonq]
        hitpos = hitpos[:, :, latq]
        hitneg = hitneg[:, :, latq]
        hitpos = UT.calc_weightedAve(hitpos, lat2s) - 273.15
        hitneg = UT.calc_weightedAve(hitneg, lat2s) - 273.15

    diffruns = [
        FITpos.squeeze(),
        FITneg.squeeze(),
        hitpos.squeeze(),
        hitneg.squeeze()
    ]

    return diffruns
Code example #20
lons2, lats2 = np.meshgrid(lons, lats[latq])

varf = varf[:, latq, :]
varh = varh[:, latq, :]
varcf = varcf[:, latq, :]
varch = varch[:, latq, :]

### Rearrange months to Oct-Mar (Oct-Dec followed by Jan-Mar)
varnf = np.append(varf[9:], varf[:3], axis=0)
varnh = np.append(varh[9:], varh[:3], axis=0)

varncf = np.append(varcf[9:], varcf[:3], axis=0)
varnch = np.append(varch[9:], varch[:3], axis=0)

### Calculate monthly averages
sitfmean = UT.calc_weightedAve(varnf, lats2)
sithmean = UT.calc_weightedAve(varnh, lats2)

sicfmean = UT.calc_weightedAve(varncf, lats2)
sichmean = UT.calc_weightedAve(varnch, lats2)

print('Completed: Data processed!')
###############################################################################
###############################################################################
###############################################################################
### Create subplots of sea ice anomalies
plt.rc('text', usetex=True)
plt.rc('font', **{'family': 'sans-serif', 'sans-serif': ['Avant Garde']})


def adjust_spines(ax, spines):
Code example #21
### Slice over region of interest for Eurasia (40-80N,35-180E)
latq = np.where((lat >= 40) & (lat <= 80))[0]
lonq = np.where((lon >= 35) & (lon <=180))[0]
latn = lat[latq]
lonn = lon[lonq]
lon2,lat2 = np.meshgrid(lonn,latn)

modlat = mod[:,:,latq,:]
modlon = modlat[:,:,:,lonq]
modslice = modlon.copy()

### Consider years 1979-2015
modsliceq = modslice[:,:-1]

### Calculate average snow index
snowindex = UT.calc_weightedAve(modsliceq,lat2)

### Calculate detrended snow index
snowindexdt = SS.detrend(snowindex,type='linear',axis=1)

### Save both indices
np.savetxt(directoryoutput + 'SWE_Eurasia_ON.txt',
           np.vstack([years,snowindex]).transpose(),delimiter=',',fmt='%3.1f',
           footer='\n Snow cover index calculated for each' \
           '\n experiment [CSST,CSIC,AMIP,AMQ,AMS,AMQS]\n' \
           ' in Oct-Nov',newline='\n\n')
np.savetxt(directoryoutput + 'SWE_Eurasia_ON_DETRENDED.txt',
           np.vstack([years,snowindexdt]).transpose(),delimiter=',',fmt='%3.1f',
           footer='\n Snow cover index calculated for each' \
           '\n experiment [CSST,CSIC,AMIP,AMQ,AMS,AMQS]\n' \
           ' in Oct-Nov ---> detrended data',newline='\n\n')
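A minimal, self-contained illustration of the detrending step above: scipy.signal.detrend removes a least-squares linear fit along the chosen axis, here per experiment. The index values are synthetic.

import numpy as np
from scipy import signal as SS
years = np.arange(1979, 2016)
index = 0.02 * (years - years[0]) + 0.1 * np.random.randn(6, years.size)  # 6 synthetic trended series
index_dt = SS.detrend(index, type='linear', axis=1)                       # trend (and mean) removed
print(index.mean(axis=1).round(2), index_dt.mean(axis=1).round(2))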
Code example #22
mean_aer = np.nanmean(aer[:, yearq, :, :], axis=1)
mean_lens = np.nanmean(lens[:, yearq, :, :], axis=1)
mean_obs = np.nanmean(obs[yearq, :, :], axis=0)

anom_ghg = np.empty((ghg.shape))
anom_aer = np.empty((aer.shape))
anom_lens = np.empty((lens.shape))
for i in range(ghg.shape[1]):
    anom_ghg[:, i, :, :] = ghg[:, i, :, :] - mean_ghg
    anom_aer[:, i, :, :] = aer[:, i, :, :] - mean_aer
    anom_lens[:, i, :, :] = lens[:, i, :, :] - mean_lens
anom_obs = obs - mean_obs

### Calculate global average
lon2, lat2 = np.meshgrid(lon1, lat1)
globe_ghg = UT.calc_weightedAve(anom_ghg, lat2)
globe_aer = UT.calc_weightedAve(anom_aer, lat2)
globe_lens = UT.calc_weightedAve(anom_lens, lat2)
globe_obs = UT.calc_weightedAve(anom_obs, lat2)

### Calculate ensemble means
ensanom_ghg = np.nanmean(globe_ghg, axis=0)
ensanom_aer = np.nanmean(globe_aer, axis=0)
ensanom_lens = np.nanmean(globe_lens, axis=0)

###############################################################################
###############################################################################
###############################################################################
### Create time series
plt.rc('text', usetex=True)
plt.rc('font', **{'family': 'sans-serif', 'sans-serif': ['Avant Garde']})
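The per-year loop above that removes the base-period climatology can be written as a single broadcast subtraction. A hedged, synthetic-shape check of the equivalence:

import numpy as np
ghg = np.random.randn(4, 100, 96, 144)              # synthetic [ens, year, lat, lon]
yearq = np.arange(0, 30)                            # synthetic base-period indices
mean_ghg = np.nanmean(ghg[:, yearq, :, :], axis=1)
anom_loop = np.empty(ghg.shape)
for i in range(ghg.shape[1]):
    anom_loop[:, i, :, :] = ghg[:, i, :, :] - mean_ghg
anom_bcast = ghg - mean_ghg[:, np.newaxis, :, :]    # equivalent broadcast form
print(np.allclose(anom_loop, anom_bcast))           # True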
Code example #23
def readData(simu,period,varia,level,latpolar,cps):
    ############################################################################### 
    ############################################################################### 
    ############################################################################### 
    if simu == 'AA-2030':
        lat,lon,lev,future = NUDG.readExperi(varia,'AA','2030',level,'none')
        lat,lon,lev,historical = CONT.readControl(varia,level,'none')
    elif simu == 'AA-2060':
        lat,lon,lev,future = NUDG.readExperi(varia,'AA','2060',level,'none')
        lat,lon,lev,historical = CONT.readControl(varia,level,'none')
    elif simu == 'AA-2090':
        lat,lon,lev,future = NUDG.readExperi(varia,'AA','2090',level,cps)
        lat,lon,lev,historical = CONT.readControl(varia,level,cps)
    ############################################################################### 
    elif simu == 'coupled':
        lat,lon,lev,future = COUP.readCOUPs(varia,'C_Fu',level)
        lat,lon,lev,historical = COUP.readCOUPs(varia,'C_Pd',level)        
    ###############################################################################        
    elif simu == 'SIT':
        lat,lon,lev,future = THICK.readSIT(varia,'SIT_Fu',level)
        lat,lon,lev,historical = THICK.readSIT(varia,'SIT_Pd',level)
    ############################################################################### 
    elif simu == 'SIC':
        lat,lon,lev,future = CONC.readSIC(varia,'Fu',level)
        lat,lon,lev,historical = CONC.readSIC(varia,'Pd',level)
    ############################################################################### 
    ############################################################################### 
    ############################################################################### 
    ### Calculate number of ensembles
    nens = np.shape(historical)[0]

    ### Check for missing data [ensembles,months,lat,lon]
    future[np.where(future <= -1e10)] = np.nan
    historical[np.where(historical <= -1e10)] = np.nan
    
    ############################################################################### 
    ############################################################################### 
    ############################################################################### 
    ### Calculate over period
    if period == 'OND':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,-3:],axis=1)
        historicalm = np.nanmean(historical[:,-3:],axis=1)
    elif period == 'D':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,-1:],axis=1)
        historicalm = np.nanmean(historical[:,-1:],axis=1)
    elif period == 'DJF':
        print('Calculating over %s months!' % period)
        runs = [future,historical]
        var_mo = np.empty((2,historical.shape[0]-1,historical.shape[2],historical.shape[3],historical.shape[4]))
        for i in range(len(runs)):
            var_mo[i,:,:,:,:] = UT.calcDecJanFeb(runs[i],runs[i],lat,lon,level,17) 
        futurem = var_mo[0]
        historicalm = var_mo[1]
    elif period == 'JFM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,0:3],axis=1)
        historicalm = np.nanmean(historical[:,0:3],axis=1)
    elif period == 'JF':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,0:2],axis=1)
        historicalm = np.nanmean(historical[:,0:2],axis=1)
    elif period == 'FMA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,1:4],axis=1)
        historicalm = np.nanmean(historical[:,1:4],axis=1)
    elif period == 'FM':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,1:3],axis=1)
        historicalm = np.nanmean(historical[:,1:3],axis=1)
    elif period == 'J':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,0:1],axis=1)
        historicalm = np.nanmean(historical[:,0:1],axis=1)
    elif period == 'F':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,1:2],axis=1)
        historicalm = np.nanmean(historical[:,1:2],axis=1)
    elif period == 'M':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,2:3],axis=1)
        historicalm = np.nanmean(historical[:,2:3],axis=1)
    elif period == 'MA':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future[:,2:4],axis=1)
        historicalm = np.nanmean(historical[:,2:4],axis=1)
    elif period == 'annual':
        print('Calculating over %s months!' % period)
        futurem = np.nanmean(future,axis=1)
        historicalm = np.nanmean(historical,axis=1)
    elif period == 'NONE':
        print('Calculating over %s months!' % period)
        futurem = future
        historicalm = historical
    elif period == 'timemonth':
        print('Calculating over O,N,D,J,F,M months!')
        futurem = np.append(future[:,-3:,:,:,:],future[:,:3,:,:,:],axis=1)
        historicalm = np.append(historical[:,-3:,:,:,:],historical[:,:3,:,:,:],axis=1)
    else:
        raise ValueError('Selected wrong month period!')

    ###########################################################################
    ###########################################################################
    ###########################################################################
    ### Calculate polar cap
    lon2,lat2 = np.meshgrid(lon,lat)
    
    ### Select latitudes poleward of latpolar
    latq = np.where((lat >= latpolar))[0]
    lat2p = lat2[latq,:]
        
    futurep = futurem[:,:,:,latq,:]
    futuremz = UT.calc_weightedAve(futurep,lat2p)
    
    historicalp = historicalm[:,:,:,latq,:]
    historicalmz = UT.calc_weightedAve(historicalp,lat2p)
    
    ### Calculate anomalies [ens,level,lat]
    anom = futuremz - historicalmz

    ### Calculate ensemble mean
    anommean = np.nanmean(anom,axis=0)
    
    ### Calculate significance
    pruns = UT.calc_FDR_ttest(futuremz,historicalmz,0.05) #FDR
    
    ### Select climo
    climo = np.nanmean(historicalmz,axis=0)
    
    return lat,lon,lev,anommean,nens,pruns,climo
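readData calls UT.calc_FDR_ttest for field significance; as a point of reference, here is a minimal self-contained sketch of a Benjamini-Hochberg (FDR) screened grid-point t-test. This is an assumption about the general approach, not the source of that utility, and the names below are hypothetical.

import numpy as np
from scipy import stats

def fdr_ttest_sketch(future, historical, alpha=0.05):
    """Grid-point Welch t-test screened with the Benjamini-Hochberg false
    discovery rate; inputs are (ens, ...) arrays, output is a boolean mask."""
    t, p = stats.ttest_ind(future, historical, axis=0, equal_var=False)
    psort = np.sort(p.ravel())
    m = psort.size
    thresh = alpha * np.arange(1, m + 1) / m        # BH step-up thresholds
    below = np.where(psort <= thresh)[0]
    pcrit = psort[below[-1]] if below.size > 0 else 0.0
    return p <= pcrit

### Synthetic example: 20-member ensembles on a 10 x 20 grid
rng = np.random.default_rng(0)
fut = rng.normal(0.5, 1.0, size=(20, 10, 20))
hist = rng.normal(0.0, 1.0, size=(20, 10, 20))
print(fdr_ttest_sketch(fut, hist).sum(), 'grid points pass the FDR screen')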
Code Example #24
def readWAF(varnames, runnames, experiments, qbophase):
    """
    Function reads in WAF data for listed experiments

    Parameters
    ----------
    varnames : string
        variable to download
    runnames : list of strings
        model experiments to read in
    experiments : list of strings
        model simulations to compare
    qbophase : list of strings
        list of qbo phases

    Returns
    -------
    diffruns : list of arrays
        arrays for each experiment variable
    pruns : list of arrays
        arrays of p-values for each experiment variable
    lev : 1d array
        levels

    Usage
    -----
    diffruns,pruns,lev = readWAF(varnames,runnames,experiments,qbophase)
    """
    print('\n>>> Using readWAF function!')

    ### Call functions for variable profile data for polar cap
    lat, lon, time, lev, varhit = DO.readMeanExperiAll('%s' % varnames, 'HIT',
                                                       'profile3')
    lat, lon, time, lev, varfict = DO.readMeanExperiAll(
        '%s' % varnames, 'FICT', 'profile3')

    ### Create 2d array of latitude and longitude
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Read in QBO phases
    filenamehitp = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitno = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[1]
    filenamehitn = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    filenamehitp2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitno2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[1]
    filenamehitn2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    pos_hit = np.append(
        np.genfromtxt(filenamehitp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitp2, unpack=True, usecols=[0], dtype='int') +
        100)
    non_hit = np.append(
        np.genfromtxt(filenamehitno, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitno2, unpack=True, usecols=[0], dtype='int') +
        100)
    neg_hit = np.append(
        np.genfromtxt(filenamehitn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitn2, unpack=True, usecols=[0], dtype='int') +
        100)

    filenamefictp = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[0]
    filenamefictno = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        1]
    filenamefictn = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[2]
    filenamefictp2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        0]
    filenamefictno2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        1]
    filenamefictn2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        2]
    pos_fict = np.append(
        np.genfromtxt(filenamefictp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictp2, unpack=True, usecols=[0], dtype='int') +
        100)
    non_fict = np.append(
        np.genfromtxt(filenamefictno, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictno2, unpack=True, usecols=[0], dtype='int') +
        100)
    neg_fict = np.append(
        np.genfromtxt(filenamefictn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictn2, unpack=True, usecols=[0], dtype='int') +
        100)

    ### Concatenate runs
    var_mo = [varhit, varfict]

    ### Save memory
    del varhit
    del varfict

    ### Composite by QBO phase
    var_mohitpos = var_mo[0][pos_hit, :]
    var_mofictpos = var_mo[1][pos_fict, :]

    var_mohitnon = var_mo[0][non_hit, :]
    var_mofictnon = var_mo[1][non_fict, :]

    var_mohitneg = var_mo[0][neg_hit, :]
    var_mofictneg = var_mo[1][neg_fict, :]

    ### Compute comparisons for months - take the ensemble average
    ficthitpos = np.nanmean(var_mofictpos - var_mohitpos, axis=0)
    ficthitnon = np.nanmean(var_mofictnon - var_mohitnon, axis=0)
    ficthitneg = np.nanmean(var_mofictneg - var_mohitneg, axis=0)

    ficthitposa = UT.calc_weightedAve(ficthitpos, lat2)
    ficthitnona = UT.calc_weightedAve(ficthitnon, lat2)
    ficthitnega = UT.calc_weightedAve(ficthitneg, lat2)

    diffruns = [ficthitposa, ficthitnona, ficthitnega]

    ### Calculate significance for days
    stat_FICTHITpos, pvalue_FICTHITpos = UT.calc_indttest(
        var_mo[1][pos_fict, :], var_mo[0][pos_hit, :])
    stat_FICTHITnon, pvalue_FICTHITnon = UT.calc_indttest(
        var_mo[1][non_fict, :], var_mo[0][non_hit, :])
    stat_FICTHITneg, pvalue_FICTHITneg = UT.calc_indttest(
        var_mo[1][neg_fict, :], var_mo[0][neg_hit, :])

    pvalue_FICTHITposa = UT.calc_weightedAve(pvalue_FICTHITpos, lat2)
    pvalue_FICTHITnona = UT.calc_weightedAve(pvalue_FICTHITnon, lat2)
    pvalue_FICTHITnega = UT.calc_weightedAve(pvalue_FICTHITneg, lat2)

    pruns = [pvalue_FICTHITposa, pvalue_FICTHITnona, pvalue_FICTHITnega]

    print('\n*Completed: Finished readWAF function!')
    return diffruns, pruns, lev
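The QBO compositing in readWAF reduces to selecting ensemble members by index lists (with the second batch of members offset by 100) and differencing the phase composites; a minimal self-contained sketch of that pattern on synthetic data is shown below. The index values are made up for illustration.

import numpy as np

### Synthetic stand-ins: 200 members (two batches of 100) of a (lev, day) field
rng = np.random.default_rng(1)
var_hit = rng.normal(size=(200, 17, 90))
var_fict = rng.normal(size=(200, 17, 90)) + 0.2

### Member indices for one QBO phase, as if read from the two text files; the
### second file's indices are offset by +100 to point at the second batch
pos_first = np.array([0, 3, 7, 12])
pos_second = np.array([5, 9, 11]) + 100
pos = np.append(pos_first, pos_second)

### Composite difference (FICT minus HIT) for that phase, ensemble averaged
composite_pos = np.nanmean(var_fict[pos] - var_hit[pos], axis=0)
print(composite_pos.shape)   # -> (17, 90)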
Code Example #25
def readVariables(varnames,period,region):
    lat,lon,time,lev,tashit = DO.readMeanExperiAll('%s' % varnames,
                                                'HIT','surface')
    lat,lon,time,lev,tasfict = DO.readMeanExperiAll('%s' % varnames,
                                                'FICT','surface')
    
    ### Create 2d array of latitude and longitude
    lon2,lat2 = np.meshgrid(lon,lat)
    
    ### Read in QBO phases 
    filenamehitp = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitno = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[1]
    filenamehitn = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    filenamehitp2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitno2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[1]
    filenamehitn2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    pos_hit = np.append(np.genfromtxt(filenamehitp,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamehitp2,unpack=True,usecols=[0],dtype='int')+100)
    non_hit = np.append(np.genfromtxt(filenamehitno,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamehitno2,unpack=True,usecols=[0],dtype='int')+100)
    neg_hit = np.append(np.genfromtxt(filenamehitn,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamehitn2,unpack=True,usecols=[0],dtype='int')+100)    
    
    filenamefictp = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[0]
    filenamefictno = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[1]
    filenamefictn = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[2]
    filenamefictp2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[0]
    filenamefictno2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[1]
    filenamefictn2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[2]
    pos_fict = np.append(np.genfromtxt(filenamefictp,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamefictp2,unpack=True,usecols=[0],dtype='int')+100)
    non_fict = np.append(np.genfromtxt(filenamefictno,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamefictno2,unpack=True,usecols=[0],dtype='int')+100)
    neg_fict = np.append(np.genfromtxt(filenamefictn,unpack=True,usecols=[0],dtype='int'),
                        np.genfromtxt(filenamefictn2,unpack=True,usecols=[0],dtype='int')+100)
    
    ### Concatenate runs
    runs = [tashit,tasfict]
    
    ### Separate per period (daily data; only 'D' is handled here)
    if period == 'D':
        tas_mo = np.empty((2,tashit.shape[0],90,tashit.shape[2],tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = runs[i][:,60:150,:,:]
#            tas_mo[i] = np.nanmean(runs[i][:,-2:,:,:],axis=1)
    else:
        raise ValueError('Wrong period selected!')
        
    ### Composite by QBO phase    
    tas_mohitpos = tas_mo[0][pos_hit,:,:,:] 
    tas_mofictpos = tas_mo[1][pos_fict,:,:,:] 
    
    tas_mohitnon = tas_mo[0][non_hit,:,:,:] 
    tas_mofictnon = tas_mo[1][non_fict,:,:,:] 
    
    tas_mohitneg = tas_mo[0][neg_hit,:,:,:] 
    tas_mofictneg = tas_mo[1][neg_fict,:,:,:] 
    
    ### Compute comparisons for months - select region
    if region == 'Atlantic':
        lonq = np.append(np.where((lon >=310) & (lon <=360))[0],
                             np.where((lon >=0) & (lon <=20))[0],axis=0)
    elif region == 'Pacific':
        lonq = np.where((lon >= 120) & (lon <= 170))[0]
        
    ficthitpos = tas_mofictpos[:,:,:,lonq]
    ficthitnon = tas_mofictnon[:,:,:,lonq] 
    ficthitneg = tas_mofictneg[:,:,:,lonq]
    
    ### Calculate upper lats
    latqu = np.where((lat >=60) & (lat <=90))[0]
    ficthitposu = ficthitpos[:,:,latqu]
    ficthitnonu = ficthitnon[:,:,latqu] 
    ficthitnegu = ficthitneg[:,:,latqu]
    lat2squ = lat2[latqu,:]
    lat2su = lat2squ[:,lonq]
    ficthitposuu = UT.calc_weightedAve(ficthitposu,lat2su)
    ficthitnonuu = UT.calc_weightedAve(ficthitnonu,lat2su)
    ficthitneguu = UT.calc_weightedAve(ficthitnegu,lat2su)
    
    ### Calculate lower lats
    latql = np.where((lat >=20) & (lat <=50))[0]
    ficthitposl = ficthitpos[:,:,latql]
    ficthitnonl = ficthitnon[:,:,latql] 
    ficthitnegl = ficthitneg[:,:,latql]
    lat2sql = lat2[latql,:]
    lat2sl = lat2sql[:,lonq]
    ficthitposll = UT.calc_weightedAve(ficthitposl,lat2sl)
    ficthitnonll = UT.calc_weightedAve(ficthitnonl,lat2sl)
    ficthitnegll = UT.calc_weightedAve(ficthitnegl,lat2sl)
    
    ### Calculate zonal index (lower-latitude Z500 minus upper-latitude Z500)
    zdiffpos = ficthitposll - ficthitposuu
    zdiffnon = ficthitnonll - ficthitnonuu 
    zdiffneg = ficthitnegll - ficthitneguu
    diffruns_fict = [zdiffpos,zdiffnon,zdiffneg]
    
    ###########################################################################
    ### Calculate for HIT
    hitpos = tas_mohitpos[:,:,:,lonq]
    hitnon = tas_mohitnon[:,:,:,lonq] 
    hitneg = tas_mohitneg[:,:,:,lonq]
    
    ### Calculate upper lats
    latqu = np.where((lat >=60) & (lat <=90))[0]
    hitposu = hitpos[:,:,latqu]
    hitnonu = hitnon[:,:,latqu] 
    hitnegu = hitneg[:,:,latqu]
    lat2squ = lat2[latqu,:]
    lat2su = lat2squ[:,lonq]
    hitposuu = UT.calc_weightedAve(hitposu,lat2su)
    hitnonuu = UT.calc_weightedAve(hitnonu,lat2su)
    hitneguu = UT.calc_weightedAve(hitnegu,lat2su)
    
    ### Calculate lower lats
    latql = np.where((lat >=20) & (lat <=50))[0]
    hitposl = hitpos[:,:,latql]
    hitnonl = hitnon[:,:,latql] 
    hitnegl = hitneg[:,:,latql]
    lat2sql = lat2[latql,:]
    lat2sl = lat2sql[:,lonq]
    hitposll = UT.calc_weightedAve(hitposl,lat2sl)
    hitnonll = UT.calc_weightedAve(hitnonl,lat2sl)
    hitnegll = UT.calc_weightedAve(hitnegl,lat2sl)
    
    ### Calculate zonal index (lower-latitude Z500 minus upper-latitude Z500)
    zdiffposh = hitposll - hitposuu
    zdiffnonh = hitnonll - hitnonuu 
    zdiffnegh = hitnegll - hitneguu
    diffruns_hit = [zdiffposh,zdiffnonh,zdiffnegh]
    
    return diffruns_fict,diffruns_hit,lat,lon
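For clarity, the zonal-index step above is just the difference of two cosine-weighted latitude-band averages over the chosen longitude sector; a minimal self-contained sketch on synthetic Z500 data follows. The explicit weighting is an assumption about what UT.calc_weightedAve does, and all values are synthetic.

import numpy as np

def band_ave(z, lat, lon, latmin, latmax, lonq):
    """Cosine-weighted mean of z [..., lat, lon] over a latitude band and a
    prescribed set of longitude indices."""
    latq = np.where((lat >= latmin) & (lat <= latmax))[0]
    zs = z[..., latq, :][..., lonq]
    lon2, lat2 = np.meshgrid(lon[lonq], lat[latq])
    w = np.cos(np.deg2rad(lat2))
    return np.nansum(zs * w, axis=(-2, -1)) / np.sum(w)

lat = np.linspace(-90, 90, 73)
lon = np.arange(0, 360, 5.)
z500 = np.random.randn(10, 90, lat.size, lon.size)      # [ens, day, lat, lon]
lonq = np.where((lon >= 120) & (lon <= 170))[0]          # Pacific sector
zonal_index = (band_ave(z500, lat, lon, 20, 50, lonq)
               - band_ave(z500, lat, lon, 60, 90, lonq)) # lower minus upper band
print(zonal_index.shape)   # -> (10, 90)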
Code Example #26
difftotallhsh = [difftotal_FITHIT,difftotal_FICCIT,difftotal_FICTHIT]

### Take average above 40N
latq = np.where(lat > 40)[0]
latslice = lat[latq]
lon2,lat2 = np.meshgrid(lon,latslice)

### Mask out values not over SIC grid cells
rnetvals = []
for i in range(len(difftotallhsh)):
    rnetvalsq = difftotallhsh[i] * sicn
    rnetvalsq[np.where(rnetvalsq == 0.0)] = np.nan
    rnetvalsq = rnetvalsq[:,latq,:]
    
    rnetvals.append(rnetvalsq)
    
### Calculate weighted average
weightedrnet = np.empty((len(rnetvals),sicn.shape[0]))
for i in range(len(rnetvals)):
    weightedrnet[i,:] = UT.calc_weightedAve(rnetvals[i],lat2)
    
### Create files for rnet
np.savetxt(directorydata2 + 'weightedsic_rnets.txt',weightedrnet.transpose(),
           delimiter=',',header='  '.join(experiments)+'\n',
       footer='\n File contains net surface energy flux response' \
       '\n which are weighted above 40N for SIC cells >10% \n' \
       ' in all months of the year',newline='\n\n')

print('Completed: Script done!')
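Every snippet in this listing leans on UT.calc_weightedAve; as a point of reference, a minimal self-contained sketch of a cosine-of-latitude weighted mean over the trailing (lat, lon) axes is given below. This is an assumption about what the utility computes, not its actual source.

import numpy as np

def weighted_ave_sketch(field, lat2):
    """Cosine-of-latitude weighted mean over the trailing (lat, lon) axes,
    ignoring NaNs; field is (..., nlat, nlon) and lat2 is (nlat, nlon)."""
    w = np.broadcast_to(np.cos(np.deg2rad(lat2)), field.shape).copy()
    w[np.isnan(field)] = 0.0                   # drop missing cells from weights
    num = np.nansum(field * w, axis=(-2, -1))
    den = np.sum(w, axis=(-2, -1))
    return num / den

### Example on synthetic data: 3 ensemble members on a small grid
lat = np.linspace(40, 90, 26)
lon = np.arange(0, 360, 5.)
lon2, lat2 = np.meshgrid(lon, lat)
field = np.random.randn(3, lat.size, lon.size)
print(weighted_ave_sketch(field, lat2))        # -> three area-weighted means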

Code Example #27
        percchange = (abs(varx) / abs(vary)) * 100.

        ### Test if real values
        if np.isnan(percchange).all() == True:
            percchange[np.where(np.isnan(percchange))] = 0.0
        if percchange > 500:
            percchange = 0.0

        print('*Completed: Finished calc_iceRatio function!')
        return percchange, varx, vary

    fithitave = np.empty((3))
    ficcitave = np.empty((3))
    for i in range(len(fithit)):
        fithitave[i] = UT.calc_weightedAve(abs(fithit[i]), latnew)
        ficcitave[i] = UT.calc_weightedAve(abs(ficcit[i]), latnew)

    ratio = []
    for i in range(len(fithit)):
        percchangeq, varx, vary = calc_iceRatio(fithitave[i], ficcitave[i],
                                                False, 95, 5)

        ratio.append(percchangeq)
    ratiovar.append(ratio)
meanratiovar = np.asarray(ratiovar).squeeze()

#### Save file
np.savetxt(directorydata2 + 'sicsitratio_DJF.txt',np.round(meanratiovar.transpose(),1),delimiter=',',
           fmt='%3.1f',header='  '.join(varnames)+'\n',
           footer='\n File contains ratio values of relative contributions' \
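The snippet above enters calc_iceRatio part-way through and ends mid-statement; for completeness, here is a self-contained sketch of a percent-ratio helper in the same spirit. The argument names, the 500% cap, and the behavior are assumptions patterned on the visible fragment, not the original function.

import numpy as np

def calc_ratio_sketch(varx, vary, cap=500.):
    """Ratio of two (already area-averaged) magnitudes in percent, with NaN
    and runaway values zeroed out as in the fragment above."""
    percchange = (np.abs(varx) / np.abs(vary)) * 100.
    if np.isnan(percchange):
        percchange = 0.0
    elif percchange > cap:
        percchange = 0.0
    return percchange, varx, vary

print(calc_ratio_sketch(0.6, 1.5)[0])   # -> 40.0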
Code Example #28
def readVariablesSLP(varnames, period, location):
    lat, lon, time, lev, tashit = DO.readMeanExperiAll('%s' % varnames, 'HIT',
                                                       'surface')
    lat, lon, time, lev, tasfict = DO.readMeanExperiAll(
        '%s' % varnames, 'FICT', 'surface')

    ### Create 2d array of latitude and longitude
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Read in QBO phases
    filenamehitp = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitn = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    filenamehitp2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitn2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    pos_hit = np.append(
        np.genfromtxt(filenamehitp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitp2, unpack=True, usecols=[0], dtype='int') +
        100)
    neg_hit = np.append(
        np.genfromtxt(filenamehitn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitn2, unpack=True, usecols=[0], dtype='int') +
        100)

    filenamefictp = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[0]
    filenamefictn = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[2]
    filenamefictp2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        0]
    filenamefictn2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        2]
    pos_fict = np.append(
        np.genfromtxt(filenamefictp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictp2, unpack=True, usecols=[0], dtype='int') +
        100)
    neg_fict = np.append(
        np.genfromtxt(filenamefictn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictn2, unpack=True, usecols=[0], dtype='int') +
        100)

    ### Concatenate runs
    runs = [tashit, tasfict]

    ### Separate per period (daily data; only 'D' is handled here)
    if period == 'D':
        tas_mo = np.empty(
            (2, tashit.shape[0], 31, tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = runs[i][:, 90:121, :, :]
    else:
        raise ValueError('Wrong period selected!')

    ### Composite by QBO phase
    tas_mohitpos = tas_mo[0][pos_hit, :, :, :]
    tas_mofictpos = tas_mo[1][pos_fict, :, :, :]

    tas_mohitneg = tas_mo[0][neg_hit, :, :, :]
    tas_mofictneg = tas_mo[1][neg_fict, :, :, :]

    ### Compute comparisons for months - select region
    if varnames == 'SLP':
        lonq = np.where((lon >= 80) & (lon <= 120))[0]
        fictpos = tas_mofictpos[:, :, :, lonq]
        fictneg = tas_mofictneg[:, :, :, lonq]
        latq = np.where((lat >= 40) & (lat <= 65))[0]
        fictpos = fictpos[:, :, latq]
        fictneg = fictneg[:, :, latq]
        lat2sq = lat2[latq, :]
        lat2s = lat2sq[:, lonq]
        fictpos = UT.calc_weightedAve(fictpos, lat2s)
        fictneg = UT.calc_weightedAve(fictneg, lat2s)

        hitpos = tas_mohitpos[:, :, :, lonq]
        hitneg = tas_mohitneg[:, :, :, lonq]
        hitpos = hitpos[:, :, latq]
        hitneg = hitneg[:, :, latq]
        hitpos = UT.calc_weightedAve(hitpos, lat2s)
        hitneg = UT.calc_weightedAve(hitneg, lat2s)

    diffruns = [
        fictpos.squeeze(),
        fictneg.squeeze(),
        hitpos.squeeze(),
        hitneg.squeeze()
    ]

    return diffruns, lat, lon, lev
Code Example #29
File: calc_SiberianHigh.py  Project: zmlabe/SeaIceQBO
def readVariables(varnames, period, location):
    ### Call function for surface temperature data from each run
    lat, lon, time, lev, tashit = MO.readExperiAll('%s' % varnames, 'HIT',
                                                   'surface')
    lat, lon, time, lev, tasfict = MO.readExperiAll('%s' % varnames, 'FICT',
                                                    'surface')

    ### Create 2d array of latitude and longitude
    lon2, lat2 = np.meshgrid(lon, lat)

    ### Read in QBO phases
    filenamehitp = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitno = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[1]
    filenamehitn = directorydata + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    filenamehitp2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[0]
    filenamehitno2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[1]
    filenamehitn2 = directorydata2 + 'HIT/monthly/QBO_%s_HIT.txt' % qbophase[2]
    pos_hit = np.append(
        np.genfromtxt(filenamehitp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitp2, unpack=True, usecols=[0], dtype='int') +
        101)
    non_hit = np.append(
        np.genfromtxt(filenamehitno, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitno2, unpack=True, usecols=[0], dtype='int') +
        101)
    neg_hit = np.append(
        np.genfromtxt(filenamehitn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamehitn2, unpack=True, usecols=[0], dtype='int') +
        101)

    filenamefictp = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[0]
    filenamefictno = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        1]
    filenamefictn = directorydata + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[2]
    filenamefictp2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        0]
    filenamefictno2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        1]
    filenamefictn2 = directorydata2 + 'FICT/monthly/QBO_%s_FICT.txt' % qbophase[
        2]
    pos_fict = np.append(
        np.genfromtxt(filenamefictp, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictp2, unpack=True, usecols=[0], dtype='int') +
        101)
    non_fict = np.append(
        np.genfromtxt(filenamefictno, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictno2, unpack=True, usecols=[0], dtype='int') +
        101)
    neg_fict = np.append(
        np.genfromtxt(filenamefictn, unpack=True, usecols=[0], dtype='int'),
        np.genfromtxt(filenamefictn2, unpack=True, usecols=[0], dtype='int') +
        101)

    ### Concatenate runs
    runs = [tashit, tasfict]

    ### Separate per periods (ON,DJ,FM)
    if period == 'ON':
        tas_mo = np.empty(
            (3, tashit.shape[0], tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = np.nanmean(runs[i][:, 9:11, :, :], axis=1)
    elif period == 'DJ':
        tas_mo = np.empty(
            (3, tashit.shape[0] - 1, tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i], tas_mo[i] = UT.calcDecJan(runs[i], runs[i], lat, lon,
                                                 'surface', 1)
    elif period == 'FM':
        tas_mo = np.empty(
            (3, tashit.shape[0], tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = np.nanmean(runs[i][:, 1:3, :, :], axis=1)
    elif period == 'DJF':
        tas_mo = np.empty(
            (3, tashit.shape[0] - 1, tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i], tas_mo[i] = UT.calcDecJanFeb(runs[i], runs[i], lat, lon,
                                                    'surface', 1)
    elif period == 'M':
        tas_mo = np.empty(
            (3, tashit.shape[0], tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = runs[i][:, 2, :, :]
    elif period == 'D':
        tas_mo = np.empty(
            (3, tashit.shape[0], tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = runs[i][:, -1, :, :]
    elif period == 'N':
        tas_mo = np.empty(
            (3, tashit.shape[0], tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = runs[i][:, -2, :, :]
    elif period == 'ND':
        tas_mo = np.empty(
            (3, tashit.shape[0], tashit.shape[2], tashit.shape[3]))
        for i in range(len(runs)):
            tas_mo[i] = np.nanmean(runs[i][:, -2:, :, :], axis=1)
    else:
        raise ValueError('Wrong period selected! (ON,DJ,FM)')

    ### Composite by QBO phase
    tas_mohitpos = tas_mo[0][pos_hit, :, :]
    tas_mofictpos = tas_mo[1][pos_fict, :, :]

    tas_mohitnon = tas_mo[0][non_hit, :, :]
    tas_mofictnon = tas_mo[1][non_fict, :, :]

    tas_mohitneg = tas_mo[0][neg_hit, :, :]
    tas_mofictneg = tas_mo[1][neg_fict, :, :]

    ### Compute comparisons for months - select region
    if varnames == 'SLP':
        lonq = np.where((lon >= 80) & (lon <= 120))[0]
        ficthitpos = tas_mofictpos[:, :, lonq]
        ficthitnon = tas_mofictnon[:, :, lonq]
        ficthitneg = tas_mofictneg[:, :, lonq]
        latq = np.where((lat >= 40) & (lat <= 65))[0]
        ficthitpos = ficthitpos[:, latq]
        ficthitnon = ficthitnon[:, latq]
        ficthitneg = ficthitneg[:, latq]
        lat2sq = lat2[latq, :]
        lat2s = lat2sq[:, lonq]
        ficthitpos = UT.calc_weightedAve(ficthitpos, lat2s)
        ficthitnon = UT.calc_weightedAve(ficthitnon, lat2s)
        ficthitneg = UT.calc_weightedAve(ficthitneg, lat2s)
    diffruns = [
        ficthitpos.squeeze(),
        ficthitnon.squeeze(),
        ficthitneg.squeeze()
    ]

    return diffruns, lat, lon, lev
Code Example #30
File: read_AMIP6.py  Project: muskanmahajan37/AA
def readAMIP6Profile(variableq, experiment, level, detrend, sliceeq, period,
                     levelVert, epoch):
    print('\n>>> Using readAMIP6Profile function! \n')
    ###########################################################################
    ###########################################################################
    ###########################################################################
    ### Import modules
    import numpy as np
    from netCDF4 import Dataset
    import calc_Detrend as DT
    import calc_Utilities as UT

    ### Declare knowns
    ensembles = 10
    months = 12
    years = np.arange(1979, 2016 + 1, 1)

    ### Directory for experiments (remote server - Seley)
    directorydata = '/seley/zlabe/simu/'
    directorydata2 = '/home/zlabe/Documents/Research/AA/Data/'

    ###########################################################################
    ###########################################################################
    variable = variableq

    ###########################################################################
    ###########################################################################
    ###########################################################################
    ### Read in lat,lon,time from known file
    if level == 'surface':  # 3d variables
        dataq = Dataset(directorydata +
                        '%s1/monthly/T2M_1978-2016.nc' % experiment)
        time = dataq.variables['time'][12:]
        lev = 'surface'
        lat = dataq.variables['latitude'][:]
        lon = dataq.variables['longitude'][:]
        dataq.close()

        ###########################################################################
        ###########################################################################
        if sliceeq == False:
            ### Create empty variable
            varq = np.empty(
                (ensembles, time.shape[0], lat.shape[0], lon.shape[0]))
            varq[:, :, :, :] = np.nan  ### fill with nans

        elif sliceeq == True:
            ### Slice for Arctic
            latq = np.where(lat >= 65)[0]
            lat = lat[latq]
            ### Create empty variable
            varq = np.empty(
                (ensembles, time.shape[0], lat.shape[0], lon.shape[0]))
            varq[:, :, :, :] = np.nan  ### fill with nans
            print('SLICE for Arctic!')
        else:
            raise ValueError('Selected wrong slicing!')

    ###########################################################################
    ###########################################################################
    elif level == 'profile':  # 4d variables
        dataq = Dataset(directorydata +
                        '%s1/monthly/TEMP_1978-2016.nc' % experiment)
        time = dataq.variables['time'][12:]
        lev = dataq.variables['level'][:]
        lat = dataq.variables['latitude'][:]
        lon = dataq.variables['longitude'][:]
        dataq.close()

        ###########################################################################
        ###########################################################################
        if sliceeq == False:
            ### Create empty variable
            varq = np.empty((ensembles, time.shape[0], lev.shape[0],
                             lat.shape[0], lon.shape[0]))
            varq[:, :, :, :, :] = np.nan  ### fill with nans
        elif sliceeq == True:
            ### Slice for Arctic
            latq = np.where(lat >= 65)[0]
            lat = lat[latq]
            ### Create empty variable
            varq = np.empty((ensembles, time.shape[0], lev.shape[0],
                             lat.shape[0], lon.shape[0]))
            varq[:, :, :, :, :] = np.nan  ### fill with nans
            print('SLICE for Arctic!')
        else:
            raise ValueError('Selected wrong slicing!')

    ###########################################################################
    ###########################################################################
    else:
        raise ValueError('Selected wrong height - (surface or profile!)!')

    ###########################################################################
    ###########################################################################
    ### Path name for file for each ensemble member
    for i in range(ensembles):
        filename = directorydata + '%s%s/' % (experiment,i+1) + \
                    'monthly/' + variable + '_1978-2016.nc'

        ###########################################################################
        ###########################################################################
        ### Read in Data
        if sliceeq == False:
            if level == 'surface':  # 3d variables
                data = Dataset(filename, 'r')
                varq[i, :, :, :] = data.variables[variable][12:468, :, :]
                data.close()
                print('Completed: Read data %s%s- %s!' %
                      (experiment, i + 1, variable))
            elif level == 'profile':  # 4d variables
                data = Dataset(filename, 'r')
                varq[i, :, :, :, :] = data.variables[variable][12:468, :, :, :]
                data.close()
                print('Completed: Read data %s%s- %s!' %
                      (experiment, i + 1, variable))
            else:
                raise ValueError(
                    'Selected wrong height - (surface or profile!)!')

    ###########################################################################
    ###########################################################################
        elif sliceeq == True:
            if level == 'surface':  # 3d variables
                data = Dataset(filename, 'r')
                varq[i, :, :, :] = data.variables[variable][12:468, latq, :]
                data.close()
                print('Completed: Read data %s%s- %s!' %
                      (experiment, i + 1, variable))
            elif level == 'profile':  # 4d variables
                data = Dataset(filename, 'r')
                varq[i, :, :, :, :] = data.variables[variable][12:468, :,
                                                               latq, :]
                data.close()
                print('Completed: Read data %s%s- %s!' %
                      (experiment, i + 1, variable))

            else:
                raise ValueError(
                    'Selected wrong height - (surface or profile!)!')

    ###########################################################################
    ###########################################################################
    ###########################################################################
    ### Reshape to split years and months
    if level == 'surface':  # 3d variables
        var = np.reshape(varq, (ensembles, varq.shape[1] // 12, months,
                                lat.shape[0], lon.shape[0]))
    elif level == 'profile':  # 4d variables
        var = np.reshape(varq, (ensembles, varq.shape[1] // 12, months,
                                lev.shape[0], lat.shape[0], lon.shape[0]))
    else:
        raise ValueError('Selected wrong height - (surface or profile!)!')
    print('\nCompleted: Reshaped %s array!' % (variable))

    ### Save computer memory
    del varq

    ###########################################################################
    ###########################################################################
    ###########################################################################
    ### Convert units
    if variable in ('TEMP', 'T2M'):
        var = var - 273.15  # Kelvin to degrees Celsius
        print('Completed: Changed units (K to C)!')
    elif variable == 'SWE':
        var = var * 1000.  # Meters to Millimeters
        print('Completed: Changed units (m to mm)!')

    ###########################################################################
    ###########################################################################
    ###########################################################################
    ### Missing data (fill value to nans)
    var[np.where(var <= -8.99999987e+33)] = np.nan
    print('Completed: Filled missing data to nan!')

    ### Detrend data if turned on
    if detrend == True:
        var = DT.detrendData(var, level, 'monthly')

    ### Slice over month(s) of interest
    if period == 'Annual':
        varm = np.nanmean(var[:, :, :, :, :], axis=2)    # mean over month axis
    elif period == 'OND':
        varm = np.nanmean(var[:, :, -3:, :, :], axis=2)
    elif period == 'ND':
        varm = np.nanmean(var[:, :, -2:, :, :], axis=2)
    elif period == 'D':
        varm = var[:, :, -1:, :, :].squeeze()
    elif period == 'F':
        varm = var[:, :, 1, :, :].squeeze()
    elif period == 'FM':
        varm = np.nanmean(var[:, :, 1:3, :, :], axis=2)
    elif period == 'JFM':
        varm = np.nanmean(var[:, :, 0:3, :, :], axis=2)
    elif period == 'DJF':
        varm = np.empty((var.shape[0], var.shape[1] - 1, var.shape[3],
                         var.shape[4], var.shape[5]))
        for j in range(var.shape[0]):
            varm[j, :, :, :, :] = UT.calcDecJanFeb(var[j, :, :, :, :, :],
                                                   var[j, :, :, :, :, :], lat,
                                                   lon, 'profile', 17)

    ### Calculate ensemble mean
    mean = np.nanmean(varm, axis=0)

    ### Calculate vertical levels
    levqq = np.where((lev >= levelVert))[0]
    levvv = lev[levqq]
    levelmean = mean[:, levqq, :, :]

    ### Meshgrid for lat,lon
    lon2, lat2 = np.meshgrid(lon, lat)
    polarave = UT.calc_weightedAve(levelmean, lat2)

    ### Epoch differences
    new = np.nanmean(polarave[-epoch:, :], axis=0)
    old = np.nanmean(polarave[:epoch, :], axis=0)
    diff = new - old

    ### Save file
    np.savetxt(directorydata2 + '%s_1000-%s_%s.txt' %
               (experiment, levelVert, variable),
               diff,
               delimiter=',',
               fmt='%.3f')

    print('\n>>> Completed: Finished readAMIP6Profile function!')
    return lat, lon, time, levvv, diff
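For orientation, a hedged usage sketch of readAMIP6Profile follows; the experiment name, period, vertical cutoff, and epoch length are illustrative assumptions, and the call only works where the /seley/zlabe/simu/ directories referenced above exist.

### Hypothetical call; argument values are illustrative only
lat, lon, time, levvv, diff = readAMIP6Profile('TEMP', 'AMIP6', 'profile',
                                               detrend=False, sliceeq=True,
                                               period='DJF', levelVert=500,
                                               epoch=10)
print('Vertical levels kept:', levvv.shape, '| epoch difference:', diff.shape)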