# Initialize toe for each basin (density, lat)
toe1_a = np.ma.masked_all((levN, latN))
toe1_p = np.ma.masked_all((levN, latN))
toe1_i = np.ma.masked_all((levN, latN))
toe2_a = np.ma.masked_all((levN, latN))
toe2_p = np.ma.masked_all((levN, latN))
toe2_i = np.ma.masked_all((levN, latN))

# Initialize output variable
varToE1 = np.ma.masked_all((basinN, levN, latN))  # (>1std) (basin, density, latitude)
varToE2 = np.ma.masked_all((basinN, levN, latN))  # (>2std) (basin, density, latitude)

# Compute ToE as last date when diff 1%CO2 - PiControl is larger than mult * stddev
toe1_a = np.reshape(findToE(varsignal_a, stdvarpiC_a, 1), (levN, latN))
toe1_p = np.reshape(findToE(varsignal_p, stdvarpiC_p, 1), (levN, latN))
toe1_i = np.reshape(findToE(varsignal_i, stdvarpiC_i, 1), (levN, latN))
toe2_a = np.reshape(findToE(varsignal_a, stdvarpiC_a, multStd), (levN, latN))
toe2_p = np.reshape(findToE(varsignal_p, stdvarpiC_p, multStd), (levN, latN))
toe2_i = np.reshape(findToE(varsignal_i, stdvarpiC_i, multStd), (levN, latN))

# Save in output variable
varToE1[1, :, :] = toe1_a
varToE1[2, :, :] = toe1_p
varToE1[3, :, :] = toe1_i
varToE2[1, :, :] = toe2_a
varToE2[2, :, :] = toe2_p
varToE2[3, :, :] = toe2_i
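# The excerpts above and below all rely on findToE(signal, noise, mult), which is
# defined elsewhere in the repository and not shown here. The sketch below is an
# illustrative stand-in only (its name and exact semantics are assumptions, based
# on the comments in these scripts): the ToE of each column of a (time, space)
# signal is taken as the first index after the last time |signal| stays within
# mult * noise, i.e. the start of permanent emergence (len(signal) if it never emerges).
import numpy as np

def findToE_sketch(signal, noise, mult):
    """Hedged stand-in for findToE: signal is (time,) or (time, space),
    noise is a scalar or (space,)."""
    sig = np.abs(np.ma.filled(np.ma.asarray(signal), 0.))
    if sig.ndim == 1:
        sig = sig[:, np.newaxis]
    thr = mult * np.ma.filled(np.ma.asarray(noise), np.inf)   # masked noise -> never emerges
    below = sig <= thr                                        # True where not yet emerged
    last_below = sig.shape[0] - 1 - np.argmax(below[::-1, :], axis=0)
    toe = np.where(below.any(axis=0), last_below + 1, 0)
    return toe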
varsignal_i[:, j] = averageDom(varCO2[:, 3, :, :] - meanvarpiC[3, :, :], 3, domain['Indian'], lat, density)
if method_noise == 'average_std':
    varnoise_i[j] = averageDom(varstd[3, :, :], 2, domain['Indian'], lat, density)
else:
    varnoise_i[j] = np.ma.std(averageDom(varpiC[:, 3, :, :], 3, domain['Indian'], lat, density), axis=0)

# Compute ToE of averaged domain
if domain['Atlantic'] != None and np.ma.is_masked(varnoise_a[j]) == False:
    toe1_a[j] = findToE(varsignal_a[:, j], varnoise_a[j], 1)
    toe2_a[j] = findToE(varsignal_a[:, j], varnoise_a[j], multStd)
    print(toe1_a[j], toe2_a[j])
if domain['Pacific'] != None and np.ma.is_masked(varnoise_p[j]) == False:
    toe1_p[j] = findToE(varsignal_p[:, j], varnoise_p[j], 1)
    toe2_p[j] = findToE(varsignal_p[:, j], varnoise_p[j], multStd)
if domain['Indian'] != None and np.ma.is_masked(varnoise_i[j]) == False:
    toe1_i[j] = findToE(varsignal_i[:, j], varnoise_i[j], 1)
    toe2_i[j] = findToE(varsignal_i[:, j], varnoise_i[j], multStd)

# Take out runs where the signal is of opposite sign than expected
if signal_domains[j] == 'fresher':
    if np.ma.mean(varsignal_a[-5:, j], axis=0) > multStd * varnoise_a[j]:
if use_piC == False:
    varsignal_a[145:, :, :] = varrcp_a - meanvarhn_a
    varsignal_p[145:, :, :] = varrcp_p - meanvarhn_p
    varsignal_i[145:, :, :] = varrcp_i - meanvarhn_i
else:
    varsignal_a[145:, :, :] = varrcp_a - meanvarpiC_a
    varsignal_p[145:, :, :] = varrcp_p - meanvarpiC_p
    varsignal_i[145:, :, :] = varrcp_i - meanvarpiC_i

# Reorganise i,j dims in single dimension data (speeds up loops)
varsignal_a = np.reshape(varsignal_a, (timN, levN*latN))
varsignal_p = np.reshape(varsignal_p, (timN, levN*latN))
varsignal_i = np.reshape(varsignal_i, (timN, levN*latN))

# Compute ToE as last date when diff hist+RCP - histNat is larger than mult * stddev
toe_a = np.reshape(findToE(varsignal_a, stdvarhn_a, multStd), (levN, latN))
toe_p = np.reshape(findToE(varsignal_p, stdvarhn_p, multStd), (levN, latN))
toe_i = np.reshape(findToE(varsignal_i, stdvarhn_i, multStd), (levN, latN))

# Save in output variable
varToE[k, 1, :, :] = toe_a
varToE[k, 2, :, :] = toe_p
varToE[k, 3, :, :] = toe_i

# Save in output file
if use_piC == False:
    fileName = 'cmip5.'+model['name']+'.toe_zonal_rcp_histNat.nc'
    dir = '/home/ysilvy/Density_bining/Yona_analysis/data/toe_zonal/toe_rcp85_histNat/'
    description = 'Time of Emergence hist+rcp8.5 vs. histNat for each member. \n' \
        'The historical runs are prolonged by the 95 years of RCP8.5. ' \
        'The ensemble mean historicalNat is used here for all historical runs of the model. \n' \
varnoise_a[j] = np.ma.std(averageDom(varpiC[:, 1, :, :], 3, domain['Atlantic'], lat, density), axis=0)

if domain['Pacific'] != None:
    varsignal_p[:, j] = averageDom(varCO2[:, 2, :, :] - varpiC[:, 2, :, :], 3, domain['Pacific'], lat, density)
    if method_noise == 'average_std':
        varnoise_p[j] = averageDom(varstd[2, :, :], 2, domain['Pacific'], lat, density)
    else:
        varnoise_p[j] = np.ma.std(averageDom(varpiC[:, 2, :, :], 3, domain['Pacific'], lat, density), axis=0)

if domain['Indian'] != None:
    varsignal_i[:, j] = averageDom(varCO2[:, 3, :, :] - varpiC[:, 3, :, :], 3, domain['Indian'], lat, density)
    if method_noise == 'average_std':
        varnoise_i[j] = averageDom(varstd[3, :, :], 2, domain['Indian'], lat, density)
    else:
        varnoise_i[j] = np.ma.std(averageDom(varpiC[:, 3, :, :], 3, domain['Indian'], lat, density), axis=0)

# Compute ToE of averaged domain
if domain['Atlantic'] != None and np.ma.is_masked(varnoise_a[j]) == False:
    toe_a[j] = findToE(varsignal_a[:, j], varnoise_a[j], multStd)
if domain['Pacific'] != None and np.ma.is_masked(varnoise_p[j]) == False:
    toe_p[j] = findToE(varsignal_p[:, j], varnoise_p[j], multStd)
if domain['Indian'] != None and np.ma.is_masked(varnoise_i[j]) == False:
    toe_i[j] = findToE(varsignal_i[:, j], varnoise_i[j], multStd)

varToE[1, :] = toe_a
varToE[2, :] = toe_p
varToE[3, :] = toe_i
print('')

# Save in output file
fileName = 'cmip5.'+model['name']+'.toe_1pctCO2vsPiControl_method2_'+method_noise+'.nc'
if method_noise == 'average_std':
def mmeAveMsk2D(listFiles, years, inDir, outDir, outFile, timeInt, mme, timeBowl, ToeType, debug=True):
    '''
    The mmeAveMsk2D() function averages rhon/lat density-binned files with differing masks
    It outputs:
      - the MME
      - a percentage of non-masked bins
      - the sign agreement of period2-period1 differences
      - ToE per run and for MME

    Author: Eric Guilyardi : [email protected]

    Created on Tue Nov 25 13:56:20 CET 2014

    Inputs:
    -------
    - listFiles(str)      - the list of files to be averaged
    - years(t1,t2)        - years for slice read
    - inDir[](str)        - input directory where files are stored (add histnat as inDir[1] for ToE)
    - outDir(str)         - output directory
    - outFile(str)        - output file
    - timeInt(2xindices)  - indices of init period to compare with (e.g. [1,20])
    - mme(bool)           - multi-model mean (will read in single model ensemble stats)
    - timeBowl            - either time 'mean' or time 'max' bowl used to mask out bowl
    - ToeType(str)        - ToE type ('F': none, 'histnat')
                            -> requires running first mm+mme without ToE to compute Stddev
    - debug <optional>    - boolean value

    Notes:
    -----
    - EG 25 Nov 2014 - Initial function write
    - EG 27 Nov 2014 - Rewrite with loop on variables
    - EG 06 Dec 2014 - Added agreement on difference with init period - save as <var>Agree
    - EG 07 Dec 2014 - Read bowl to remove points above bowl - save as <var>Bowl
    - EG 19 Apr 2016 - ToE computation (just for 2D files)
    - EG 07 Oct 2016 - add 3D file support
    - EG 21 Nov 2016 - move 3D support to new function
    - EG 10 Jan 2017 - added timeBowl option

    TODO:
    -----
    - remove loops
    - add computation of ToE per model (toe 1 and toe 2) see ticket #50
    - add isonhtc (see ticket #48)
    '''

    # CDMS initialisation - netCDF compression
    comp = 1  # 0 for no compression
    cdm.setNetcdfShuffleFlag(comp)
    cdm.setNetcdfDeflateFlag(comp)
    cdm.setNetcdfDeflateLevelFlag(comp)
    cdm.setAutoBounds('on')
    # Numpy initialisation
    npy.set_printoptions(precision=2)

    if debug:
        debug = True
    else:
        debug = False

    # File dim and grid inits
    t1 = years[0]
    t2 = years[1]
    if t2 <= 0:
        useLastYears = True
        t2 = -t2
    else:
        useLastYears = False
    t10 = t1
    t20 = t2
    # Bound of period average to remove
    peri1 = timeInt[0]
    peri2 = timeInt[1]
    fi = cdm.open(inDir[0] + '/' + listFiles[0])
    isond0 = fi['isondepth']  # Create variable handle
    # Get grid objects
    axesList = isond0.getAxisList()
    sigmaGrd = isond0.getLevel()
    latN = isond0.shape[3]
    levN = isond0.shape[2]
    basN = isond0.shape[1]
    varsig = 'ptopsigma'

    # Declare and open files for writing
    if os.path.isfile(outDir + '/' + outFile):
        os.remove(outDir + '/' + outFile)
    outFile_f = cdm.open(outDir + '/' + outFile, 'w')

    # Testing mme with less models
    #listFiles = listFiles[0:4]

    #timN = isond0.shape[0]
    timN = t2 - t1
    runN = len(listFiles)

    print ' Number of members:', len(listFiles)

    valmask = isond0.missing_value[0]
    varList = ['isondepth', 'isonpers', 'isonso', 'isonthetao', 'isonthick', 'isonvol']
    varFill = [0., 0., valmask, valmask, 0., 0.]
    # init arrays (2D rho/lat)
    percent = npy.ma.ones([runN, timN, basN, levN, latN], dtype='float32') * 0.
    #minbowl = npy.ma.ones([basN,latN], dtype='float32')*1000.
    varbowl = npy.ma.ones([runN, timN, basN, latN], dtype='float32') * 1.
    #varList = ['isondepth']
    #print ' !!! ### Testing one variable ###'
    #varList = ['isonthetao']

    # init time axis
    time = cdm.createAxis(npy.float32(range(timN)))
    time.id = 'time'
    time.units = 'years since 1861'
    time.designateTime()
    # init ensemble axis
    ensembleAxis = cdm.createAxis(npy.float32(range(runN)))
    ensembleAxis.id = 'members'
    ensembleAxis.units = 'N'

    # loop on variables
    for iv, var in enumerate(varList):

        # Array inits (2D rho/lat 3D rho/lat/lon)
        #shapeR = [basN,levN,latN]
        isonvar = npy.ma.ones([runN, timN, basN, levN, latN], dtype='float32') * valmask
        print('isonvar shape: ', isonvar.shape)
        vardiff, varbowl2D = [npy.ma.ones([runN, timN, basN, levN, latN], dtype='float32') for _ in range(2)]
        varstd, varToE1, varToE2 = [npy.ma.ones([runN, basN, levN, latN], dtype='float32') * valmask for _ in range(3)]
        varones = npy.ma.ones([runN, timN, basN, levN, latN], dtype='float32') * 1.

        print ' Variable ', iv, var

        # loop over files to fill up array
        for i, file in enumerate(listFiles):
            ft = cdm.open(inDir[0] + '/' + file)
            model = file.split('.')[1]
            timeax = ft.getAxis('time')
            file1d = replace(inDir[0] + '/' + file, '2D', '1D')
            if os.path.isfile(file1d):
                f1d = cdm.open(file1d)
            else:
                print 'ERROR:', file1d, 'missing (if mme, run 1D first)'
                sys.exit(1)
            tmax = timeax.shape[0]
            if i == 0:
                tmax0 = tmax
            # adapt [t1,t2] time bounds to piControl last NN years
            if useLastYears:
                t1 = tmax - t20
                t2 = tmax
            else:
                if tmax != tmax0:
                    print 'wrong time axis: exiting...'
                    return

            # read array
            # loop over time/density for memory management
            for it in range(timN):
                t1r = t1 + it
                t2r = t1r + 1
                isonRead = ft(var, time=slice(t1r, t2r))
                if varFill[iv] != valmask:
                    isonvar[i, it, ...] = isonRead.filled(varFill[iv])
                else:
                    isonvar[i, it, ...] = isonRead

            # compute percentage of non-masked points across MME
            if iv == 0:
                maskvar = mv.masked_values(isonRead.data, valmask).mask
                percent[i, ...] = npy.float32(npy.equal(maskvar, 0))
            if mme:
                # if mme then just accumulate Bowl, Agree fields
                varst = var + 'Agree'
                vardiff[i, ...] = ft(varst, time=slice(t1, t2))
                varb = var + 'Bowl'
                varbowl2D[i, ...] = ft(varb, time=slice(t1, t2))
            else:
                # Compute difference with average of first initN years
                varinit = cdu.averager(isonvar[i, peri1:peri2, ...], axis=0)
                for t in range(timN):
                    vardiff[i, t, ...] = isonvar[i, t, ...] - varinit
                vardiff[i, ...].mask = isonvar[i, ...].mask
                # Read bowl and truncate 2D field above bowl
                if iv == 0:
                    bowlRead = f1d(varsig, time=slice(t1, t2))
                    varbowl[i, ...] = bowlRead
                # Compute Stddev
                varstd[i, ...] = npy.ma.std(isonvar[i, ...], axis=0)
                # Compute ToE
                if ToeType == 'histnat':
                    # Read mean and Std dev from histnat
                    if i == 0:
                        filehn = glob.glob(inDir[1] + '/cmip5.' + model + '.*zon2D*')[0]
                        #filehn = replace(outFile,'historical','historicalNat')
                        fthn = cdm.open(filehn)
                        varmeanhn = fthn(var)
                        varst = var + 'Std'
                        varmaxstd = fthn(varst)
                    toemult = 1.
                    signal = npy.reshape(isonvar[i, ...] - varmeanhn, (timN, basN * levN * latN))
                    noise = npy.reshape(varmaxstd, (basN * levN * latN))
                    varToE1[i, ...] = npy.reshape(findToE(signal, noise, toemult), (basN, levN, latN))
                    toemult = 2.
                    varToE2[i, ...] = npy.reshape(findToE(signal, noise, toemult), (basN, levN, latN))
            ft.close()
            f1d.close()
        # <-- end of loop on files

        # Compute percentage of bin presence
        # Only keep points where percent > 50%
        if iv == 0:
            percenta = (cdu.averager(percent, axis=0)) * 100.
            percenta = mv.masked_less(percenta, 50)
            percentw = cdm.createVariable(percenta, axes=[time, axesList[1], axesList[2], axesList[3]], id='isonpercent')
            percentw._FillValue = valmask
            percentw.long_name = 'percentage of MME bin'
            percentw.units = '%'
            outFile_f.write(percentw.astype('float32'))

        # Sign of difference
        if mme:
            vardiffsgSum = cdu.averager(vardiff, axis=0)
            vardiffsgSum = cdm.createVariable(vardiffsgSum, axes=[time, axesList[1], axesList[2], axesList[3]], id='foo')
            vardiffsgSum = maskVal(vardiffsgSum, valmask)
            vardiffsgSum.mask = percentw.mask
        else:
            vardiffsg = npy.copysign(varones, vardiff)
            # average signs
            vardiffsgSum = cdu.averager(vardiffsg, axis=0)
            vardiffsgSum = mv.masked_greater(vardiffsgSum, 10000.)
            vardiffsgSum.mask = percentw.mask
            vardiffsgSum._FillValue = valmask

        # average variable across members
        isonVarAve = cdu.averager(isonvar, axis=0)
        isonVarAve = cdm.createVariable(isonVarAve, axes=[time, axesList[1], axesList[2], axesList[3]], id='foo')
        # mask
        if varFill[iv] == valmask:
            isonVarAve = maskVal(isonVarAve, valmask)
        isonVarAve.mask = percentw.mask

        # Only keep points with rhon > bowl-delta_rho
        delta_rho = 0.
        if mme:
            # start from average of <var>Agree
            isonVarBowl = cdu.averager(varbowl2D, axis=0)
            isonVarBowl = cdm.createVariable(isonVarBowl, axes=[time, axesList[1], axesList[2], axesList[3]], id='foo')
            isonVarBowl = maskVal(isonVarBowl, valmask)
            isonVarBowl.mask = percentw.mask
            # Compute intermodel stddev
            isonVarStd = statistics.std(varbowl2D, axis=0)
            isonVarStd = cdm.createVariable(isonVarStd, axes=[time, axesList[1], axesList[2], axesList[3]], id='foo')
            isonVarStd = maskVal(isonVarStd, valmask)
            isonVarStd.mask = percentw.mask
            if iv == 0:
                # Read multimodel sigma on bowl and average in time
                file1d = replace(outDir + '/' + outFile, '2D', '1D')
                if os.path.isfile(file1d):
                    f1d = cdm.open(file1d)
                else:
                    print 'ERROR:', file1d, 'missing (if mme, run 1D first)'
                    sys.exit(1)
                bowlRead = f1d(varsig, time=slice(t1, t2))
                f1d.close()
                siglimit = cdu.averager(bowlRead, axis=0) - delta_rho
            # TODO: remove loop by building global array with 1/0
            for il in range(latN):
                for ib in range(basN):
                    #if ib == 2:
                    #    print il, siglimit[ib,il]
                    if siglimit[ib, il] < valmask / 1000.:
                        # if mme bowl density defined, mask above bowl
                        index = (npy.argwhere(sigmaGrd[:] >= siglimit[ib, il]))
                        isonVarBowl[:, ib, 0:index[0], il].mask = True
                        isonVarStd[:, ib, 0:index[0], il].mask = True
                        vardiffsgSum[:, ib, 0:index[0], il].mask = True
                    else:
                        # mask all points
                        isonVarBowl[:, ib, :, il].mask = True
                        isonVarStd[:, ib, :, il].mask = True
                        vardiffsgSum[:, ib, :, il].mask = True
        else:
            isonVarBowl = isonVarAve * 1.  # start from variable
            isonVarStd = isonVarAve * 1.   # start from variable
            if iv == 0:
                siglimit = cdu.averager(varbowl, axis=0)  # average across members
                # Average bowl in time
                if timeBowl == 'mean':
                    siglimit = cdu.averager(siglimit, axis=0) - delta_rho
                # or take largest sigma over time
                else:
                    siglimit = npy.ma.max(siglimit, axis=0) - delta_rho
            # TODO: remove loop by building global array with 1/0
            for il in range(latN):
                for ib in range(basN):
                    if siglimit[ib, il] < valmask / 1000.:
                        # if bowl density defined, mask above bowl
                        index = (npy.argwhere(sigmaGrd[:] >= siglimit[ib, il]))[:, 0]  # Add [:,0] for python Yona
                        #import code
                        #code.interact(banner='index', local=dict(locals(), **globals()))
                        isonVarBowl[:, ib, 0:index[0], il].mask = True
                        vardiffsgSum[:, ib, 0:index[0], il].mask = True
                    else:
                        # mask all points
                        vardiffsgSum[:, ib, :, il].mask = True
            isonVarBowl = maskVal(isonVarBowl, valmask)
            # Find max of Std dev of all members
            isonVarStd = npy.ma.max(varstd, axis=0)
            # mask
            if varFill[iv] == valmask:
                isonVarStd = maskVal(isonVarStd, valmask)

        # Write
        isonave = cdm.createVariable(isonVarAve, axes=[time, axesList[1], axesList[2], axesList[3]], id=isonRead.id)
        isonave.long_name = isonRead.long_name
        isonave.units = isonRead.units
        isonavediff = cdm.createVariable(vardiffsgSum, axes=[time, axesList[1], axesList[2], axesList[3]], id=isonRead.id + 'Agree')
        isonavediff.long_name = isonRead.long_name
        isonavediff.units = isonRead.units
        isonavebowl = cdm.createVariable(isonVarBowl, axes=[time, axesList[1], axesList[2], axesList[3]], id=isonRead.id + 'Bowl')
        isonavebowl.long_name = isonRead.long_name
        isonavebowl.units = isonRead.units
        if not mme:
            isonmaxstd = cdm.createVariable(isonVarStd, axes=[axesList[1], axesList[2], axesList[3]], id=isonRead.id + 'Std')
            isonmaxstd.long_name = isonRead.long_name
            isonmaxstd.units = isonRead.units

        outFile_f.write(isonave.astype('float32'))
        outFile_f.write(isonavediff.astype('float32'))
        outFile_f.write(isonavebowl.astype('float32'))
        if not mme:
            outFile_f.write(isonmaxstd.astype('float32'))

        if ToeType == 'histnat':
            isontoe1 = cdm.createVariable(varToE1, axes=[ensembleAxis, axesList[1], axesList[2], axesList[3]], id=isonRead.id + 'ToE1')
            isontoe1.long_name = 'ToE 1 for ' + isonRead.long_name
            isontoe1.units = 'Year'
            isontoe2 = cdm.createVariable(varToE2, axes=[ensembleAxis, axesList[1], axesList[2], axesList[3]], id=isonRead.id + 'ToE2')
            isontoe2.long_name = 'ToE 2 for ' + isonRead.long_name
            isontoe2.units = 'Year'
            outFile_f.write(isontoe1.astype('float32'))
            outFile_f.write(isontoe2.astype('float32'))

        if mme:
            isonvarstd = cdm.createVariable(isonVarStd, axes=[time, axesList[1], axesList[2], axesList[3]], id=isonRead.id + 'ModStd')
            isonvarstd.long_name = isonRead.long_name + ' intermodel std'
            isonvarstd.units = isonRead.units
            outFile_f.write(isonvarstd.astype('float32'))

    # <--- end of loop on variables

    outFile_f.close()
    fi.close()
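# A hedged usage sketch for mmeAveMsk2D() above. The file names, directories and
# argument values are placeholders chosen for illustration; they are not taken
# from the repository's driver scripts.
listFiles = ['cmip5.MODEL-NAME.historical.r1i1p1.an.ocn.Omon.density_zon2D.nc']  # hypothetical
mmeAveMsk2D(listFiles,
            years=(0, 145),                          # time slice to read
            inDir=['/path/to/historical', '/path/to/historicalNat'],
            outDir='/path/to/output',
            outFile='cmip5.MODEL-NAME.historical.ensm.an.ocn.Omon.density_zon2D.nc',
            timeInt=[1, 20],                         # reference period for differences
            mme=False,                               # single-model ensemble statistics
            timeBowl='mean',                         # time-mean bowl used for masking
            ToeType='histnat')                       # also compute ToE vs historicalNat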
varpms = np.ma.std(varpiC_p, axis=0)
varims = np.ma.std(varpiC_i, axis=0)

# -- reorganise i,j dims in single dimension data (speeds up loops)
varCO2_a = np.reshape(varCO2_a, (timN, levN*latN))
varpiC_a = np.reshape(varpiC_a, (timN, levN*latN))
varams = np.reshape(varams, (levN*latN))
varCO2_p = np.reshape(varCO2_p, (timN, levN*latN))
varpiC_p = np.reshape(varpiC_p, (timN, levN*latN))
varpms = np.reshape(varpms, (levN*latN))
varCO2_i = np.reshape(varCO2_i, (timN, levN*latN))
varpiC_i = np.reshape(varpiC_i, (timN, levN*latN))
varims = np.reshape(varims, (levN*latN))

# -- Compute ToE as last date when diff 1pctCO2 - piControl is larger than mult * stddev
toei_a = np.reshape(findToE(varCO2_a - varpiC_a, varams, multStd), (levN, latN))
toei_p = np.reshape(findToE(varCO2_p - varpiC_p, varpms, multStd), (levN, latN))
toei_i = np.reshape(findToE(varCO2_i - varpiC_i, varims, multStd), (levN, latN))

# -- Save
toe_a[i, :, :] = toei_a
toe_p[i, :, :] = toei_p
toe_i[i, :, :] = toei_i

# -- Select domain to average for each model
domain = ToEdomain1pctCO2vsPiC(model['name'], domain_name)[0]
domain_char = ToEdomain1pctCO2vsPiC(model['name'], domain_name)[1]

# -- Average toe
if domain['Atlantic'] != None:
    varToEA[i] = np.ma.around(averageDom(toe_a[i, :, :], 2, domain['Atlantic'], lat, density))
    if domain['Indian'] != None:
        varsignal_i[:, k, j] = averageDom(varhrcp_i - meanvarhn_i, 3, domain['Indian'], lat, density)
        #varsignal_i[145:,k,j] = averageDom(varhrcp_i[145:,:,:]-meanvarhn_i, 3, domain['Indian'], lat, density)
else:
    if domain['Atlantic'] != None:
        varsignal_a[:, k, j] = averageDom(varhrcp_a - meanvarpiC_a, 3, domain['Atlantic'], lat, density)
    if domain['Pacific'] != None:
        varsignal_p[:, k, j] = averageDom(varhrcp_p - meanvarpiC_p, 3, domain['Pacific'], lat, density)
    if domain['Indian'] != None:
        varsignal_i[:, k, j] = averageDom(varhrcp_i - meanvarpiC_i, 3, domain['Indian'], lat, density)

# print(' varsignal shape:', varsignal_a.shape, varsignal_p.shape, varsignal_i.shape)

# Compute ToE of averaged domain for run k
if domain['Atlantic'] != None and np.ma.is_masked(varnoise_a[j]) == False:
    toe2_a[k, j] = findToE(varsignal_a[:, k, j], varnoise_a[j], multStd) + iniyear
    toe1_a[k, j] = findToE(varsignal_a[:, k, j], varnoise_a[j], 1) + iniyear
    # print(toe1_a[k,j], toe2_a[k,j])
if domain['Pacific'] != None and np.ma.is_masked(varnoise_p[j]) == False:
    toe2_p[k, j] = findToE(varsignal_p[:, k, j], varnoise_p[j], multStd) + iniyear
    toe1_p[k, j] = findToE(varsignal_p[:, k, j], varnoise_p[j], 1) + iniyear
if domain['Indian'] != None and np.ma.is_masked(varnoise_i[j]) == False:
    toe2_i[k, j] = findToE(varsignal_i[:, k, j], varnoise_i[j], multStd) + iniyear
    toe1_i[k, j] = findToE(varsignal_i[:, k, j], varnoise_i[j], 1) + iniyear

# Take out runs where the signal is of opposite sign than expected
if v != 'Z':
    if signal_domains[j] == 'fresher':
        if np.ma.mean(varsignal_a[-5:, k, j], axis=0) > 2 * varnoise_a[j]:
            toe2_a[k, j] = np.ma.masked
        if np.ma.mean(varsignal_a[-5:, k, j], axis=0) > 1 * varnoise_a[j]:
varsignal_a = varhrcp_a - meanvarpiC_a
varsignal_p = varhrcp_p - meanvarpiC_p
varsignal_i = varhrcp_i - meanvarpiC_i

# Save signal
varsignal_end[k, 1, :, :] = np.ma.average(varsignal_a[-5:, :, :], axis=0)
varsignal_end[k, 2, :, :] = np.ma.average(varsignal_p[-5:, :, :], axis=0)
varsignal_end[k, 3, :, :] = np.ma.average(varsignal_i[-5:, :, :], axis=0)

# Reorganise i,j dims in single dimension data (speeds up loops)
varsignal_a = np.reshape(varsignal_a, (timN, levN*latN))
varsignal_p = np.reshape(varsignal_p, (timN, levN*latN))
varsignal_i = np.reshape(varsignal_i, (timN, levN*latN))

# Compute ToE as last date when diff hist+RCP - histNat is larger than mult * stddev
toe2_a = np.reshape(findToE(varsignal_a, varnoise_a, multStd), (levN, latN))
toe2_p = np.reshape(findToE(varsignal_p, varnoise_p, multStd), (levN, latN))
toe2_i = np.reshape(findToE(varsignal_i, varnoise_i, multStd), (levN, latN))
toe1_a = np.reshape(findToE(varsignal_a, varnoise_a, 1), (levN, latN))
toe1_p = np.reshape(findToE(varsignal_p, varnoise_p, 1), (levN, latN))
toe1_i = np.reshape(findToE(varsignal_i, varnoise_i, 1), (levN, latN))

# Save in output variable
varToE1[k, 1, :, :] = toe1_a
varToE1[k, 2, :, :] = toe1_p
varToE1[k, 3, :, :] = toe1_i
varToE2[k, 1, :, :] = toe2_a
varToE2[k, 2, :, :] = toe2_p
varToE2[k, 3, :, :] = toe2_i

# Mask points because when calculating ToE, masked points (e.g. bathy, no data) are set to 240 (=no emergence)
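# The masking step announced in the comment above is not part of this excerpt.
# A minimal sketch, assuming the mask of the end-of-run signal (varsignal_end)
# marks bathymetry / missing-data points in each basin:
for ibasin in range(1, 4):  # Atlantic, Pacific, Indian
    nodata = np.ma.getmaskarray(varsignal_end[k, ibasin, :, :])
    varToE1[k, ibasin, :, :] = np.ma.masked_where(nodata, varToE1[k, ibasin, :, :])
    varToE2[k, ibasin, :, :] = np.ma.masked_where(nodata, varToE2[k, ibasin, :, :])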
if ToE:
    # reorganise i,j dims in single dimension data (speeds up loops)
    tvarha = np.reshape(tvarha, (timN, levN * latN))
    tvarhna = np.reshape(tvarhna, (timN, levN * latN))
    varams = np.reshape(varams, (levN * latN))
    tvarhp = np.reshape(tvarhp, (timN, levN * latN))
    tvarhnp = np.reshape(tvarhnp, (timN, levN * latN))
    varpms = np.reshape(varpms, (levN * latN))
    tvarhi = np.reshape(tvarhi, (timN, levN * latN))
    tvarhni = np.reshape(tvarhni, (timN, levN * latN))
    varims = np.reshape(varims, (levN * latN))

    # Compute ToE as last date when diff hist-histNat is larger than mult * stddev
    varam = np.reshape(findToE(tvarha - tvarhna, varams, multStd) + iniyear, (levN, latN))
    varpm = np.reshape(findToE(tvarhp - tvarhnp, varpms, multStd) + iniyear, (levN, latN))
    varim = np.reshape(findToE(tvarhi - tvarhni, varims, multStd) + iniyear, (levN, latN))

    # shade ToE and contour diff hist-histNat
    tmpa = vara
    vara = varam + 1900
    varam = tmpa
    tmpp = varp
    varp = varpm + 1900
    varpm = tmpp
    tmpi = vari
    vari = varim + 1900
    varim = tmpi
        varsignal_p[145:, k, j] = averageDom(varrcp_p - meanvarhn_p, 3, domain['Pacific'], lat, density)
    else:
        varsignal_p[145:, k, j] = averageDom(varrcp_p - meanvarpiC_p, 3, domain['Pacific'], lat, density)
if domain['Indian'] != None:
    varsignal_i[0:145, k, j] = averageDom(varh_i - varhn_i, 3, domain['Indian'], lat, density)
    if use_piC != True:
        varsignal_i[145:, k, j] = averageDom(varrcp_i - meanvarhn_i, 3, domain['Indian'], lat, density)
    else:
        varsignal_i[145:, k, j] = averageDom(varrcp_i - meanvarpiC_i, 3, domain['Indian'], lat, density)

print(' varsignal shape:', varsignal_a.shape, varsignal_p.shape, varsignal_i.shape)

# Compute ToE of averaged domain for run k
if use_piC != True:
    if domain['Atlantic'] != None and np.ma.is_masked(varnoise_a[j]) == False:
        toe_a[k, j] = findToE(varsignal_a[:, k, j], varnoise_a[j], multStd) + iniyear
    if domain['Pacific'] != None and np.ma.is_masked(varnoise_p[j]) == False:
        toe_p[k, j] = findToE(varsignal_p[:, k, j], varnoise_p[j], multStd) + iniyear
    if domain['Indian'] != None and np.ma.is_masked(varnoise_i[j]) == False:
        toe_i[k, j] = findToE(varsignal_i[:, k, j], varnoise_i[j], multStd) + iniyear
else:
    if domain['Atlantic'] != None and np.ma.is_masked(varnoise_a[j]) == False \
            and np.ma.is_masked(varnoise2_a[j]) == False:
        toe_a[k, j] = findToE_2thresholds(varsignal_a[:, k, j], varnoise_a[j], varnoise2_a[j], 145, multStd) + iniyear
    if domain['Pacific'] != None and np.ma.is_masked(varnoise_p[j]) == False \
            and np.ma.is_masked(varnoise2_p[j]) == False:
        toe_p[k, j] = findToE_2thresholds(varsignal_p[:, k, j], varnoise_p[j], varnoise2_p[j], 145, multStd) + iniyear
    if domain['Indian'] != None and np.ma.is_masked(varnoise_i[j]) == False \
            and np.ma.is_masked(varnoise2_i[j]) == False:
        toe_i[k, j] = findToE_2thresholds(varsignal_i[:, k, j], varnoise_i[j], varnoise2_i[j], 145, multStd) + iniyear
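# findToE_2thresholds is defined elsewhere in the repository and is not shown in
# this excerpt. Judging from the call signature (signal, noise1, noise2, 145, multStd),
# a plausible reading -- an assumption for illustration, not the repository code --
# is that the noise level switches from noise1 to noise2 at index 145 (the end of
# the historical period), with the same "first index after the last non-exceedance"
# ToE definition used elsewhere:
import numpy as np

def findToE_2thresholds_sketch(signal, noise1, noise2, tswitch, mult):
    sig = np.abs(np.ma.filled(np.ma.asarray(signal), 0.))
    thr = np.where(np.arange(sig.shape[0]) < tswitch, mult * noise1, mult * noise2)
    below = sig <= thr                    # True where not yet emerged
    if not below.any():
        return 0                          # emerged from the first time step
    return sig.shape[0] - int(np.argmax(below[::-1]))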
varpms = np.ma.std(varpiC_p, axis=0)
varims = np.ma.std(varpiC_i, axis=0)

# -- reorganise i,j dims in single dimension data (speeds up loops)
varCO2_a = np.reshape(varCO2_a, (timN, levN * latN))
varpiC_a = np.reshape(varpiC_a, (timN, levN * latN))
varams = np.reshape(varams, (levN * latN))
varCO2_p = np.reshape(varCO2_p, (timN, levN * latN))
varpiC_p = np.reshape(varpiC_p, (timN, levN * latN))
varpms = np.reshape(varpms, (levN * latN))
varCO2_i = np.reshape(varCO2_i, (timN, levN * latN))
varpiC_i = np.reshape(varpiC_i, (timN, levN * latN))
varims = np.reshape(varims, (levN * latN))

# -- Compute ToE as last date when diff 1pctCO2 - piControl is larger than mult * stddev
toe_a = np.reshape(findToE(varCO2_a - varpiC_a, varams, multStd), (levN, latN))
toe_p = np.reshape(findToE(varCO2_p - varpiC_p, varpms, multStd), (levN, latN))
toe_i = np.reshape(findToE(varCO2_i - varpiC_i, varims, multStd), (levN, latN))

# -- Average bowl position
bowlCO2_a = np.ma.average(bowlCO2_a, axis=0)
bowlCO2_p = np.ma.average(bowlCO2_p, axis=0)
bowlCO2_i = np.ma.average(bowlCO2_i, axis=0)
bowlpiC_a = np.ma.average(bowlpiC_a, axis=0)
bowlpiC_p = np.ma.average(bowlpiC_p, axis=0)
bowlpiC_i = np.ma.average(bowlpiC_i, axis=0)

# -- Mask
var_mask = np.ma.getmask(np.ma.average(f2dCO2.variables[var][:], axis=0))
toe_a = np.ma.array(toe_a, mask=var_mask[1, :, :])
toe_p = np.ma.array(toe_p, mask=var_mask[2, :, :])