# --- Fragment: mask invalid ocean points and harmonise variable units ---
# Treat zero salinity as missing: mask it, then propagate the same mask to the
# other surface fields so all variables share a single validity mask.
sos = mv.masked_equal(sos, 0.)
print sos.count()
# Overwrite masked entries in the raw data buffer with the sentinel valmask
sos.data[:] = sos.filled(valmask)
tos.mask = sos.mask
tos.data[:] = tos.filled(valmask)
qnet.mask = sos.mask
qnet.data[:] = qnet.filled(valmask)
emp.mask = sos.mask
emp.data[:] = emp.filled(valmask)
# Read masking value
# added if wfcorr == masked values everywhere
# NOTE(review): the two lines below repeat the emp masking done just above —
# presumably a guard for the wfcorr case mentioned; confirm whether intentional.
emp.mask = sos.mask
emp.data[:] = emp.filled(valmask)
# Test variable units
[sos, sosFixed] = fixVarUnits(sos, 'sos', True) #,'logfile.txt')
if sosFixed:
    print ' sos: units corrected'
[tos, tosFixed] = fixVarUnits(tos, 'thetao', True) #,'logfile.txt')
if tosFixed:
    print ' tos: units corrected'
# Physical inits
#P = 0 # surface pressure
# find non-masked points
#maskin = mv.masked_values(tos.data[0], valmask).mask
#nomask = npy.equal(maskin,0)
#
# target horizonal grid for interp
# If no interpolation is requested, reuse the input grid as the target grid
if noInterp:
    outgrid = ingrid
# Hard-coded, site-specific path to the convection-region mask file
fileg = '/data/vestella/Masks/Mask_Convect_Atlantic_40N.nc'
# --- Fragment: tail of the piControl-skip branch, then the 1pctCO2 branch ---
# NOTE(review): the leading indented statements belong to an if-branch whose
# header precedes this chunk — indentation below is reconstructed; confirm.
    # Log a timestamped "file skipped" message, then move to the next file
    logtime_now = datetime.datetime.now()
    logtime_format = logtime_now.strftime("%y%m%d_%H%M%S")
    time_since_start = time.time() - start_time
    time_since_start_s = '%09.2f' % time_since_start
    writeToLog(
        logfile,
        "".join([
            '** ', filecount_s, ': ', logtime_format, ' ', time_since_start_s,
            's; piControl file skipped : ', l, ' **\n'
        ]))
    continue
elif experiment.__contains__('pctto'):
    # Read the whole variable into memory (cdms call syntax)
    d = f_in(var)
    # Check units and correct in case of salinity
    if var == 'so' or var == 'sos':
        [d, _] = fixVarUnits(d, var, True)
    # Create ~50-yr linear trend - with period dependent on experiment
    #(slope,intercept),(slope_err,intercept_err) = linearregression(d(),error=1)
    (slope), (slope_err) = linearregression(fixInterpAxis(d), error=1, nointercept=1)
    slope = slope.astype('float32')  # Recast from float64 back to float32 precision - half output file sizes
    slope_err = slope_err.astype('float32')  # Recast from float64 back to float32 precision - half output file sizes
    slope.comment = 'start-end change'
    # Create ~50-yr mean climatology
    clim = cdutil.YEAR.climatology(d())
    # Free the (potentially large) source variable
    del (d)
# Report failure to logfile print "** PROBLEM 1 (read var error - ann calc failed) with: " + l + " found and breaking to next loop entry.. **" nc_bad1 = nc_bad1 + 1; if 'logfile' in locals(): logtime_now = datetime.datetime.now() logtime_format = logtime_now.strftime("%y%m%d_%H%M%S") time_since_start = time.time() - start_time ; time_since_start_s = '%09.2f' % time_since_start err_text = 'PROBLEM 1 (read var error - ann calc failed) creating ' writeToLog(logfile,"".join(['** ',format(nc_bad1,"07d"),': ',logtime_format,' ',time_since_start_s,'s; ',err_text,l,' **'])) continue # Explicitly set timeBounds - problem with cmip5.NorESM1-M.rcp45.r1i1p1.mo.tas.ver-v20110901.xml cdu.setTimeBoundsMonthly(d) # Check units and correct in case of salinity if var in ['so','sos']: [d,_] = fixVarUnits(d,var,True,logfile) # Get time dimension and convert to component time dt = d.getTime() dtc = dt.asComponentTime() dfirstyr = dtc[0].year dlastyr = dtc[-1].year # Use cdutil averager functions to generate annual means print "** Calculating annual mean **" time_anncalc_start = time.time() try: # Determine first January for counter,compTime in enumerate(dtc): if compTime.month == 1: index_start = counter break
d = f_in[var] ; # Create variable object - square brackets indicates cdms "file object" and it's associated axes # Determine experiment experiment = l.split('/')[-1].split('.')[2] time_calc_start = time.time() if experiment == 'picntrl': # Case of piControl files, need to consider spawning time of subsequent experiment logtime_now = datetime.datetime.now() logtime_format = logtime_now.strftime("%y%m%d_%H%M%S") time_since_start = time.time() - start_time ; time_since_start_s = '%09.2f' % time_since_start writeToLog(logfile,"".join(['** ',filecount_s,': ',logtime_format,' ',time_since_start_s,'s; piControl file skipped : ',l,' **\n'])) continue elif experiment.__contains__('pctto'): d = f_in(var) # Check units and correct in case of salinity if var == 'so' or var == 'sos': [d,_] = fixVarUnits(d,var,True) # Create ~50-yr linear trend - with period dependent on experiment #(slope,intercept),(slope_err,intercept_err) = linearregression(d(),error=1) (slope),(slope_err) = linearregression(fixInterpAxis(d),error=1,nointercept=1) slope = slope.astype('float32') ; # Recast from float64 back to float32 precision - half output file sizes slope_err = slope_err.astype('float32') ; # Recast from float64 back to float32 precision - half output file sizes slope.comment = 'start-end change' # Create ~50-yr mean climatology clim = cdutil.YEAR.climatology(d()) del(d) ; gc.collect() clim = clim.astype('float32') ; # Recast from float64 back to float32 precision - half output file sizes clim.comment = 'start-end climatological mean' outfile = re.sub("[0-9]{4}-[0-9]{4}","start-end_ClimAndSlope",l) outfile = re.sub(".xml",".nc",outfile) ; # Correct for 3D an.xml files
# --- Fragment: finish building a "zero-valued arrays" skip report, log it,
# then compute the annual climatology and regrid to the WOA grid ---
# NOTE(review): the opening of the reportStr "".join([ ... call precedes this
# chunk — the leading lines below are its remaining list elements.
            "*****\n",
            filePath.split("/")[-1],
            " has zero-valued arrays,",
            " skipping to next file..\n",
            "*****",
        ]
    )
    print(reportStr)
    writeToLog(logFile, reportStr)
    continue
# Validate variable axes
# for i in range(len(d1.shape)):
#     ax = d1.getAxis(i)
#     print(ax.id,len(ax))
# pdb.set_trace()
# Correct units where needed (e.g. salinity); corrections reported to logFile
d1, varFixed = fixVarUnits(d1, var, report=True, logFile=logFile)
# print('d1.max():',d1.max().max().max(),'d1.min():',d1.min().min().min()) ; Moved below for direct comparison
# print('d1 loaded')
# pdb.set_trace()
times = d1.getTime()
print("starts :", times.asComponentTime()[0])
print("ends :", times.asComponentTime()[-1])
print("Time:", datetime.datetime.now().strftime("%H%M%S"), "cdu start")
# Annual climatology over the full record (cdutil YEAR averager)
climLvl = cdu.YEAR.climatology(d1)
# print('climLvl created')
# pdb.set_trace()
print("Time:", datetime.datetime.now().strftime("%H%M%S"), "cdu end")
clim = climLvl
# pdb.set_trace()
# Horizontal regrid onto the WOA target grid (ESMF, linear method)
climInterp = climLvl.regrid(
    woaGrid, regridTool="ESMF", regridMethod="linear")
# --- Fragment: tail of a PROBLEM-1 logging branch, then annual-mean setup ---
# NOTE(review): the leading indented statements continue a handler/branch whose
# opening precedes this chunk — indentation below is reconstructed; confirm.
    time_since_start = time.time() - start_time
    time_since_start_s = '%09.2f' % time_since_start
    err_text = 'PROBLEM 1 (read var error - ann calc failed) creating '
    writeToLog(
        logfile,
        "".join([
            '** ', format(nc_bad1, "07d"), ': ', logtime_format, ' ',
            time_since_start_s, 's; ', err_text, l, ' **'
        ]))
    # Skip to the next file in the enclosing loop
    continue
# Explicitly set timeBounds - problem with cmip5.NorESM1-M.rcp45.r1i1p1.mo.tas.ver-v20110901.xml
cdu.setTimeBoundsMonthly(d)
# Check units and correct in case of salinity
if var in ['so', 'sos']:
    [d, _] = fixVarUnits(d, var, True, logfile)
# Get time dimension and convert to component time
dt = d.getTime()
dtc = dt.asComponentTime()
dfirstyr = dtc[0].year
dlastyr = dtc[-1].year
# Use cdutil averager functions to generate annual means
print "** Calculating annual mean **"
time_anncalc_start = time.time()
try:
    # Determine first January
    for counter, compTime in enumerate(dtc):
        if compTime.month == 1:
            index_start = counter
            break
# NOTE(review): the except clause for this try lies beyond this chunk.
# --- Fragment: mask invalid ocean points and harmonise variable units ---
# Treat zero salinity as missing: mask it, then propagate the same mask to the
# other surface fields so all variables share a single validity mask.
sos = mv.masked_equal(sos,0.)
print sos.count()
# Overwrite masked entries in the raw data buffer with the sentinel valmask
sos.data[:] = sos.filled(valmask)
tos.mask = sos.mask
tos.data[:] = tos.filled(valmask)
qnet.mask = sos.mask
qnet.data[:] = qnet.filled(valmask)
emp.mask = sos.mask
emp.data[:] = emp.filled(valmask)
# Read masking value
# added if wfcorr == masked values everywhere
# NOTE(review): the two lines below repeat the emp masking done just above —
# presumably a guard for the wfcorr case mentioned; confirm whether intentional.
emp.mask = sos.mask
emp.data[:] = emp.filled(valmask)
# Test variable units
[sos,sosFixed] = fixVarUnits(sos,'sos',True)#,'logfile.txt')
if sosFixed:
    print ' sos: units corrected'
[tos,tosFixed] = fixVarUnits(tos,'thetao',True)#,'logfile.txt')
if tosFixed:
    print ' tos: units corrected'
# Physical inits
#P = 0 # surface pressure
# find non-masked points
#maskin = mv.masked_values(tos.data[0], valmask).mask
#nomask = npy.equal(maskin,0)
#
# target horizonal grid for interp
# If no interpolation is requested, reuse the input grid as the target grid
if noInterp:
    outgrid = ingrid
# Hard-coded, site-specific path to the convection-region mask file
fileg = '/data/vestella/Masks/Mask_Convect_Atlantic_40N.nc'
# --- Fragment: read a time slice of the variable, then set up memory-limited
# level-batched processing for large 3D model grids ---
try:
    # Read only the requested time window
    # NOTE(review): 'con' is presumably a cdms time-selection modifier
    # (contiguous selection) — confirm against cdms2 docs.
    d = f_in(var,time=(start_yr_ct,end_yr_ct,'con'))
    #t = d.getAxis(0)
    #print 'start: ',start_yr_ct,' ',t.asComponentTime()[0]
    #print 'end : ',end_yr_ct,' ',t.asComponentTime()[-1]
    #sys.exit()
except:
    # NOTE(review): bare except swallows the real error type; consider
    # narrowing and logging the exception itself.
    logtime_now = datetime.datetime.now()
    logtime_format = logtime_now.strftime("%y%m%d_%H%M%S")
    time_since_start = time.time() - start_time ; time_since_start_s = '%09.2f' % time_since_start
    writeToLog(logfile,"".join(['** ',filecount_s,': ',logtime_format,' ',time_since_start_s,'s; PROBLEM file skipped : ',l,' **']))
    continue
# Check units and correct in case of salinity
if var in ['so','sos']:
    [d,_] = fixVarUnits(d,var,report=True)
# Create linear trend and climatology
if var in ['so','thetao','uo','vo'] and model in ['MIROC4h','MPI-ESM-MR']:
    # Deal with memory limits for 3D variables - Build output arrays
    clim = np.ma.zeros([1,d.shape[1],d.shape[2],d.shape[3]])
    slope = np.ma.zeros([d.shape[1],d.shape[2],d.shape[3]])
    slope_err = np.ma.zeros([d.shape[1],d.shape[2],d.shape[3]])
    # NOTE(review): "model in 'MIROC4h'" is a substring test, not equality;
    # safe here only because model was filtered to two values above — confirm.
    if model in 'MIROC4h':
        level_count = 5; # 10: ~38%; 20: ~70%
    elif model in 'MPI-ESM-MR':
        level_count = 20 ; # 20: ~23%
    # CCSM4 ~25% - next largest grid - no need to loop over levels
    # Process depth levels in batches of level_count to bound peak memory
    for depth in range(0,((d.shape[1])-1),level_count):
        print "".join(['lev: ',format(depth,'02d'),' of ',str((d.shape[1])-1)])
        d_level = d(lev=slice(depth,depth+level_count,1))
        # Generate climatology