def doCalTest(self, a, b, c, d, e, f, g, cal=cdtime.MixedCalendar):
    x = cdtime.comptime(d, e, f)
    units = "days since %d-%d-%d" % (a, b, c)
    r = x.torel(units, cal)
    self.assertTrue(self.isEqual(r.value, g))

    r2 = cdtime.reltime(g, units)
    x2 = r2.tocomp(cal)
    self.assertTrue(self.cmpYear(x, x2))

    units2 = "days since %d-%d-%d" % (d, e, f)
    r3 = cdtime.reltime(10.0, units2)
    r4 = r3.torel(units)
    self.assertTrue(self.isEqual(r4.value, (10.0 + g)))

    bb = cdtime.comptime(a, b, c)
    x2 = bb.add(g, cdtime.Day, cal)
    self.assertTrue(self.cmpYear(x, x2))
    x2 = x.sub(g, cdtime.Day, cal)
    self.assertTrue(self.cmpYear(bb, x2))

    r2 = cdtime.reltime(g, units)
    r3 = r2.add(1000.0, cdtime.Day, cal)
    self.assertTrue(self.isEqual(r3.value, g + 1000.0))
    r3 = r2.sub(1000.0, cdtime.Day, cal)
    self.assertTrue(self.isEqual(r3.value, g - 1000.0))

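# A minimal standalone sketch of the round trip doCalTest exercises (the
# values are illustrative, not from the test suite): 1999-1-1 to 2000-1-1 is
# 365 days in the mixed Julian/Gregorian calendar, so component -> relative
# -> component conversions agree.
import cdtime
x = cdtime.comptime(2000, 1, 1)
r = x.torel("days since 1999-1-1", cdtime.MixedCalendar)
assert r.value == 365.0
assert r.tocomp(cdtime.MixedCalendar).year == x.year
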
def testSubRegionTimes(self):
    # subRegion - time types
    s2 = self.var.subRegion(latitude=(-42., 42., 'ccn'),
                            longitude=(90., 270., 'con'),
                            time=('2001-1', '2002-1', 'ccn'))
    self.assertTrue(numpy.ma.allequal(self.vp, s2))

    t1 = cdtime.comptime(2001)
    t2 = cdtime.comptime(2002)
    s2 = self.var.subRegion(latitude=(-42., 42., 'ccn'),
                            longitude=(90., 270., 'con'),
                            time=(t1, t2))
    self.assertTrue(numpy.ma.allequal(self.vp, s2))

    t1 = cdtime.comptime(2003)
    t2 = cdtime.comptime(2004)
    with self.assertRaises(cdms2.CDMSError):
        s2 = self.var.subRegion(latitude=(-42., 42., 'ccn'),
                                longitude=(90., 270., 'con'),
                                time=(t1, t2))

    t1 = cdtime.reltime(0, "years since 2001")
    t2 = cdtime.reltime(1, "year since 2001")
    s2 = self.var.subRegion(latitude=(-42., 42., 'ccn'),
                            longitude=(90., 270., 'con'),
                            time=(t1, t2, 'ccn'))
    self.assertTrue(numpy.ma.allequal(self.vp, s2))

    xx = self.var.subRegion('2000')
    self.assertTrue(numpy.ma.allequal(xx, self.var(time="2000")))

def plot_obs_trends(self, dataset, **kwargs):
    X = self.OBS[dataset.upper()]
    west = X.reshaped["west"]
    east = X.reshaped["east"]
    if "start" not in kwargs:
        start = cmip5.start_time(east)
        start = cdtime.comptime(start.year, start.month, 1)
    else:
        start = kwargs.pop("start")
    if "stop" not in kwargs:
        stop = cmip5.stop_time(east)
        stop = cdtime.comptime(stop.year, stop.month, 30)
    else:
        stop = kwargs.pop("stop")
    west = west(time=(start, stop))
    east = east(time=(start, stop))
    west.getAxis(0).id = "time"
    east.getAxis(0).id = "time"
    plt.plot(cmip5.get_linear_trends(west).asma(), label="WEST",
             color=get_colors("west"), **kwargs)
    plt.plot(cmip5.get_linear_trends(east).asma(), label="EAST",
             color=get_colors("east"), **kwargs)
    months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN",
              "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    plt.xticks(np.arange(12), months)

def recup_nfs_sst_seviri_recent(self, URL_SEVIRI_DATA, YYYY, MM, DD, timerange):
    """Retrieve a recent hourly SEVIRI SST file over NFS (available since 2012-01-01)."""
    import os, subprocess, shutil
    import cdtime, glob
    from vacumm.misc.atime import strftime
    # -- Append the year and the day-of-year (001-365) to the directory name
    DATA_DIR = os.path.join(URL_SEVIRI_DATA, str(YYYY))
    a = cdtime.comptime(YYYY, MM, DD)
    a2 = a.torel("days since " + strftime("%Y-%m-%d", cdtime.comptime(YYYY - 1, 12, 31)))
    DATA_DIR = os.path.join(DATA_DIR, "%03d" % a2.value)
    if timerange == "midnight":
        filename = "%04d%02d%02d00*" % (YYYY, MM, DD)
    else:
        filename = "%04d%02d%02d*" % (YYYY, MM, DD)
    copy_mode = "nfs"
    list_file = glob.glob(os.path.join(DATA_DIR, filename))
    # If the list is empty, retry through a connection to service7
    if not list_file:
        find_cmd = "ssh service7 \"find %(DATA_DIR)s -name '%(filename)s' \"" % vars()
        list_file = subprocess.check_output(find_cmd, shell=True).strip().split()
        if list_file:
            copy_mode = "scp"
    for file_to_read in list_file:
        if not os.path.isfile(os.path.basename(file_to_read)):
            if copy_mode == "nfs":
                shutil.copyfile(file_to_read, os.path.basename(file_to_read))
            if copy_mode == "scp":
                copy_cmd = "scp caparmor-sftp.ifremer.fr:%(file_to_read)s ." % vars()
                subprocess.check_call(copy_cmd, shell=True)

def checkDatawc(self, name, value):
    checkName(self, name, value)
    if isNumber(value):
        value = float(value), 0
    elif isinstance(value, str):
        t = cdtime.s2c(value)
        if t != cdtime.comptime(0, 1):
            t = t.torel(self.datawc_timeunits, self.datawc_calendar)
            value = float(t.value), 1
        else:
            checkedRaise(
                self, value, ValueError,
                'The ' + name + ' attribute must be either an integer or a float value or a date/time.')
    elif type(value) in [type(cdtime.comptime(1900)),
                         type(cdtime.reltime(0, 'days since 1900'))]:
        value = value.torel(self.datawc_timeunits, self.datawc_calendar).value, 1
    else:
        checkedRaise(
            self, value, ValueError,
            'The ' + name + ' attribute must be either an integer or a float value or a date/time.')
    return value

def app_test():
    short_run = True
    start_time = cdtime.comptime(1980, 1)
    end_time = cdtime.comptime(1980, 7) if short_run else cdtime.comptime(1982, 1)
    decomp_mode = DecompositionMode.Temporal
    dataset = {}
    dataset['path'] = '/Users/tpmaxwel/Data/MERRA_hourly_2D_precip/MERRA_hourly_precip.xml'
    dataset['variable'] = 'prectot'
    time_specs = {}
    time_specs['start_time'] = str(start_time)
    time_specs['end_time'] = str(end_time)
    time_specs['period_value'] = 1
    time_specs['period_units'] = cdtime.Month
    grid = {}
    grid['lat'] = [-90, 90]
    grid['lon'] = [-180, 180]
    task_metadata = {}
    task_metadata['time'] = time_specs
    task_metadata['grid'] = grid
    task_metadata['dataset'] = dataset
    testApp = IOTestApp(task_metadata)
    testApp.execute(decomp_mode)

def testCal(a, b, c, d, e, f, g, cal=cdtime.MixedCalendar):
    x = cdtime.comptime(d, e, f)
    units = "days since %d-%d-%d" % (a, b, c)
    r = x.torel(units, cal)
    if not isEqual(r.value, g):
        markError('component => relative failed: %r %r' % (x, r))
    r2 = cdtime.reltime(g, units)
    x2 = r2.tocomp(cal)
    if not cmpYear(x, x2):
        markError('relative => component failed: %r %r' % (r2, x2))
    units2 = "days since %d-%d-%d" % (d, e, f)
    r3 = cdtime.reltime(10.0, units2)
    r4 = r3.torel(units)
    if not isEqual(r4.value, (10.0 + g)):
        markError('relative => relative: %r %r' % (r3, r4))
    bb = cdtime.comptime(a, b, c)
    x2 = bb.add(g, cdtime.Day, cal)
    if not cmpYear(x, x2):
        markError('component add failed: %r %r' % (bb, x2))
    x2 = x.sub(g, cdtime.Day, cal)
    if not cmpYear(bb, x2):
        markError('component sub failed: %r %r' % (x, x2))
    r2 = cdtime.reltime(g, units)
    r3 = r2.add(1000.0, cdtime.Day, cal)
    if not isEqual(r3.value, g + 1000.0):
        markError('relative add failed: %r %r' % (r2, r3))
    r3 = r2.sub(1000.0, cdtime.Day, cal)
    if not isEqual(r3.value, g - 1000.0):
        markError('relative sub failed: %r %r' % (r2, r3))

def proj_aerosols(AA, piControl, H85, start=None, stop=None):
    if start is None:
        start = cdtime.comptime(1945, 1, 1)
    if stop is None:
        stop = cdtime.comptime(1984, 12, 31)
    data = [H85.reshaped["west"], H85.reshaped["east"]]
    nmod, nyears, nmonths = H85.reshaped["west"].shape
    P = MV.zeros((nmod, nyears))
    msolver = AA.solvers["multi"]
    fac = da.get_orientation(msolver)
    for i in range(nmod):
        to_proj = [H85.reshaped["west"][i], H85.reshaped["east"][i]]
        P[i] = msolver.projectField(to_proj)[:, 0] * fac
    P.setAxis(0, H85.reshaped["west"].getAxis(0))
    timeax = H85.reshaped["west"].getAxis(1)
    timeax.id = "time"
    P.setAxis(1, timeax)
    piCdata = [piControl.reshaped["west"], piControl.reshaped["east"]]
    pc = msolver.projectField(piCdata)[:, 0]
    Pt = P(time=(start, stop))
    nt = len(Pt.getTime())
    hslopes = cmip5.get_linear_trends(Pt)
    pslopes = da.get_slopes(pc, nt)

def testTimes(self):
    fsc = cdms2.open(os.path.join(cdat_info.get_sampledata_path(), 'tas_mo_clim.nc'))
    print("Step #0 : Reading data")
    s = self.f('tas', longitude=(0, 360, 'co'))
    acok = fsc('climseas', longitude=(0, 360, 'co'))
    print('Test #1 : Test result')
    ac = cdutil.times.JAN.climatology(s)
    self.assertTrue(MV2.allclose(ac[0], acok[0]))
    fsc.close()
    a = cdtime.comptime(1980)
    b = cdtime.comptime(1980, 5)
    f = cdms2.open(os.path.join(cdat_info.get_sampledata_path(), 'tas_6h.nc'))
    s = f('tas', time=(a, b, 'co'), squeeze=1)
    print("Test #2 : 6hourly AND get")
    jans = cdutil.times.JAN(s)
    print("Test #3 : climatology 6h")
    JFMA = cdutil.times.Seasons('JFMA')
    jfma = JFMA.climatology(s)
    # Test reorder
    print("Test #4 : time not first axis")
    jfma = JFMA.climatology(s(order='x...'))
    print("Test 4b: Result ok ?")
    self.assertEqual(jfma.getOrder()[0], 'x')

def old_Figure4(D, SM):
    axes = []
    axes += [plt.subplot(221)]
    t1981 = cdtime.comptime(1981, 1, 1)
    t2017 = cdtime.comptime(2017, 12, 31)
    pdsi_time_series(D, t1981, t2017, SM=SM, best_fit=False)
    axes += [plt.subplot(222)]
    pdsi_SN_figure(D)
    plt.title("(b): PDSI")
    plt.legend(fontsize=8)
    axes += [plt.subplot(223)]
    noise, signal_gleam, signal_merra2, H85 = soil_SN_figure(SM, "30cm")
    plt.legend(fontsize=8)
    plt.title("(c): Surface soil moisture")
    axes += [plt.subplot(224)]
    noise2, signal_gleam2, signal_merra22, H852 = soil_SN_figure(SM, "2m")
    plt.title("(d): Root zone soil moisture")
    plt.legend(fontsize=8)
    h, l = axes[1].get_legend_handles_labels()
    axes[1].legend(h[:3], l[:3], fontsize=8)
    h, l = axes[2].get_legend_handles_labels()
    axes[2].legend(h[:2], l[:2], fontsize=8)
    h, l = axes[3].get_legend_handles_labels()
    axes[3].legend(h[:2], l[:2], fontsize=8)
    axes[0].legend(ncol=2, fontsize=8)

def average_vs_projection(data):
    """Show the difference between PDSI regional average and projections"""
    # start = cdtime.comptime(1975, 8, 1)
    # stop = cdtime.comptime(2005, 12, 31)
    start = cdtime.comptime(1900, 1, 1)
    stop = cdtime.comptime(1949, 12, 31)
    ax1 = plt.subplot(211)
    ax2 = plt.subplot(212)
    for thing in sorted(["MXDA", "OWDA", "NADA", "MADA", "ANZDA"]) + ["ALL"]:
        X = getattr(data, thing)
        pdsi_av = cdutil.averager(X.obs, axis='xy')(time=(start, stop))
        c = colorregions(X.name)
        Plotting.time_plot(pdsi_av, lw=3, color=c, label=X.name, ax=ax1)
        Plotting.time_plot(X.projection(time=(start, stop)), lw=3, color=c,
                           label=X.name, ax=ax2)
        if thing == "ALL":
            x = np.arange(1975, 2006)
            y = X.projection(time=(start, stop))
            p = np.polyfit(x, y, 1)
            ax2.plot(x, np.polyval(p, x), "k--", lw=1)
            y = pdsi_av
            p = np.polyfit(x, y, 1)
            ax1.plot(x, np.polyval(p, x), "k--", lw=1)
    ax1.set_title("(a): Regional mean PDSI")
    ax1.set_ylabel("PDSI")
    ax2.set_title("(b): Projection on fingerprint")
    # ax2.set_ylim(-22, 22)
    ax2.set_ylabel("Projection")
    ax1.set_ylim(-1.6, 1.6)
    plt.legend(ncol=4, fontsize=8)

def pad_by_10(X, year1, year2):
    """Pad an array at the beginning with an artificial 10-year spinup,
    in which each value is set to the climatology."""
    if isinstance(year1, str):
        year, month, day = year1.split("-")
        year1 = cdtime.comptime(int(year), int(month), int(day))
    if isinstance(year2, str):
        year, month, day = year2.split("-")
        year2 = cdtime.comptime(int(year), int(month), int(day))
    tax = X.getTime()
    lastten = [year1.sub(x, cdtime.Months) for x in range(1, 121)][::-1]
    dayax = np.array([x.torel(tax.units).value for x in lastten])
    tax_new = np.append(dayax, tax)
    new_time_axis = cdms.createAxis(tax_new)
    new_time_axis.designateTime()
    new_time_axis.id = "time"
    new_time_axis.units = tax.units
    Xnew = MV.zeros((len(tax_new),) + X.shape[1:])
    Xnew[:120] = np.repeat(MV.average(X(time=(year1, year2)), axis=0).asma()[np.newaxis, :, :],
                           120, axis=0)
    Xnew[120:] = X
    for k in X.attributes.keys():
        setattr(Xnew, k, X.attributes[k])
    Xnew.setAxisList([new_time_axis] + X.getAxisList()[1:])
    return Xnew

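# A usage sketch (the variable name is hypothetical): prepend a 10-year
# climatological spinup, computed over the first decade, to a monthly field.
# padded = pad_by_10(pr_monthly, "1900-1-1", "1909-12-31")
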
def __init__(self, dataset):
    f = cdms.open("DATA/OBS/PROCESSED/" + dataset + ".nc")
    self.data = {}
    obs_w = f("pr_W")
    self.data["west"] = obs_w
    stop_time = cmip5.stop_time(obs_w)
    if stop_time.month != 12:
        stop_time = cdtime.comptime(stop_time.year - 1, 12, 31)
    start_time = cmip5.start_time(obs_w)
    if start_time.month != 1:
        start_time = cdtime.comptime(start_time.year + 1, 1, 1)
    obs_w = obs_w(time=(start_time, stop_time))
    obs_w = fp.by_month(obs_w)
    obs_e = f("pr_CE")
    self.data["east"] = obs_e
    stop_time = cmip5.stop_time(obs_e)
    if stop_time.month != 12:
        stop_time = cdtime.comptime(stop_time.year - 1, 12, 31)
    start_time = cmip5.start_time(obs_e)
    if start_time.month != 1:
        start_time = cdtime.comptime(start_time.year + 1, 1, 1)
    obs_e = obs_e(time=(start_time, stop_time))
    obs_e = fp.by_month(obs_e)
    self.reshaped = {}
    self.reshaped["east"] = obs_e - MV.average(obs_e, axis=0)
    self.reshaped["west"] = obs_w - MV.average(obs_w, axis=0)
    self.reshaped["multi"] = [self.reshaped["west"], self.reshaped["east"]]
    self.dataset = dataset

def test_xdrange_CdtimeCompDateInput_StringDateOutput(self):
    '''Testing xdrange with cdtime comptime input, string date output and stepdays'''
    gen = days.xdrange(startdate=cdtime.comptime(2011, 6, 10),
                       enddate=cdtime.comptime(2011, 6, 14),
                       stepdays=2, returnType='s')
    checkdates = ['20110610', '20110612', '20110614']
    count = 0
    for date in gen:
        assert date == checkdates[count]
        count = count + 1

def DA_histogram(self, experiment, direction, start=None, stop=None, datasets=None):
    fingerprint = getattr(self, experiment)
    if start is None:
        start = cmip5.start_time(self.gpcp.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(self.gpcp.reshaped["east"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    # get the h85 projections over the same time period
    H85m = self.model_projections(experiment, direction)(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    # get the piControl projection time series
    noise = self.noise_projections(experiment, direction)
    L = stop.year - start.year + 1
    noise_trends = da.get_slopes(noise, L)
    # plot
    plt.hist(H85_trends.compressed(), 25, color=da_colors("h85"), alpha=.5, normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends, color=da_colors("h85"), lw=3, label="H85")
    da.fit_normals_to_data(noise_trends, color=da_colors("piC"), lw=3, label="piControl")
    # plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    # Project the observations
    if datasets is None:
        datasets = ["gpcp", "cmap", "precl"]
    if not isinstance(datasets, list):
        datasets = [datasets]
    for dataset in datasets:
        obs_proj = self.obs_projections(experiment, dataset, direction)(time=(start, stop))
        obs_trend = cmip5.get_linear_trends(obs_proj)
        plt.axvline(obs_trend, label=dataset, color=da_colors(dataset))
        print(dataset + " S/N is: " + str(obs_trend / np.std(noise_trends)))

def rcp_solver(D, early_start=None, early_stop=None):
    if early_start is None:
        early_start = cdtime.comptime(2006, 1, 1)
    if early_stop is None:
        early_stop = cdtime.comptime(2099, 12, 31)
    earlydata = D.ALL.model(time=(early_start, early_stop))
    early_mma = MV.average(cmip5.ensemble2multimodel(earlydata), axis=0)
    earlysolver = Eof(early_mma, weights="area")
    return earlysolver

def aerosol_solver(D, aerosol_start=None, aerosol_stop=None, include_cru=False):
    if aerosol_start is None:
        aerosol_start = cdtime.comptime(1950, 1, 1)
    if aerosol_stop is None:
        aerosol_stop = cdtime.comptime(1975, 1, 1)
    aerosoldata = D.ALL.model(time=(aerosol_start, aerosol_stop))
    aerosol_mma = MV.average(cmip5.ensemble2multimodel(aerosoldata), axis=0)
    aerosolsolver = Eof(aerosol_mma, weights="area")
    return aerosolsolver

def timestr2comp(self, date):
    """
    :func:`timestr2comp`: Convert a date from yyyymmdd[hh] format into a
    cdtime.comptime object.

    Condition: the passed date must be in yyyymmdd format, as either int or str.

    Inputs: date in yyyymmdd format or yyyymmddhh format, i.e. the hour (hh)
            is optional.
    Outputs: the date as a cdtime.comptime object.

    Usage:

    example1:
    >>> timestr2comp(20110423)
    2011-4-23 0:0:0.0

    .. note:: Returns a comptime. Here we didn't pass the hour,
              i.e. only yyyymmdd format.

    example2:
    >>> timestr2comp(2011082010)
    2011-8-20 10:0:0.0

    .. note:: Here it returns a cdtime with hours as well.
              We passed yyyymmddhh format, i.e. including hh.

    example3:
    >>> timestr2comp(2011082023)
    2011-8-20 23:0:0.0

    .. note:: We cannot pass 24 as the hour here; 23 hours max.

    Written by: Arulalan.T
    Date: 23.04.2011
    Updated: 21.08.2011
    """
    if str(type(date)) == "<type 'comptime'>":
        # the passed date is already a comptime object
        return date
    if isinstance(date, int):
        date = str(date)
    # re match
    if self.comppattern.match(date):
        # i.e. date is a comptime in string format,
        # so make it a comptime object
        return cdtime.s2c(date)
    if self.ymdpattern.match(date):
        # i.e. date is a yyyymmdd string
        year = int(date[0:4])
        month = int(date[4:6])
        day = int(date[6:8])
        if len(date) == 10:
            hour = int(date[-2:])
            return cdtime.comptime(year, month, day, hour)
        else:
            return cdtime.comptime(year, month, day)
    else:
        raise _TimeUtilityStringError('The given date must be either a comptime '
                                      'object, a comptime string, or in yyyymmdd format')

def getCompTime(timeString):
    print(" >> GetCompTime: ", timeString)
    timeStringFields = timeString.strip("'").split(' ')
    date = timeStringFields[0].split('-')
    if len(timeStringFields) == 1:
        return cdtime.comptime(int(date[0]), int(date[1]), float(date[2]))
    else:
        time = timeStringFields[1].split(':')
        for iT in range(3):
            time.append(0)
        return cdtime.comptime(int(date[0]), int(date[1]), int(date[2]),
                               int(time[0]), int(time[1]), float(time[2]))

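# Example round trips (illustrative): missing time-of-day fields default to 0,
# since the parser pads the time list with zeros before indexing it.
# getCompTime("2010-08-25 15:26:00")  -> comptime for 2010-8-25 15:26:0.0
# getCompTime("2010-08-25")           -> comptime for 2010-8-25 0:0:0.0
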
def DA_histogram(fingerprint, obslist, h85, piC, direction, start=None, stop=None):
    if isinstance(obslist, list):
        obs = obslist[0]
    else:
        obs = obslist
    if start is None:
        start = cmip5.start_time(obs.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(obs.reshaped["east"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    # project the observations onto the fingerprint
    obs_proj = obs_projections(fingerprint, obs, direction)(time=(start, stop))
    obs_trend = cmip5.get_linear_trends(obs_proj)
    # get the h85 projections over the same time period
    H85m = model_projections(fingerprint, h85, direction)(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    # get the piControl projection time series
    noise = noise_projections(fingerprint, piC, direction)
    L = len(obs_proj)
    noise_trends = da.get_slopes(noise, L)
    # plot
    plt.hist(H85_trends.compressed(), 25, color=da_colors("h85"), alpha=.5, normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends, color=da_colors("h85"), lw=3, label="H85")
    da.fit_normals_to_data(noise_trends, color=da_colors("piC"), lw=3, label="piControl")
    plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    if isinstance(obslist, list):
        for obs in obslist[1:]:
            obs_proj = obs_projections(fingerprint, obs, direction)(time=(start, stop))
            obs_trend = cmip5.get_linear_trends(obs_proj)
            plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    return H85, noise, obs_proj

def test_xdrange_CdtimeCompDateInput_CdtimeCompDateOutput(self):
    '''Testing xdrange with cdtime comptime input and output, and stepdays'''
    gen = days.xdrange(startdate=cdtime.comptime(2011, 6, 10),
                       enddate=cdtime.comptime(2011, 6, 14),
                       stepdays=2, returnType='c')
    checkdates = []
    checkdates.append(cdtime.comptime(2011, 6, 10))
    checkdates.append(cdtime.comptime(2011, 6, 12))
    checkdates.append(cdtime.comptime(2011, 6, 14))
    count = 0
    for date in gen:
        assert date == checkdates[count]
        count = count + 1

def read_data_in(
    path,
    var_in_data,
    var_to_consider,
    start_time,
    end_time,
    UnitsAdjust,
    LandMask,
    debug=False,
):
    f = cdms2.open(path)
    data_timeseries = f(var_in_data, time=(start_time, end_time), latitude=(-90, 90))
    cdutil.setTimeBoundsMonthly(data_timeseries)
    # missing data check
    check_missing_data(data_timeseries)
    if UnitsAdjust[0]:
        data_timeseries = getattr(MV2, UnitsAdjust[1])(data_timeseries, UnitsAdjust[2])
    if var_to_consider == "ts" and LandMask:
        # Replace temperature below -1.8 C with -1.8 C (sea ice)
        data_timeseries = sea_ice_adjust(data_timeseries)
    # Check available time window and adjust if needed
    data_stime = data_timeseries.getTime().asComponentTime()[0]
    data_etime = data_timeseries.getTime().asComponentTime()[-1]
    data_syear = data_stime.year
    data_smonth = data_stime.month
    data_eyear = data_etime.year
    data_emonth = data_etime.month
    if data_smonth > 1:
        data_syear = data_syear + 1
    if data_emonth < 12:
        data_eyear = data_eyear - 1
    debug_print("data_syear: " + str(data_syear) + " data_eyear: " + str(data_eyear), debug)
    data_timeseries = data_timeseries(time=(
        cdtime.comptime(data_syear, 1, 1, 0, 0, 0),
        cdtime.comptime(data_eyear, 12, 31, 23, 59, 59),
    ))
    f.close()
    return data_timeseries, data_syear, data_eyear

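# A usage sketch (file name and arguments are hypothetical): read a sea surface
# temperature series trimmed to whole calendar years, converting units via the
# (flag, MV2-function-name, operand) UnitsAdjust tuple.
# ts, y0, y1 = read_data_in('HadISST_sst.nc', 'sst', 'ts',
#                           cdtime.comptime(1980, 1, 1),
#                           cdtime.comptime(2000, 12, 31),
#                           (True, 'subtract', 273.15), LandMask=True)
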
def recup_ftp_sst_seviri_recent(self, URL_CERSAT, DATA_DIR, YYYY, MM, DD, ext,
                                usr, pwd, ntry, timerange):
    """Retrieve a recent hourly SEVIRI SST file over FTP (available since 2012-01-01)."""
    import os, subprocess
    from ftplib import FTP
    import cdtime
    from vacumm.misc.atime import strftime
    # --------------- THE FTP WORKS BUT THE FILES SEEM CORRUPTED ... PROBLEM IN THE BZ2
    # -- Append the year and the day-of-year (001-365) to the directory name
    DATA_DIR = os.path.join(DATA_DIR, str(YYYY))
    print(DATA_DIR)
    a = cdtime.comptime(YYYY, MM, DD)
    a2 = a.torel("days since " + strftime("%Y-%m-%d", cdtime.comptime(YYYY - 1, 12, 31)))
    DATA_DIR = os.path.join(DATA_DIR, "%03d" % a2.value)
    print(DATA_DIR)
    # Connect to CDOCO
    ftp = FTP(host=URL_CERSAT, user=usr, passwd=pwd)
    # Move to the "best_estimate" directory
    ftp.cwd(DATA_DIR)
    if timerange == "midnight":
        filename = "%04d%02d%02d00*" % (YYYY, MM, DD)
    else:
        filename = "%04d%02d%02d*" % (YYYY, MM, DD)
    list_file = ftp.nlst(filename)
    for file_to_read in list_file:
        # Download the file only if it is not already present in work/MODEL/MARS
        if not os.path.isfile(file_to_read):
            # TODO: also test the file date ... download only if more recent
            with open(file_to_read, "wb") as fobj:
                ftp.retrbinary("RETR " + file_to_read, fobj.write)
            subprocess.call(["bunzip2", "-f", file_to_read])
        else:
            print("No download: %(file_to_read)s already exists" % vars())
    ftp.quit()

def makeCalendar(timeStart, timeEnd, calendarStep='months', monthStart=1, monthEnd=12, dayStep=1):
    """
    Documentation for makeCalendar():
    -----
    The makeCalendar() function creates a time calendar for given dates

    Author: Paul J. Durack : [email protected]

    Inputs:
    -----
    | **timeStart** - string start time (e.g. '2001' or '2001-1-1 0:0:0.0')
    | **timeEnd** - string end time
    | **calendarStep <optional>** - string, either 'months' or 'days'
    | **monthStart <optional>** - int
    | **monthEnd <optional>** - int
    | **dayStep <optional>** - int

    Returns:
    -----
    | **time** - cdms2 transient axis

    Usage:
    -----
    >>> from durolib import makeCalendar
    >>> time = makeCalendar('2001','2014',calendarStep='month')

    Notes:
    -----
    * PJD 30 Apr 2015 - Fixed 'off by one' error with partial years
    * TODO: Update to take full date identifier '2001-1-1 0:0:0.0', not just year
    * Issues with the daily calendar creation - likely require tweaks to
      cdutil.times.setAxisTimeBoundsDaily (range doesn't accept fractions, only ints)
    * Consider reviewing calendar assignment in /work/durack1/Shared/obs_data/AQUARIUS/read_AQ_SSS.py
    """
    # First check inputs
    if calendarStep not in ['days', 'months']:
        print('** makeCalendar error: calendarStep unknown, exiting..')
        return
    if not isinstance(timeStart, str) or not isinstance(timeEnd, str):
        print('** makeCalendar error: timeStart or timeEnd invalid, exiting..')
        return
    if not (int(monthStart) in range(1, 13) and int(monthEnd) in range(1, 13)):
        print('** makeCalendar error: monthStart or monthEnd invalid, exiting..')
        return
    try:
        timeStartTest = cdt.comptime(int(timeStart))
        timeEndTest = cdt.comptime(int(timeEnd))
    except SystemExit as err:
        print('** makeCalendar error: timeStart invalid - ', err)
        return

def __init__(self, cfg=None):
    import os, cdtime
    Satellite.__init__(self)
    SCRIPT_DIR = os.getcwd()
    self.SCRIPT_DIR = SCRIPT_DIR
    # -- backward compatibility
    if cfg is None:
        config = ConfigParser.RawConfigParser()
        config.read(os.path.join(SCRIPT_DIR, 'config.cfg'))
        andeb = config.getint('Time Period', 'andeb')
        anfin = config.getint('Time Period', 'anfin')
        mdeb = config.getint('Time Period', 'mdeb')
        mfin = config.getint('Time Period', 'mfin')
        jdeb = config.getint('Time Period', 'jdeb')
        jfin = config.getint('Time Period', 'jfin')
        hdeb = config.getint('Time Period', 'hdeb')
        hfin = config.getint('Time Period', 'hfin')
        self.WORKDIR = config.get('Env', 'workdir')
    else:
        andeb = cfg['Time Period']['andeb']
        anfin = cfg['Time Period']['anfin']
        mdeb = cfg['Time Period']['mdeb']
        mfin = cfg['Time Period']['mfin']
        jdeb = cfg['Time Period']['jdeb']
        jfin = cfg['Time Period']['jfin']
        hdeb = cfg['Time Period']['hdeb']
        hfin = cfg['Time Period']['hfin']
        self.WORKDIR = cfg['Env']['workdir']
    # Convert to "Component Time"
    self.ctdeb = cdtime.comptime(andeb, mdeb, jdeb, hdeb, 0, 0)
    self.ctfin = cdtime.comptime(anfin, mfin, jfin, hfin, 0, 0)
    # Data storage directory
    DIR_SST = os.path.join(self.WORKDIR, 'SST')
    if not os.path.isdir(DIR_SST):
        os.mkdir(DIR_SST)
    self.WORKDIR = os.path.join(self.WORKDIR, 'SST')
    self.name = "Satellite Sea surface Temperature"
    self.shortname = "SST"
    self.units = " "

def test_xdrange_StringDate_CalendarInput_CdtimeCompDateOutput(self):
    '''Testing xdrange with string date input, an explicit calendar and
    cdtime comptime output'''
    gen = days.xdrange(startdate='20120226', enddate='20120301',
                       calendarName=cdtime.NoLeapCalendar, returnType='c')
    checkdates = []
    checkdates.append(cdtime.comptime(2012, 2, 26))
    checkdates.append(cdtime.comptime(2012, 2, 27))
    checkdates.append(cdtime.comptime(2012, 2, 28))
    # 2012-2-29 not included, because we are passing the NoLeapCalendar option
    checkdates.append(cdtime.comptime(2012, 3, 1))
    count = 0
    for date in gen:
        assert date == checkdates[count]
        count = count + 1

def NatureRevisions_Figure3(D, start_year=2019):
    plt.subplot(311)
    Plotting.time_plot(D.ALL.get_noise(), color=get_dataset_color("tree"),
                       label="Pre-industrial Noise", lw=1)
    Plotting.time_plot(D.ALL.projection(time=('1850-1-1', '1975-12-31')), c="k", lw=1)
    plt.ylabel("Projection (temporal amplitude)", fontsize=8)
    plt.xlabel("Year")
    plt.legend()
    plt.title("(a)")  # : Global Drought Atlas Projection Onto Fingerprint

    plt.subplot(312)
    t1900 = cdtime.comptime(1900, 7, 1)
    times = np.arange(1, 201)
    likely = stats.norm.interval(.66)[1]
    vlikely = stats.norm.interval(.90)[1]
    certain = stats.norm.interval(.99)[1]
    plt.axhline(likely, lw=1, color=cm.Reds(.33), label="Likely", alpha=.5)
    plt.axhline(vlikely, lw=1, color=cm.Reds(.66), label="Very Likely", alpha=.5)
    plt.axhline(certain, lw=1, color=cm.Reds(.99), label="Virtually Certain", alpha=.5)
    D.ALL.time_of_emergence(t1900, times=times, color="k", lw=3)
    pdsi_SN_figure(D, cdtime.comptime(1900, 1, 1), use_dai=True)
    plt.title("(b)")  # : Model-predicted time of emergence

    plt.subplot(313)
    hist_start = cdtime.comptime(1900, 1, 1)
    times = np.arange(10, 2100 - start_year)
    for X in ["ALL", "ANZDA", "MADA", "MXDA", "NADA", "OWDA"]:
        getattr(D, X).time_of_emergence(cdtime.comptime(start_year, 1, 1), times=times,
                                        color=colorregions(X), uncertainty="shade")
    certain = stats.norm.interval(.99)[1]
    plt.axhline(certain, lw=1, color=cm.Reds(.99), label="Virtually Certain", alpha=.5)
    plt.title("(c)")  # : Time of Emergence (assuming start_year start)

    ax1, ax2, ax3 = plt.gcf().axes
    ax2.set_xlim(1900, 2055)
    ax2.set_ylim(-3, 7)
    for ax in [ax1, ax2, ax3]:
        ax.set_xlabel(ax.get_xlabel(), fontsize=8)
        ax.set_ylabel(ax.get_ylabel(), fontsize=8)
    ax3.set_xlim(start_year + 10, start_year + 55)
    ax2.legend(fontsize=6)
    ax3.legend(fontsize=6)
    ax1.legend(fontsize=6)
    for ax in plt.gcf().axes:
        plt.setp(ax.get_xticklabels(), fontsize=6)
        plt.setp(ax.get_yticklabels(), fontsize=6)
        plt.setp(ax.xaxis.get_label(), fontsize=6)
        plt.setp(ax.yaxis.get_label(), fontsize=6)

def pdsi_SN_figure(D, start_time=None, stop_time=None, use_dai=True):
    noise_cru = []
    noise_dai = []
    noise_tree = []
    noise = []
    signal_cru = []
    signal_dai = []
    signal_tree = []
    if start_time is None:
        start_time = cdtime.comptime(1981, 1, 1)
    if stop_time is None:
        stop_time = cdtime.comptime(2017, 12, 31)
    stop_cru = cdtime.comptime(2017, 12, 31)
    stop_dai = cdtime.comptime(2014, 12, 31)
    start_cru = cdtime.comptime(1901, 1, 1)
    start_dai = cdtime.comptime(1901, 1, 1)
    pcru = D.ALL.project_cru_on_solver(start=start_cru)
    pdai = D.ALL.project_dai_on_solver(start=start_dai)
    start_tree = cdtime.comptime(1400, 1, 1)
    stop_tree = cdtime.comptime(1975, 12, 31)
    nt = stop_cru.year - start_time.year
    nmodel = D.ALL.P.shape[0]
    H85 = np.ma.zeros((nmodel, nt))
    t = start_time.add(1, cdtime.Years)
    i = 0
    cru_time = []
    tree_time = []
    dai_time = []
    while t.cmp(stop_time) < 0:
        L = t.year - start_time.year + 1
        modslopes, noiseterm = D.ALL.sn_at_time(start_time, L)
        H85[:, i] = modslopes
        noise += [np.std(noiseterm)]
        if (t.cmp(stop_cru) <= 0) and (t.cmp(start_cru) > 0):
            signal_cru += [float(cmip5.get_linear_trends(pcru(time=(start_time, t))))]
            cru_time += [t.year]
            noise_cru += [np.std(noiseterm)]
        if (t.cmp(stop_dai) <= 0) and (t.cmp(start_dai) > 0):
            signal_dai += [float(cmip5.get_linear_trends(pdai(time=(start_time, t))))]
            dai_time += [t.year]
            noise_dai += [np.std(noiseterm)]
        if t.cmp(stop_tree) <= 0:
            signal_tree += [float(cmip5.get_linear_trends(D.ALL.projection(time=(start_time, t))))]
            tree_time += [t.year]
            noise_tree += [np.std(noiseterm)]
        t = t.add(1, cdtime.Years)
        i += 1
    timex = np.arange(start_time.year + 1, start_time.year + 1 + nt)
    # for i in range(nmodel):
    #     plt.plot(timex, H85[i]/np.array(noise), c="k", lw=1, alpha=.2)
    plt.plot(cru_time, np.array(signal_cru) / np.array(noise_cru), label="CRU",
             color=get_dataset_color("cru"), lw=3)
    if use_dai:
        plt.plot(dai_time, np.array(signal_dai) / np.array(noise_dai), label="Dai",
                 color=get_dataset_color("dai"), lw=3)
    plt.plot(tree_time, np.array(signal_tree) / np.array(noise_tree), label="Tree Rings",
             color=get_dataset_color("tree"), lw=3)

def rappatrie(self, cfg=None):
    """Retrieve the SEVIRI SST data by FTP"""
    import os, subprocess, cdtime
    # ----------------------------------------------------
    print("")
    print("---------- RETRIEVING SEVIRI FILES ----------")
    print("")
    # ----------------------------------------------------
    # -------------------------------------------------------------
    # ------- data retrieval: general settings (ftp site, etc.)
    if cfg is None:
        config = ConfigParser.RawConfigParser()
        config.read(os.path.join(self.SCRIPT_DIR, "config.cfg"))
    try:
        if cfg is None:
            timerange = config.get("Seviri SST", "timerange")
        else:
            timerange = cfg["Seviri SST"]["timerange"]
        # timerange = 'midnight' for midnight-only data
        # timerange = 'all' for all hours
    except ConfigParser.NoOptionError:
        print("No Time Range")
        timerange = "all"  # By default, read all hours
    if self.ctdeb >= cdtime.comptime(2012, 1, 1, 0, 0, 0):
        print(timerange)
        print("Data moved to /home5/taveeg/cache/project/osi-saf/data/sst/l3c/seviri/osi-saf/")
        EXT_SEVIRI = ".nc"
        if cfg is None:
            URL_SEVIRI_DATA = config.get("Seviri SST", "recent_dir")
            URL_CERSAT = config.get("Seviri SST", "url_cersat_rt")
            DATA_DIR = config.get("Seviri SST", "data_dir_rt")
            # URL_SEVIRI_DATA = os.path.join(URL_CERSAT, DATA_DIR)
            usr = config.get("Seviri SST", "user")
            pwd = config.get("Seviri SST", "pwd")
        else:
            URL_SEVIRI_DATA = cfg["Seviri SST"]["recent_dir"]
            URL_CERSAT = cfg["Seviri SST"]["url_cersat_rt"]
            DATA_DIR = cfg["Seviri SST"]["data_dir_rt"]
            # URL_SEVIRI_DATA = os.path.join(URL_CERSAT, DATA_DIR)
            usr = cfg["Seviri SST"]["user"]
            pwd = cfg["Seviri SST"]["pwd"]
        # -- retrieve the data over NFS
        # ----------------------------------------------------
        ctest = self.ctdeb
        while ctest <= self.ctfin:
            # Over FTP the downloaded files are corrupted ... BZ2 problem
            # Seviri.recup_ftp_sst_seviri_recent(self, URL_CERSAT, DATA_DIR, ctest.year,
            #                                    ctest.month, ctest.day, EXT_SEVIRI, usr, pwd, '9')
            Seviri.recup_nfs_sst_seviri_recent(self, URL_SEVIRI_DATA, ctest.year,
                                               ctest.month, ctest.day, timerange)
            # Increment the "test" time by one day
            ctest = ctest.add(1, cdtime.Days)

def SignalToNoise(D, fortalk=False):
    if fortalk:
        plt.figure()
    else:
        plt.subplot(211)
    time_of_emergence_figure(D, noisestart=cmip5.start_time(D.ALL.obs))
    plt.title("(a): Time of emergence for PDSI signal")
    plt.xlim(1985, 2050)
    plt.legend(ncol=2)
    if fortalk:
        plt.figure()
        ax = plt.subplot(111)
    else:
        ax = plt.subplot(212)
    start_time = cdtime.comptime(1981, 1, 1)
    ALL_SM = soil.SoilMoisture(D.ALL.obs.mask[0])
    ALL_SM.time_of_emergence(start_time, "30cm", ax=ax, color=cm.Set1(1 / 2.))
    ALL_SM.time_of_emergence(start_time, "2m", ax=ax, color=cm.Set1(2 / 2.))
    plt.xlim(1985, 2050)
    plt.title("(b): Times of emergence for soil moisture metrics")
    # noisefigure(D)
    plt.legend()
    plt.title("(b): Preindustrial \"noise\" terms")

def var_pdf_daily(var, season, years):
    "Calculate the daily climatology of each variable over a 90-day season"
    if season == 'JJA':
        mo0 = 6
    if season == 'SON':
        mo0 = 9
    if season == 'DJF':
        mo0 = 12
    if season == 'MAM':
        mo0 = 3
    var_da_year = np.empty([len(years), 90]) * np.nan
    for iy, year in enumerate(years):
        t1 = cdtime.comptime(year, mo0, 1)
        t2 = t1.add(90, cdtime.Days)
        # try:
        var_yr = var(time=(t1, t2, 'co'))
        var_da_year[iy, :] = var_yr
        if var.id == 'tas':
            var_da_year[iy, :] = var_da_year[iy, :] - 273.15  # K -> degC
        if var.id == 'pr':
            var_da_year[iy, :] = var_da_year[iy, :] * 3600. * 24.  # kg/m2/s -> mm/day
        # except:
        #     print(str(year) + ' not Available!')
        #     var_da_year[iy, :] = np.nan
    var_da = np.reshape(var_da_year, (90 * len(years)))
    return var_da

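# A usage sketch (inputs are hypothetical): concatenate the 90-day JJA series
# of a pre-loaded 'tas' transient variable over three years into one array.
# tas_jja = var_pdf_daily(tas, 'JJA', [1999, 2000, 2001])
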
def forTable(D):
    aerosol_start = cdtime.comptime(1950, 1, 1)
    aerosol_stop = cdtime.comptime(1975, 12, 31)
    hist_start = cdtime.comptime(1900, 1, 1)
    hist_stop = cdtime.comptime(1949, 12, 31)
    with open("../DroughtAtlasPaper/FIGS/ForPaper/SI/Table1.csv", "w") as fw:
        csvwriter = csv.writer(fw, delimiter=',', quotechar='"',
                               quoting=csv.QUOTE_MINIMAL)
        for X in ["ANZDA", "MADA", "MXDA", "NADA", "OWDA", "NHDA", "NoNADA", "NoMEX"]:
            towrite = (getattr(D, X).obs_SN(hist_start, stop_time=hist_stop, plot=False)
                       + getattr(D, X).obs_SN(aerosol_start, stop_time=aerosol_stop, plot=False))
            towrite2 = [X] + list(map(str, towrite))
            csvwriter.writerow(towrite2)

def read_Marshall(infile, metric, time_start):
    """Read the data from http://www.antarctica.ac.uk/met/gjma/sam.html"""
    SAM_index = pandas.read_csv(infile, skiprows=2, delim_whitespace=True,
                                header=0, index_col=0, names=range(1, 13))
    output_data = []
    output_times = []
    for year, data in SAM_index.iterrows():
        for month in data.keys():
            if not numpy.isnan(data[month]):
                output_data.append(data[month])
                ct = cdtime.comptime(year, month, 15)
                rt = ct.torel(time_start).value
                output_times.append(rt)
    output_atts = {'id': 'SAM',
                   'standard_name': 'SAM',
                   'long_name': 'Southern Annular Mode index',
                   'units': '',
                   'notes': 'SAM index (Marshall, 2003)'}
    history_dict = {'history': datetime.now().strftime("%a %b %d %H:%M:%S %Y") +
                    ': downloaded from http://www.antarctica.ac.uk/met/gjma/sam.html \n'}
    return numpy.array(output_data), output_times, output_atts, history_dict

def two_times_from_one(t):
    """Input is a time representation, either as the packed int used in the
    cdscan script, or a string in the format "2010-08-25 15:26:00", or as a
    cdtime comptime (component time) object.  Output is the same time, both
    as a packed int _and_ as a comptime."""
    if isinstance(t, str):
        t = cdtime.s2c(t)
    if isinstance(t, int) and t > 1000000000:
        tl = t
        year = tl // 1000000000
        rem = tl % 1000000000
        month = rem // 10000000
        rem = rem % 10000000
        day = rem // 100000
        allsecs = rem % 100000
        sec = allsecs % 60
        allmins = allsecs // 60
        min = allmins % 60
        hour = allmins // 60
        tc = cdtime.comptime(year, month, day, hour, min, sec)
    else:
        # I'd like to check that t is type comptime, but although Python
        # prints the type as <type 'comptime'> it won't recognize as a type
        # comptime or anything similar.  Note that cdtime.comptime is a C
        # function available from Python.
        tc = t
    tl = tc.year * 1000000000
    tl += tc.month * 10000000
    tl += tc.day * 100000
    tl += tc.hour * 3600
    tl += tc.minute * 60
    tl += int(tc.second)
    return tl, tc

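# The packed encoding is year*10**9 + month*10**7 + day*10**5 + seconds-in-day.
# A small runnable check of that arithmetic (the date is illustrative):
# "2010-08-25 15:26:00" packs to
tl_expected = 2010 * 10**9 + 8 * 10**7 + 25 * 10**5 + (15 * 3600 + 26 * 60)
assert tl_expected == 2010082555560
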
def checkDatawc(self, name, value):
    checkName(self, name, value)
    if isNumber(value):
        value = float(value), 0
    elif isinstance(value, str):
        t = cdtime.s2c(value)
        if t != cdtime.comptime(0, 1):
            t = t.torel(self.datawc_timeunits, self.datawc_calendar)
            value = float(t.value), 1
        else:
            raise ValueError('The ' + name + ' attribute must be either an integer or a float value or a date/time.')
    elif type(value) in [type(cdtime.comptime(1900)),
                         type(cdtime.reltime(0, 'days since 1900'))]:
        value = value.torel(self.datawc_timeunits, self.datawc_calendar).value, 1
    else:
        raise ValueError('The ' + name + ' attribute must be either an integer or a float value or a date/time.')
    return value

def createObsRainfallData(today):
    outdir = os.path.join(outpath, today)
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    os.chdir(outdir)
    cDay = datetime.datetime.strptime(today, "%Y%m%d")
    tDay = cDay.strftime('%d%m%Y')
    mDay = cDay.strftime('%d%b%Y').lower()
    infile = '/gpfs4/home/akmitra/tmp/mrgdr/GPM%s.grd' % tDay
    if not os.path.isfile(infile):
        return
    ctlfilename = 'IMDGPM_%s.ctl' % today
    f = open(ctlfilename, 'w')
    f.write(ctltemplate % (tDay, mDay))
    f.close()
    f = cdms2.open(ctlfilename)
    rain = f('arf25', latitude=(5, 40), longitude=(65, 110))
    rain.id = 'rainfall'
    rain.comments = 'IMD GPM Merged Rainfall on ' + today
    tunits = 'seconds since 1970-01-01 00:00:00.0'
    # time value at 03 UTC on the given day
    csec = cdtime.comptime(cDay.year, cDay.month, cDay.day, 3).torel(tunits).value
    rtime = cdms2.createAxis(numpy.array([csec]), id='time')
    rtime.units = tunits
    rtime.designateTime()
    rain.setAxis(0, rtime)
    rnc = 'rf_%s.nc' % today
    f2 = cdms2.open(rnc, 'w')
    f2.write(rain)
    f2.close()
    f.close()
    cmd = 'rm ' + ctlfilename
    subprocess.call(cmd, shell=True)
    grads_cmd = """'sdfopen %s'
'define rainfall=rainfall'
'set sdfwrite -3dt -rt RFG_%s.nc'
'sdfwrite rainfall'
'quit'
""" % (rnc, today)
    f3 = open('rainfall.gs', 'w')
    f3.write(grads_cmd)
    f3.close()
    cmd = grads + ' -blc rainfall.gs'
    subprocess.call(cmd, shell=True)
    cmd = 'rm rainfall.gs %s ' % rnc
    subprocess.call(cmd, shell=True)
    cmd = 'mv RFG_%s.nc IMD_GPM_Obs_Rainfall_%s.nc ' % (today, today)
    subprocess.call(cmd, shell=True)

def concatenate_this(piC, modaxis=0, compressed=False):
    if not ("time" in piC.getAxisIds()):
        print("Need a time axis to concatenate along")
        raise TypeError
    if compressed:
        axlist = piC.getAxisList()
        todel = []
        modax = piC.getAxis(modaxis)
        allfiles = eval(modax.models)
        if len(allfiles) != piC.shape[modaxis]:
            allfiles = [x + ".xml" for x in allfiles[0].split(".xml")[:-1]]
        allfiles = np.array(allfiles)
        # assume model axis is 0 for this
        for i in range(len(allfiles)):
            if len(piC[i].compressed()) != len(piC[i]):
                todel += [i]
        piC = MV.array(np.delete(piC, todel, axis=modaxis))
        allfiles = np.delete(allfiles, todel).tolist()
        newmodax = cmip5.make_model_axis(allfiles)
        piC.setAxisList([newmodax] + axlist[1:])
    naxes = len(piC.shape)
    timeaxis = piC.getAxisIds().index("time")
    dimensions = piC.shape
    nmodc = dimensions[modaxis]
    ntc = dimensions[timeaxis]
    newdim = (nmodc * ntc,)
    units = 'days since 0001-1-1'
    start = cdtime.comptime(1, 1, 1)
    tax = cdms.createAxis(np.arange(0, nmodc * ntc * 365, 365) + 15.5)
    # tax = cdms.createAxis([start.add(i, cdtime.Months).torel(units).value
    #                        for i in range(ntc * nmodc)])
    tax.units = units
    tax.id = "time"
    tax.designateTime()
    newaxes = [tax]
    if len(dimensions) > 2:
        for i in range(len(dimensions)):
            if (i != timeaxis) and (i != modaxis):
                newdim += (dimensions[i],)
                newaxes += [piC.getAxis(i)]
    piC_concatenate = piC.reshape(newdim)
    piC_concatenate.setAxisList(newaxes)
    return piC_concatenate

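# A usage sketch (the input variable is hypothetical): flatten a
# (model, time, ...) piControl array into one long series along a synthetic
# annual time axis, first dropping models that contain masked values.
# piC_long = concatenate_this(piC, modaxis=0, compressed=True)
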
def post(self, fetched, slab, axes, specifications, confined_by, aux, axismap):
    '''Post processing retouches the bounds and later will deal with the mask'''
    import cdms2 as cdms
    fetched = cdms.createVariable(fetched, copy=1)
    faxes = fetched.getAxisList()
    a = None
    for i in range(len(faxes)):
        if confined_by[i] is self:
            newaxvals = []
            bounds = []
            a = None
            sh = list(fetched.shape)
            sh[i] = 1
            for l in self.aux[i]:
                try:
                    tmp = fetched(**{faxes[i].id: (l, l)})
                    ax = tmp.getAxis(i)
                    newaxvals.append(ax[0])
                    if ax.getBounds() is not None:
                        bounds.append(ax.getBounds()[0])
                    else:
                        bounds = None
                except Exception as err:
                    # print('err:', err, 'match:', self.match)
                    if self.match == 1:
                        raise Exception('Error axis value :' + str(l) +
                                        ' was requested but is not present in slab\n(more missing might exist)')
                    elif self.match == 0:
                        tmp = MV2.ones(sh, typecode=MV2.float)
                        tmp = MV2.masked_equal(tmp, 1)
                        if type(l) in (type(cdtime.comptime(1999)),
                                       type(cdtime.reltime(0, 'days since 1999')),
                                       type('')):
                            if type(l) != type(''):
                                newaxvals.append(l.torel(faxes[i].units).value)
                            else:
                                newaxvals.append(cdtime.s2r(l, faxes[i].units).value)
                        else:
                            newaxvals.append(l)
                        if bounds is not None:
                            bounds.append([ax[-1] - 1., ax[-1] + 1])
                    else:
                        tmp = None
                if tmp is not None:
                    if a is None:
                        a = tmp
                    else:
                        a = MV2.concatenate((a, tmp), i)
            if bounds is not None:
                newax = cdms.createAxis(numpy.array(newaxvals),
                                        bounds=numpy.array(bounds), id=ax.id)
            else:
                newax = cdms.createAxis(numpy.array(newaxvals), id=ax.id)
            for att in faxes[i].attributes.keys():
                setattr(newax, att, faxes[i].attributes.get(att))
            for j in range(len(fetched.shape)):
                if j == i:
                    a.setAxis(i, newax)
                else:
                    a.setAxis(j, faxes[j])
            fetched = a.astype(fetched.dtype.char)
            faxes = fetched.getAxisList()

def getCompTime(cls, str_time):
    try:
        if str_time:
            itime = [int(float(tok)) for tok in
                     str_time.replace("-", " ").replace(":", " ").replace(",", " ").split()]
            return cdtime.comptime(*itime)
    except Exception as err:
        print("Error parsing time string '%s': %s" % (str_time, str(err)),
              file=sys.stderr)

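# Example (illustrative): '-', ':' and ',' are all treated as field separators,
# so these forms parse to the same component time:
#   "2001-05-12 6:30:00"  -> 2001-5-12 6:30:0.0
#   "2001,5,12,6,30,0"    -> 2001-5-12 6:30:0.0
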
def __init__(self):
    import cdtime
    SCRIPT_DIR = os.getcwd()
    self.SCRIPT_DIR = SCRIPT_DIR
    # Read the validation period
    config = ConfigParser.RawConfigParser()
    config.read(os.path.join(SCRIPT_DIR, 'config.cfg'))
    andeb = config.getint('Time Period', 'andeb')
    anfin = config.getint('Time Period', 'anfin')
    mdeb = config.getint('Time Period', 'mdeb')
    mfin = config.getint('Time Period', 'mfin')
    jdeb = config.getint('Time Period', 'jdeb')
    jfin = config.getint('Time Period', 'jfin')
    hdeb = config.getint('Time Period', 'hdeb')
    hfin = config.getint('Time Period', 'hfin')
    self.WORKDIR = config.get('Env', 'workdir')
    # Convert to "Component Time"
    self.ctdeb = cdtime.comptime(andeb, mdeb, jdeb, hdeb, 0, 0)
    self.ctfin = cdtime.comptime(anfin, mfin, jfin, hfin, 0, 0)
    # Data storage directory
    DIR_REC = os.path.join(self.WORKDIR, 'RECOPESCA')
    if not os.path.isdir(DIR_REC):
        os.chdir(self.WORKDIR)
        os.mkdir('RECOPESCA')
    self.WORKDIR = os.path.join(self.WORKDIR, 'RECOPESCA')
    self.name = "RECOPESCA data"
    self.shortname = "Recopesca"
    # dictionary holding the profiles (key: date)
    self.map_profiles = {}

def add_variable(self, varId, variable, **args):
    if self.time is not None:
        data_start = self.time['start'].split('-')
        part_time_step = self.time.get('step', 1)
        part_time_units = get_cdtime_units(self.time['units'])
        data_start_ct = cdtime.comptime(*[int(tok) for tok in data_start])
        partition_start_ct = data_start_ct.add(self.pIndex * part_time_step, part_time_units)
        partition_end_ct = partition_start_ct.add(part_time_step, part_time_units)
        wpsLog.info('Domain[%d]: addVariable: %s -> %s' %
                    (self.pIndex, str(partition_start_ct), str(partition_end_ct)))
        part_variable = variable(time=(partition_start_ct, partition_end_ct, 'co'))
        self.variables[varId] = part_variable

def checkTimeUnits(self, name, value):
    checkName(self, name, value)
    if not isinstance(value, str):
        raise ValueError("time units must be a string")
    a = cdtime.reltime(1, "days since 1900")
    try:
        a.torel(value)
    except Exception:
        raise ValueError(value + " is invalid time units")
    sp = value.split("since")[1]
    b = cdtime.s2c(sp)
    if b == cdtime.comptime(0, 1):
        raise ValueError(sp + " is invalid date")
    return value

def parse(value):
    parts = value.split(" ")
    if len(parts) == 1:
        # It's just a date
        date = value
        time = "0:0:0"
    else:
        date, time = parts
    # Parse date
    date_parts = date.split("-")
    num_date = [int(d) for d in date_parts if d != '']
    num_date += [0 for _ in range(3 - len(num_date))]
    year, month, day = num_date
    time_parts = time.split(":")
    num_time = [int(t) for t in time_parts if t != '']
    num_time += [0 for _ in range(3 - len(num_time))]
    hour, minute, second = num_time
    # Check if the units match up with the specificity
    if time_increment[0:6] == "second":
        if 0 in (year, month, day, hour, minute, second):
            return None
    elif time_increment[0:6] == "minute":
        if 0 in (year, month, day, hour, minute):
            return None
    elif time_increment[0:4] == "hour":
        if 0 in (year, month, day, hour):
            return None
    elif time_increment[0:3] == "day" or time_increment[0:4] == "week":
        if 0 in (year, month, day):
            return None
    elif time_increment[0:5] == "month" or time_increment[0:6] == "season":
        if 0 in (year, month):
            return None
    elif time_increment[0:4] == "year":
        if year == 0:
            return None
    try:
        comptime = cdtime.comptime(year, month, day, hour, minute, second)
        reltime = comptime.torel(units, calendar)
        return reltime.value
    except Exception:
        return None

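# Example behavior (illustrative, assuming the enclosing scope sets
# time_increment = "month" and suitable units/calendar):
# parse("2001-6-15") -> the relative-time value for 2001-6-15
# parse("2001")      -> None (month field is 0, too coarse for monthly data)
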
def generateTime(matchobj, timespecs):
    iyr = 0
    imo = 1
    idy = 1
    ihr = 0
    imi = 0
    ise = 0
    yrspec, mospec, dyspec, hrspec, mispec, sespec = timespecs
    if yrspec:
        pat, name, dimtype, pos = _specifierMap[yrspec]
        yrstr = matchobj.group(name)
        iyr = int(yrstr)
        # Map 2-digit year to [1950,2049)
        if yrspec in ('%y', '%ey'):
            if iyr < 50:
                iyr = iyr + 2000
            else:
                iyr = iyr + 1900
    if mospec:
        pat, name, dimtype, pos = _specifierMap[mospec]
        mostr = matchobj.group(name)
        if mospec in ('%G', '%eG'):
            imo = _monthMapUpper[mostr]
        elif mospec in ('%g', '%eg'):
            imo = _monthMapLower[mostr]
        elif mospec in ('%m', '%em', '%n', '%en'):
            imo = int(mostr)
    if dyspec:
        pat, name, dimtype, pos = _specifierMap[dyspec]
        dystr = matchobj.group(name)
        idy = int(dystr)
    if hrspec:
        pat, name, dimtype, pos = _specifierMap[hrspec]
        hrstr = matchobj.group(name)
        ihr = int(hrstr)
    if mispec:
        pat, name, dimtype, pos = _specifierMap[mispec]
        mistr = matchobj.group(name)
        imi = int(mistr)
    if sespec:
        pat, name, dimtype, pos = _specifierMap[sespec]
        sestr = matchobj.group(name)
        ise = int(sestr)
    return cdtime.comptime(iyr, imo, idy, ihr, imi, ise)

error = True
if error == False:
    raise Exception("Error it should have failed here!")
s = f('ta', slice(0, 1), genutil.picker(level=levels, match=0))
if s.shape[1] != 3:
    raise Exception("Error did not return 3 levels!")
if (s.getLevel()[:] != levels).any():
    raise Exception("Error did not retrieve the right levels!")
print("following plot should show all missing values, since 800 does not exist!")
x = vcs.init()
x.plot(s[0, -1], bg=bg)
vcs.test.support.check_plot(x)
levels = [1000, 700, 850]
s3 = f('ta', genutil.picker(time=['1987-7', '1988-1', cdtime.comptime(1988, 3)],
                            level=[1000, 700, 850]))
if s3.shape != (3, 3, 73, 144):
    raise Exception("Did not retrieve the right slab")
t1 = cdtime.componenttime(1987, 7)
t2 = cdtime.componenttime(1988, 1)
t3 = cdtime.componenttime(1988, 3)
if s3.getTime().asComponentTime() != [t1, t2, t3]:
    raise Exception("Error did not get the right times")
test = s3.getLevel()[:] != levels
if test.any():
    raise Exception("Error did not get the right levels")

import vcs, cdms2, os, sys, cdtime
import testing.regression as regression

f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
s = f("clt", squeeze=1)
x = regression.init()
x.plot(s, bg=1, time=cdtime.comptime(2015))
fnm = os.path.split(__file__)[1][:-3] + ".png"
regression.run(x, fnm)

print "Step #0 : Reading data" s=f(var,longitude=(0,360,'co')) acok=fsc('climseas',longitude=(0,360,'co')) print 'Test #1 : Test result' ac=times.JAN.climatology(s) assert(MV2.allclose(ac[0],acok[0])) f.close() fsc.close() a=cdtime.comptime(1980) b=cdtime.comptime(1980,5) f = cdms2.open(os.path.join(sys.prefix,'sample_data','tas_6h.nc')) s=f(var,time=(a,b,'co'),squeeze=1) print "Test #2 : 6hourly AND get" jans=times.JAN(s) print "Test #3 : climatology 6h" JFMA=times.Seasons('JFMA') jfma=JFMA.climatology(s) #Test reorder print "Test #4 : time not first axis"
# Adapted for numpy/ma/cdms2 by convertcdms.py
import cdms2 as cdms
import vcs
import cdtime
import support
import os

bg = support.bg

t0 = cdtime.comptime(1987, 8)
t1 = cdtime.comptime(1987, 12)
f = cdms.open(os.path.join(vcs.sample_data, 'ta_ncep_87-6-88-4.nc'))
s = f('ta', latitude=slice(5, 6), level=slice(0, 1), longitude=slice(6, 7), squeeze=1)
s2 = s()
t2 = s2.getTime()
t2.units = 'months since 1949-2'
x = vcs.init()
y = vcs.init()
b = x.createyxvsx('new2')
b.datawc_x1 = t0
b.datawc_x2 = t1
x.plot(s, b, bg=bg)
support.check_plot(x)
y.plot(s2, b, bg=bg)

#
# Example 1:
# We compute the spatial rms difference between 2 fields.  The 2 fields
# chosen here are the surface air temperature fields in the NCEP/NCAR
# reanalysis for the 1980-1985 and 1990-1995 periods.
#
#*******************************************************************************
#
# First of all let us define our 2 periods of interest
#
import cdtime
#
# Period 1 starts at 1980 and ends at 1985
#
a1 = cdtime.comptime(1980)
b1 = cdtime.comptime(1985)
#
# By default, the definition above sets the time at January 1st of the years
# specified.
#
# Similarly, Period 2 will start at 1990 and end at 1995
#
a2 = cdtime.comptime(1990)
b2 = cdtime.comptime(1995)
#
# Let us retrieve data for surface air temperature (tas)
# for each of these 2 periods we just defined.
#
ncep = os.path.join(sys.prefix, 'sample_data', 'tas_mo.nc')

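# A sketch of the retrieval step the comments above describe (assuming the
# standard 'tas_mo.nc' sample file and a cdms2 import; the variable names
# tas1/tas2 are ours):
# import cdms2
# f = cdms2.open(ncep)
# tas1 = f('tas', time=(a1, b1, 'co'))
# tas2 = f('tas', time=(a2, b2, 'co'))
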
def specify(self, slab, axes, specification, confined_by, aux):
    '''First part: confine the slab within a Domain wide enough to do the exact in post'''
    import copy
    from numpy.ma import minimum, maximum
    # myconfined is for later; we can't confine a dimension twice with an
    # argument plus a keyword, or with 2 keywords
    myconfined = [None] * len(axes)
    self.aux = copy.copy(specification)
    # First look at the arguments (i.e. not keywords) and confine the
    # dimensions in the order of the arguments
    for i in range(len(self.args)):
        if confined_by[i] is None:  # Check it hasn't been confined by somebody else
            myconfined[i] = 1  # dim confined by argument list
            confined_by[i] = self  # for cdms I want to confine this dimension
            self.aux[i] = specs = list(self.args[i])  # How do we want to confine this dim ?
            if type(specs) == type(slice(0)):
                specification[i] = specs  # If it's a slice, nothing to do
            else:  # But if it's not...
                if specs[0] is None:
                    tmp = axes[i].getBounds()
                    if tmp is None:
                        raise ValueError('Region error, axis:' + axes[i].id + ' has no bounds')
                    specs[0] = minimum(minimum(tmp[0], tmp[-1]))
                if specs[1] is None:
                    tmp = axes[i].getBounds()
                    if tmp is None:
                        raise ValueError('Region error, axis:' + axes[i].id + ' has no bounds')
                    specs[1] = maximum(maximum(tmp[0], tmp[-1]))
                if axes[i].isTime():  # Time is as always "special"
                    import cdtime
                    tc = type(cdtime.comptime(0))  # component time type
                    tr = type(cdtime.reltime(0, 'months since 0'))  # relative time type
                    t = type(specs[0])  # my first spec type
                    if t == type(''):  # first spec passed as a string
                        specs[0] = cdtime.s2r(specs[0], axes[i].units)
                    elif t == tc or t == tr:  # first spec passed as a cdtime object
                        specs[0] = cdtime.torel(specs[0], axes[i].units)
                    else:
                        # Otherwise the user must know the time values in the axis
                        pass
                    t = type(specs[1])  # my second spec type
                    if t == type(''):  # second spec passed as a string
                        specs[1] = cdtime.s2r(specs[1], axes[i].units)
                    elif t == tc or t == tr:  # second spec passed as a cdtime object
                        specs[1] = cdtime.torel(specs[1], axes[i].units)
                sp = [specs[0], specs[1], 'oob']  # retrieve values wide enough for the exact
                specification[i] = sp  # sets the specifications
        else:
            return 1
    for kw in self.kargs.keys():
        axis = None
        for i in range(len(axes)):
            if axes[i].id == kw:
                axis = i
        if axis is None:
            if kw == 'time':
                for i in range(len(axes)):
                    if axes[i].isTime():
                        axis = i
            elif kw == 'level':
                for i in range(len(axes)):
                    if axes[i].isLevel():
                        axis = i
            elif kw == 'longitude':
                for i in range(len(axes)):
                    if axes[i].isLongitude():
                        axis = i
            elif kw == 'latitude':
                for i in range(len(axes)):
                    if axes[i].isLatitude():
                        axis = i
            elif kw not in ['exact', 'atol', 'rtol']:
                # keyword is not a recognised keyword or dimension name
                raise ValueError('Error, keyword: ' + kw + ' not recognized')
        # At this point, if axis is None we are dealing with a keyword for
        # the selector, so we'll skip it
        if axis is not None:
            if confined_by[axis] is None:
                confined_by[axis] = self
                myconfined[axis] = 1
                self.aux[axis] = specs = list(self.kargs[kw])
                if type(specs) != type(slice(0)):
                    if specs[0] is None:
                        tmp = axes[axis].getBounds()
                        if tmp is None:
                            raise ValueError('Region error, axis:' + axes[axis].id + ' has no bounds')
                        specs[0] = minimum(minimum(tmp[0], tmp[-1]))
                    if specs[1] is None:
                        tmp = axes[axis].getBounds()
                        if tmp is None:
                            raise ValueError('Region error, axis:' + axes[axis].id + ' has no bounds')
                        specs[1] = maximum(maximum(tmp[0], tmp[-1]))
                    if axes[axis].isTime():
                        import cdtime
                        tc = type(cdtime.comptime(0))
                        tr = type(cdtime.reltime(0, 'months since 0'))
                        t = type(specs[0])
                        if t == type(''):
                            specs[0] = cdtime.s2r(specs[0], axes[axis].units)
                        elif t == tc or t == tr:
                            specs[0] = cdtime.torel(specs[0], axes[axis].units)
                        t = type(specs[1])
                        if t == type(''):
                            specs[1] = cdtime.s2r(specs[1], axes[axis].units)
                        elif t == tc or t == tr:
                            specs[1] = cdtime.torel(specs[1], axes[axis].units)
                    sp = [specs[0], specs[1], 'oob']
                    specification[axis] = sp
                else:
                    specification[axis] = specs
            else:
                if myconfined[axis] == 1:
                    raise ValueError('Error: you are attempting to set the axis: ' +
                                     str(axes[axis].id) + ' more than once')
                else:
                    return 1
    return 0

import cdms2 as cdms
import cdutil
import cdtime
from eofs.cdms import Eof
import vcs

#===========================================================================================================
# DATA
#-----------------------------------------------------------------------------------------------------------
# Open file ---
data_path = '/clim_obs/obs/ocn/mo/tos/UKMETOFFICE-HadISST-v1-1/130122_HadISST_sst.nc'  ## Put your file here
f = cdms.open(data_path)

# Set time period ---
start_year = 1980
end_year = 2000
start_time = cdtime.comptime(start_year)
end_time = cdtime.comptime(end_year)

# Load variable ---
d = f('sst', time=(start_time, end_time), longitude=(0, 360), latitude=(-90, 90))  # Provide proper variable name

# Remove annual cycle ---
d_anom = cdutil.ANNUALCYCLE.departures(d)

# EOF (take only first variance mode...) ---
solver = Eof(d_anom, weights='area')
eof = solver.eofsAsCovariance(neofs=1)
pc = solver.pcs(npcs=1, pcscaling=1)  # pcscaling=1: scaled to unit variance
                                      # (divided by the square-root of their eigenvalue)
frac = solver.varianceFraction()

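# A short usage note (our addition): the fraction of variance explained by the
# leading mode can be checked directly from the solver output above.
# print('EOF1 explains %.1f%% of variance' % (float(frac[0]) * 100.))
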
# if var and var.code == ilookup[ITEM_CODE]:
# Then this is another level of the same variable
# Perhaps should just pass the full lookup array?
# Should this return a unique list of names ???
npts = ilookup[LBNPT]
nrows = ilookup[LBROW]
if not timeunits:
    # Should be hidden as a function somewhere
    timeunits = "days since %4.4d-%2.2d-%2.2d %2.2d:%2.2d" % (
        ilookup[LBYR], ilookup[LBMON], ilookup[LBDAT], ilookup[LBHR], ilookup[LBMIN])
# Different variables may be saved with different steps
# Really only need to do this the first time the variable is seen.
end = cdtime.comptime(ilookup[LBYRD], ilookup[LBMOND], ilookup[LBDATD],
                      ilookup[LBHRD], ilookup[LBMIND])
period = end.torelative(timeunits)
step = period.value / ilookup[LBROW]
if verbose:
    print("TIME STEP (days)", step)
# Step will probably be some integer number of minutes
step = round(1440 * step, 4)
if verbose:
    print("TIME STEP (mins)", step)
# ilookup[LBCODE] is 31320 for Gregorian timeseries, 31323 for other calendar
# rlookup[51] is level, -1 for single or special levels
f.wordseek(lbegin)  # Offset rather than absolute seek?
s = f.wordread(npts * nrows)
# Where is this format for grid point values defined?
# Added by routine extra_ts_info