def provenance_dict(script_file_name=None):
    """Build (once) and return the module-level provenance dictionary.

    Records the metrics/UV-CDAT versions, a sha1 of the driving script and a
    history line (UTC timestamp, login name, cwd, command line) in ``provdic``.

    script_file_name : path of the driving script; when None, the first
        sys.argv entry that does not look like the python interpreter is used.
    Returns the global ``provdic`` dict (cached: re-used once populated).
    """
    global provdic
    # Already populated on a previous call -- return the cached dict.
    if len(provdic) >= 3:
        return provdic
    if script_file_name is None:
        # First argv entry that is not the interpreter itself.
        for a in sys.argv:
            if a[-10:].lower().find("python") == -1:
                script_file_name = a
                break
    provdic['version'] = metrics.git.commit
    provdic['UVCDAT'] = "UV-CDAT: %s Metrics: %s (%s) script_sha1: %s" % (
        '.'.join([str(x) for x in cdat_info.version()]),
        metrics.git.metrics_version,
        metrics.git.commit,
        hashfile(script_file_name))
    # os.getlogin() fails if this is not a controlling shell, e.g. sometimes
    # when using mpirun I hit this.  Fall back through pwd, then $LOGNAME.
    # NOTE: excepts narrowed from bare `except:` so KeyboardInterrupt/
    # SystemExit are not swallowed.
    try:
        logname = os.getlogin()
    except Exception:
        try:
            import pwd
            logname = pwd.getpwuid(os.getuid())[0]
        except Exception:
            try:
                logname = os.environ.get('LOGNAME', 'unknown')
            except Exception:
                # Typos fixed: "Couldnt"/"provenence" -> proper spelling.
                print("Couldn't determine a login name for provenance information")
                logname = 'unknown-loginname'
    provdic['history'] = (
        "%s: created by %s from path: %s with input command line: %s" % (
            str(datetime.datetime.utcnow()), logname, os.getcwd(),
            " ".join(sys.argv)))
    return provdic
def __init__(self, parent):
    """Populate standard documentation metadata derived from *parent*."""
    self.url = 'http://cdat.sf.net'
    self.doc = """This diagnostic is not documented yet"""
    self.type = 'class'
    self.programminglanguage = "Python"
    self.author = "PCMDI's software team"
    # Dotted version string built from the cdat_info version tuple.
    self.version = '.'.join(str(part) for part in cdat_info.version())
    # Fully qualified class name extracted from the repr of the type,
    # e.g. "<class 'pkg.mod.Cls'>" -> "pkg.mod.Cls".
    self.codepath = str(type(parent)).split("'")[1]
    # Attribute/method names that documentation scanners should skip.
    self.hide = [
        'go', 'fromXml', 'toXml', 'toDOM', 'printXml',
        'scanDocString', 'stripSectionsFromDoc', 'hide',
    ]
def __init__(self):
    """Build the loading-splash pixmap from the SVG template.

    The SVG template carries format placeholders for the CDAT version, a
    font size and the GUI version.  The rendered SVG is written to a
    temporary file, loaded as a QPixmap, and the file is then removed.
    """
    super(LoadingSplash, self).__init__()
    splash_path = utils.data_file("resources/uv-cdat-splash.svg")
    import cdat_info
    with open(splash_path) as splash_file:
        splash = splash_file.read()
    cdat_version = cdat_info.version()
    # Release versions end in an int and fit the large font; other builds
    # carry a longer final component and need the smaller one.
    font_size = 39 if isinstance(cdat_version[-1], int) else 22
    cdat_version = ".".join([str(p) for p in cdat_version])
    splash = splash.format(cdat_version=cdat_version,
                           version_font=font_size,
                           gui_version=QtGui.qApp.applicationVersion())
    import tempfile
    # BUG FIX: mkstemp() returns an already-open OS-level descriptor; the
    # original discarded it and re-opened the path, leaking the fd.  Wrap
    # the fd instead, and make sure the temp file is removed even if the
    # write or pixmap load fails.
    fd, path = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(splash)
        pixmap = QtGui.QPixmap(path, "svg")
    finally:
        os.remove(path)
    self.setPixmap(pixmap)
def provenance_dict( script_file_name=None ):
    """Build (once) and return the module-level provenance dictionary.

    Records the metrics/UV-CDAT versions, a sha1 of the driving script and a
    history line (UTC timestamp, login name, cwd, command line) in ``provdic``.

    script_file_name : path of the driving script; when None, the first
        sys.argv entry that does not look like the python interpreter is used.
    Returns the global ``provdic`` dict (cached: re-used once populated).
    """
    global provdic
    if len(provdic) >= 3:
        return provdic
    if script_file_name is None:
        # First argv entry that is not the interpreter itself.
        for a in sys.argv:
            if a[-10:].lower().find("python") == -1:
                script_file_name = a
                break
    provdic['version'] = metrics.git.commit
    provdic['UVCDAT'] = "UV-CDAT: %s Metrics: %s (%s) script_sha1: %s" % (
        '.'.join([str(x) for x in cdat_info.version()]),
        metrics.git.metrics_version,
        metrics.git.commit,
        hashfile(script_file_name))
    # os.getlogin() fails if this is not a controlling shell, e.g. sometimes
    # when using mpirun I hit this.  Fall back through pwd, then $LOGNAME.
    try:
        logname = os.getlogin()
    except Exception:
        try:
            import pwd
            logname = pwd.getpwuid(os.getuid())[0]
        except Exception:
            try:
                logname = os.environ.get('LOGNAME', 'unknown')
            except Exception:
                # BUG FIX: was `logger.exeption(...)` -- a typo that raises
                # AttributeError at exactly the moment we are trying to
                # recover.  Message spelling fixed as well.
                logger.exception("Couldn't determine a login name for provenance information")
                logname = 'unknown-loginname'
    provdic['history'] = "%s: created by %s from path: %s with input command line: %s" % (
        str(datetime.datetime.utcnow()), logname, os.getcwd(), " ".join(sys.argv) )
    return provdic
setattr(ann,k,d.attributes[k]) # Write out file global atts for k in f_in.attributes.keys(): setattr(f_out,k,f_in.attributes[k]) history = getattr(f_in,'history') # Write new file global atts f_out.institution = "Program for Climate Model Diagnosis and Intercomparison (LLNL)" f_out.data_contact = "Paul J. Durack; [email protected]; +1 925 422 5208" # Create timestamp, corrected to UTC for history local = pytz.timezone("America/Los_Angeles") time_now = datetime.datetime.now(); local_time_now = time_now.replace(tzinfo = local) utc_time_now = local_time_now.astimezone(pytz.utc) time_format = utc_time_now.strftime("%d-%m-%Y %H:%M:%S %p") f_out.history = "".join([history,'\n','File processed: ',time_format,' UTC; San Francisco, CA, USA']) f_out.host = "".join([host_name,'; CDAT version: ',"".join(["%s" % el for el in cdat_info.version()]),'; Python version: ',replace(replace(sys.version,'\n','; '),') ;',');')]) # Write new variable atts ann.comment = "Converted to annual from monthly mean data" # Write data to file, if successful close infile and outfile try: f_out.write(ann) f_out.close() f_in.close() ; # Source file kept open so attributes can be copied across nc_good = nc_good + 1 ; except: print "file write bombed onto next in loop" f_out.close() f_in.close() nc_bad4 = nc_bad4 + 1 ; if os.path.exists(outfile):
def version():
    """Return cdat_info's version as a dotted string, e.g. "2.2.0"."""
    return ".".join(str(component) for component in cdat_info.version())
# NOTE(review): fragment begins mid-loop -- `i` indexes att_keys; the
# enclosing `for` header lies outside the visible chunk.
# Copy each (name, value) global attribute from the input handle to the
# output handle.
att_dic[i]=att_keys[i],f_in.attributes[att_keys[i]]
to_out = att_dic[i]
setattr(f_out,to_out[0],to_out[1])
# Timestamp in US/Pacific converted to UTC for the history attribute.
# NOTE(review): pytz zones should normally be attached via localize();
# replace(tzinfo=...) picks the zone's base (LMT) offset -- confirm intended.
local = pytz.timezone("America/Los_Angeles")
time_now = datetime.datetime.now();
local_time_now = time_now.replace(tzinfo = local)
utc_time_now = local_time_now.astimezone(pytz.utc)
time_format = utc_time_now.strftime("%d-%b-%Y %H:%M:%S %p")
# Prepend the provenance note to the inherited history attribute.
f_out.history="".join([" CRT[",time_format,"]: added axes bounds to data using UVCDAT, added standard_name to variable, and appended provenance information:",f_out.history])
f_out.data_provider = "NOAA Earth System Research Laboratory // NASA/Goddard Space Flight Center"
f_out.Conventions = "CF-1.7"
# f_out.references = "".join([f_out.references," // Adler, R.F., G.J. Huffman, A. Chang, R. Ferraro, P. Xie, J. Janowiak, B. Rudolf, U. Schneider, S. Curtis, D. Bolvin, A. Gruber, J. Susskind, and P. Arkin, 2003: The Version 2 Global Precipitation Climatology Project (GPCP) Monthly Precipitation Analysis (1979-Present). J. Hydrometeor., 4,1147-1167."])
f_out.references = " Adler, R.F., G.J. Huffman, A. Chang, R. Ferraro, P. Xie, J. Janowiak, B. Rudolf, U. Schneider, S. Curtis, D. Bolvin, A. Gruber, J. Susskind, and P. Arkin, 2003: The Version 2 Global Precipitation Climatology Project (GPCP) Monthly Precipitation Analysis (1979-Present). J. Hydrometeor., 4,1147-1167."
# NOTE(review): indentation boundaries were lost in this fragment; the
# per-file statements (write/print/counters) most likely sat inside a loop
# over input files, with the close/ncatted calls after it -- confirm against
# the original script before relying on this layout.
f_out.institution = "NOAA Earth System Research Laboratory // Data processed at: Program for Climate Model Diagnosis and Intercomparison (LLNL)"
f_out.contact = "Physical Sciences Division: Data Management, NOAA/ESRL/PSD, [email protected] // Processed by: Chris Terai; [email protected]; +1 925 422 8830"
f_out.host = "".join([gethostname(),'; UVCDAT version: ',".".join(["%s" % el for el in cdat_info.version()]), '; Python version: ',replace(replace(sys.version,'\n','; '),') ;',');')])
# Write the assembled variable and report progress for this input file.
f_out.write(dattable) ;
print "".join(["** Finished processing: ",fi," **"])
filecount = filecount + 1;
filecount_s = '%06d' % filecount
f_index=f_index + 1
f_out.close()
print 'got data from '+str(filecount)+' variables.'
print " adding _FillValue attribute"
# Post-process the netcdf with ncatted: set _FillValue on PRECT, drop it on
# the bounds variables.
# NOTE(review): shell=True with a string built from variables -- fine for a
# trusted local script, but would be a shell-injection risk with untrusted
# input; prefer an argument list with shell=False.
call("".join(["ncatted -a _FillValue,PRECT,o,f,",str(var_missing_value)," ",outfile]), shell=True)
call("".join(["ncatted -a _FillValue,'^bounds',d,, ",outfile]), shell=True)
def run_dir(test_dir,lst):
    """Run every test script in *lst* found under *test_dir*.

    A file counts as a test when it ends in .py and starts with 'test' or
    'cdtest'.  Per-test statistics (fastest/slowest/average runtime, run
    count, machines) are kept in a nested "shadow" dict keyed by CDAT
    version / OS / system / machine, loaded via get_shadow() and written
    back via set_shadow().  Tests whose last successful run is newer than
    the o.date threshold are skipped.  Returns a dict mapping
    (test_dir, test) to that test's stats (or, in query mode, to the full
    shadow dict).  Exits the process on the first failure unless o.skip.
    """
    lst.sort()
    passed=True
    output={}
    for test in lst:
        if test[-3:]=='.py' and (test.lower()[:4]=='test' or test.lower()[:6]=='cdtest'):
            Dict_all = get_shadow(test_dir,test)
            if o.query_mode:
                # Query mode: report the stored stats and drop the shadow
                # file; do not run anything.
                output[(test_dir,test)]=Dict_all
                try:
                    fnm = os.path.join(test_dir,test)[:-3]+'.shadow'
                    os.remove(fnm)
                except:
                    pass
                continue
            # Drill into the stats bucket for this CDAT version / OS /
            # system / machine, creating empty levels as needed.
            myversion = ".".join(map(str,cdat_info.version()))
            dict_all = Dict_all.get(myversion,{})
            myos = os.uname()[0]
            system = os.uname()[2]
            machine = os.uname()[4]
            dict_os = dict_all.get(myos,{})
            dict_system = dict_os.get(system,{})
            dict = dict_system.get(machine,{})
            dict_system[machine] = dict
            dict_os[system] = dict_system
            dict_all[myos] = dict_os
            details = ""
            last = dict.get("last","1980-01-01 00:00:00") # ok ever ago!
            format = dict.get("format",default_time_format)
            tlast = time.strptime(last,format)
            delta = time.mktime(tlast)-time.mktime(time.strptime(o.date,o.format))
            if delta>0:
                # Last successful run is newer than the threshold: skip.
                if o.verbose>0:
                    print "\tRunning: %s" % (test)
                    print "\t\tSuccessful run newer than threshold %s vs %s " % (last,o.date)
                continue
            if o.verbose>0:
                print "\tRunning: %s" % (test)
            if o.verbose<3 or dict_all.keys()==[]:
                details=make_tests_string_machine(machine,dict)
            else:
                details+=make_tests_string(dict_all)
            print details
            # Run the test and discard warning lines from stderr.
            t = time.time()
            out,err= run_test(os.path.join(test_dir,test))
            err2 = []
            for l in err:
                if l.find("Warning")>-1:
                    pass
                else:
                    err2.append(l)
            err=err2
            t2 = time.time()
            if err!=[]:
                passed = False
            if o.verbose>1:
                for l in out:
                    st='\t\t%s' % l.strip()
                    print st
            if o.verbose>0:
                if err!=[]:
                    print '\t FAILED\n\n',err
                    if o.verbose>1:
                        for l in err:
                            st='\t\t%s' % l.strip()
                            print st
                else:
                    print '\t PASSED\n\n'
            # Update runtime statistics (whole seconds, rounded up).
            runtime = int(t2-t)+1
            fastest = dict.get("fastest",runtime+1)
            if fastest>runtime:
                fastest = runtime
            dict["fastest"]=fastest
            slowest = dict.get("slowest",runtime-1)
            if slowest<runtime:
                slowest = runtime
            dict["slowest"]=slowest
            dict["format"]=default_time_format
            dict["last"] = time.strftime(default_time_format,time.localtime())
            count=dict.get("count",0)
            count+=1
            dict["count"]=count
            # Running average runtime over all recorded runs.
            avg = dict.get("time",0.)*(count-1)
            avg+=runtime
            avg/=count
            dict["time"] = avg
            machines = dict.get("machines",[])
            # Only record the real hostname when uploads are public.
            if int(o.upload)>1:
                mymachine = os.uname()[1]
            else:
                mymachine = "private"
            if not mymachine in machines:
                machines.append(mymachine)
            dict["machines"] = machines
            dict_system[machine] = dict
            dict_os[system] = dict_system
            dict_all[myos] = dict_os
            Dict_all[myversion] = dict_all
            output[(test_dir,test)]=dict
            if out==[] or str(out[-1]).lower().find('skipped')==-1:
                # The test wasn't skipped, so we can replace the stat
                # (shadow) file.
                set_shadow(test_dir,test,Dict_all)
    if o.skip is False and passed is False:
        sys.exit()
    return output
def globalAttWrite(file_handle,options=None):
    """
    Documentation for globalAttWrite():
    -------
    The globalAttWrite() function writes standard global_attributes to an
    open netcdf specified by file_handle

    Author: Paul J. Durack : [email protected]

    Inputs:
    -----

    | **file_handle** - a cdms2 open, writeable file handle

    Returns:
    -------

    Nothing.

    Usage:
    ------
    >>> from durolib import globalAttWrite
    >>> globalAttWrite(file_handle)

    Optional Arguments:
    -------------------
    | option=optionalArguments
    | Restrictions: option has to be a string
    | Default : None (writes the full standard attribute set)

    You can pass option='noid' to write only history/host, omitting the
    identifying contact/institution attributes.

    Examples:
    ---------
    >>> from durolib import globalAttWrite
    >>> f = cdms2.open('data_file_name','w')
    >>> globalAttWrite(f) # Writes standard global attributes to the netcdf file specified by file_handle

    Notes:
    -----
    ...
    """
    import cdat_info
    # Create timestamp, corrected to UTC for history
    local = pytz.timezone("America/Los_Angeles")
    time_now = datetime.datetime.now()
    local_time_now = time_now.replace(tzinfo = local)
    utc_time_now = local_time_now.astimezone(pytz.utc)
    time_format = utc_time_now.strftime("%d-%m-%Y %H:%M:%S %p")
    # BUG FIX: the original guard was "'options' in locals() and not
    # options == None" -- a declared parameter is ALWAYS in locals(), so the
    # locals() test was dead code and `options` was effectively required.
    # `options` now defaults to None (backward compatible for all callers).
    if options is not None:
        if options.lower() == 'noid':
            # Anonymous variant: history/host only.
            file_handle.history = "".join(['File processed: ',time_format,' UTC; San Francisco, CA, USA'])
            file_handle.host = "".join([gethostname(),'; UVCDAT version: ',".".join(["%s" % el for el in cdat_info.version()]),
                                        '; Python version: ',replace(replace(sys.version,'\n','; '),') ;',');')])
        else:
            print('** Invalid options passed, skipping global attribute write.. **')
    else:
        # Default: full standard attribute set.
        file_handle.data_contact = "Paul J. Durack; [email protected]; +1 925 422 5208"
        file_handle.history = "".join(['File processed: ',time_format,' UTC; San Francisco, CA, USA'])
        file_handle.host = "".join([gethostname(),'; UVCDAT version: ',".".join(["%s" % el for el in cdat_info.version()]),
                                    '; Python version: ',replace(replace(sys.version,'\n','; '),') ;',');')])
        file_handle.institution = "Program for Climate Model Diagnosis and Intercomparison (LLNL), Livermore, CA, U.S.A."
def globalAttWrite(file_handle,options):
    """Write the standard UC-Irvine global attributes onto *file_handle*.

    Sets institution, data_contact, history (with a UTC-corrected
    timestamp) and analysis_host on an open, writeable netcdf handle.
    *options* is accepted for signature compatibility but unused here.

    Author: Chris Terai (after Paul J. Durack's durolib version).

    Example
    -------
    >>> f = cdms2.open('data_file_name','w')
    >>> globalAttWrite(f)  # writes standard global attributes
    """
    # Timestamp taken in US/Pacific, converted to UTC for the history line.
    pacific = pytz.timezone("America/Los_Angeles")
    stamped = datetime.datetime.now().replace(tzinfo=pacific).astimezone(pytz.utc)
    stamp = stamped.strftime("%d-%m-%Y %H:%M:%S %p")
    # Pre-build the version strings used in analysis_host.
    uvcdat_string = ".".join(["%s" % el for el in cdat_info.version()])
    python_string = replace(replace(sys.version,'\n','; '),') ;',');')
    file_handle.institution = "Department of Earth System Science, UC-Irvine"
    file_handle.data_contact = "Chris Terai; [email protected]"
    file_handle.history = "".join(['File processed: ',stamp,' UTC; Irvine, CA, USA'])
    file_handle.analysis_host = "".join([gethostname(),'; UVCDAT version: ',uvcdat_string,
                                         '; Python version: ',python_string])
import cdat_info
# Accumulators for the nicely-formatted results display: unique package
# names, OSes, "os_system_machine" version triples and machine names seen
# in the stored test results.
Packages=[]
OS=[]
Versions=[]
Machines=[]
CDATVersions=[]
#code to display nicely all the results
if o.query_mode:
    for test in results.keys():
        # test is a (package/dir, test-name) tuple; collect unique dirs.
        pnm =test[0]
        if not pnm in Packages:
            Packages.append(pnm)
        # Stats for the current CDAT version only.
        CDATVersions=results[test]
        oses = CDATVersions.get(str(cdat_info.version()),{})
        for aos in oses.keys():
            if not aos in OS:
                OS.append(aos)
            versions = oses[aos]
            for v in versions.keys():
                syst = versions[v]
                for asys in syst:
                    # Flatten the os/version/system triple into one label.
                    full = "%s_%s_%s" % (aos,v,asys)
                    if not full in Versions:
                        Versions.append(full)
                    res = syst[asys]
                    machines = res["machines"]
                    for m in machines:
                        if not m in Machines:
                            Machines.append(m)
import sys,random,os,shutil,time,bz2,ftplib,time
# NOTE(review): `global` at module top level is a no-op; presumably kept as
# documentation of the names shared with the functions below.
global counter,pause,diff
import urllib
import cdat_info
# Major.minor CDAT version string, e.g. "2.2".
version=".".join(map(str,cdat_info.version()[:2]))
counter=0
bg=1
dogui=False
pause = False
diff = 1.25 # % of diff ok
# FTP target used for exchanging baseline SVG images.
ftp_site = "climate.llnl.gov"
ftp_dir = "SVG"
ftp_user = "******"
ftp_password = "******"
keep_local = False
def check_plot(x):
    # Render canvas `x` to a uniquely named temporary SVG so it can be
    # compared against the stored baseline.
    global counter,pause,diff
    n =random.randint(0,1000)
    fnm = "tmp_%i.svg" % n
    # Retry until we find an unused temp name.
    while os.path.exists(fnm):
        n =random.randint(0,1000)
        fnm = "tmp_%i.svg" % n
    if pause:
        time.sleep(1)
    x.svg(fnm)
    try:
        os.makedirs("Good")
    except:  # NOTE(review): handler body lies beyond the visible chunk
# Write new file global atts f_out.institution = "Program for Climate Model Diagnosis and Intercomparison (LLNL)" f_out.data_contact = "Paul J. Durack; [email protected]; +1 925 422 5208" # Create timestamp, corrected to UTC for history local = pytz.timezone("America/Los_Angeles") time_now = datetime.datetime.now() local_time_now = time_now.replace(tzinfo=local) utc_time_now = local_time_now.astimezone(pytz.utc) time_format = utc_time_now.strftime("%d-%m-%Y %H:%M:%S %p") f_out.history = "".join([ history, '\n', 'File processed: ', time_format, ' UTC; San Francisco, CA, USA' ]) f_out.host = "".join([ host_name, '; CDAT version: ', "".join(["%s" % el for el in cdat_info.version()]), '; Python version: ', replace(replace(sys.version, '\n', '; '), ') ;', ');') ]) # Write new variable atts ann.comment = "Converted to annual from monthly mean data" # Write data to file, if successful close infile and outfile try: f_out.write(ann) f_out.close() f_in.close() # Source file kept open so attributes can be copied across nc_good = nc_good + 1 except: print "file write bombed onto next in loop"
import sys, random, os, shutil, time, bz2, ftplib, time
# NOTE(review): `global` at module top level is a no-op; presumably kept as
# documentation of the names shared with the functions below.
global counter, pause, diff
import urllib
import cdat_info
# Major.minor CDAT version string, e.g. "2.2".
version = ".".join(map(str, cdat_info.version()[:2]))
counter = 0
bg = 1
dogui = False
pause = False
diff = 1.25  # % of diff ok
# FTP target used for exchanging baseline SVG images.
ftp_site = "climate.llnl.gov"
ftp_dir = "SVG"
ftp_user = "******"
ftp_password = "******"
keep_local = False
def check_plot(x):
    # Render canvas `x` to a uniquely named temporary SVG so it can be
    # compared against the stored baseline.
    global counter, pause, diff
    n = random.randint(0, 1000)
    fnm = "tmp_%i.svg" % n
    # Retry until we find an unused temp name.
    while os.path.exists(fnm):
        n = random.randint(0, 1000)
        fnm = "tmp_%i.svg" % n
    if pause:
        time.sleep(1)
    x.svg(fnm)
    try:
        # NOTE(review): chunk ends here; the matching except lies beyond
        # the visible region.
        os.makedirs("Good")
#%% Load subset of variable f = [ '/p/css03/esgf_publish/CMIP6/CMIP/NCAR/CESM2/historical/r1i1p1f1/Omon/so/gn/v20190308/so_Omon_CESM2_historical_r1i1p1f1_gn_185001-201412.nc' ] # '/p/user_pub/xclim/CMIP6/CMIP/historical/ocean/mon/so/CMIP6.CMIP.historical.NCAR.CESM2.r1i1p1f1.mon.so.ocean.glb-l-gn.v20190308.0000000.0.xml'] # Try arbitrary time selections #times = [['1850','1851'],['2000','2005'],['1983','1984'],['2010','2011'],['1984','2013'],['1984','2015']] #times = [np.arange(1984,2015),[1984,2013]] #times = [np.arange(2013,2015),[1999,2014],[1984,2014]] # Try 15, then 29 years times = np.arange(1991, 1984, -1) hostName = gethostname() print('host:', hostName) print('Python version:', sys.version) print('cdat env:', sys.executable.split('/')[5]) print('cdat version:', cdat_info.version()[0]) print('*****') for timeSlot in times: for filePath in f: fH = cdm.open(filePath) print('filePath:', filePath.split('/')[-1]) # Loop through single years start = timeSlot end = 2014 #end = start+1 print('times:', start, end, '; total years:', (end - start) + 1) d1 = fH('so', time=(str(start), str(end))) print("Array size: %d Mb" % ((d1.size * d1.itemsize) / (1024 * 1024))) calcAve(d1) del (d1) fH.close()
def version():
    """Return the cdat_info version components joined into a dotted string."""
    return ".".join(map(str, cdat_info.version()))