def compute_mean_z():
    """Compute annual and monthly mean geopotential height (z) fields."""
    print("Computing mean levels")
    print("loading files:")
    print(glob.glob(file_search))
    z = mygis.read_files(file_search, "z", axis=0)
    mygis.write("annual_mean_z.nc", z.mean(axis=0), varname="z")

    for month in range(12):
        print("Month " + str(month + 1))
        curz = z[0] * 0
        nyears = 0
        month_start = start_day_per_month[month]
        start_point = month_start * times_per_day
        month_end = start_day_per_month[month + 1]
        end_point = month_end * times_per_day
        while start_point < z.shape[0]:
            curz += z[start_point:end_point].mean(axis=0)
            nyears += 1
            start_point += times_per_day * days_per_year
            end_point += times_per_day * days_per_year
        print(nyears)
        curz /= nyears
        mygis.write(zoutputfile.format(month + 1), curz, varname="z")
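# compute_mean_z above and compute_mean_q below rely on module-level constants defined
# elsewhere in the script.  As a hedged sketch only, for 6-hourly data on a no-leap
# (365-day) model calendar they might look roughly like the following; the actual values
# and output-file templates are assumptions, not taken from the original code.
import numpy as np

times_per_day = 4                      # assumed: 6-hourly data
days_per_year = 365                    # assumed: no-leap model calendar
# cumulative start day of each month (13 entries so that start_day_per_month[month+1] works)
start_day_per_month = np.cumsum([0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
zoutputfile = "month{:02}_mean_z.nc"        # hypothetical output-file template
qoutputfile = "month{:02}_mean_{}.nc"       # hypothetical output-file template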
def write_file(fname, data, varname, varatts):
    """Stack a list of 2D arrays along a time dimension and write them to a netCDF file."""
    outputdata = np.zeros((len(data), data[0].shape[0], data[0].shape[1]))
    for i in range(len(data)):
        outputdata[i] = data[i]

    mygis.write(fname, outputdata, dtype='f', dims=("time", "lat", "lon"),
                varname=varname, attributes=varatts,
                extravars=space_time_vars, global_attributes=global_attributes)
def load_wrf_data(stat, time):
    """Load WRF data and calculate the relevant statistic"""
    output_wrf_file = stat + "_" + time + ".nc"
    try:
        current = mygis.read_nc(wrfloc + "current_" + output_wrf_file).data
        future = mygis.read_nc(wrfloc + "future_" + output_wrf_file).data
        print("WRF data loaded from pre-calculated summary files")
    except:
        print(" Reading raw data...")
        if time == "annual":
            current = np.concatenate(mygis.read_files(wrfloc + "daily_NARR*"))
            future = np.concatenate(mygis.read_files(wrfloc + "daily_PGW*"))
        else:
            month = time[-2:]
            current = np.concatenate(mygis.read_files(wrfloc + "daily_NARR*" + month + ".nc"))
            future = np.concatenate(mygis.read_files(wrfloc + "daily_PGW*" + month + ".nc"))

        print(" Generating statistics")
        if stat == "MAP":
            ndays = calc_ndays(time)
            current = current.mean(axis=0) * ndays
            future = future.mean(axis=0) * ndays
        elif stat == "wetfrac":
            current = calc_wetfrac(current)
            future = calc_wetfrac(future)
        else:
            raise KeyError("stat not created for WRF:" + stat)

        print(" Writing stat for future reference")
        mygis.write(wrfloc + "current_" + output_wrf_file, current)
        mygis.write(wrfloc + "future_" + output_wrf_file, future)

    # if stat=="MAP":
    #     current[current<1e-4]=1e-4
    return (future - current), current
def write_monthly_pgw(current, future, geofile):
    """write out a file containing the ratios future/current for each month and lat/lon data"""
    print("writing PGW results")
    if esgvarname == "pr":
        output_data = future / current
        outputfilename = "PGW_ratio_file.nc"
        data_atts = Bunch(long_name="Ratio between future:current monthly precipitation",
                          units="mm/mm")
    else:
        output_data = future - current
        outputfilename = "PGW_difference_file.nc"
        data_atts = Bunch(long_name="Difference between future-current monthly Temperature",
                          units="K")

    lat = io.read_nc(geofile, "lat").data
    lon = io.read_nc(geofile, "lon").data
    time = np.arange(12)

    lat_atts = Bunch(long_name="latitude", units="degrees")
    lon_atts = Bunch(long_name="longitude", units="degrees")
    time_atts = Bunch(long_name="Month of the year", units="month",
                      description="0=January, 11=December, etc.")

    datadims = ("time", "lat", "lon")
    evars = [Bunch(data=lat,  name="lat",  dtype="f", attributes=lat_atts,  dims=("lat",)),
             Bunch(data=lon,  name="lon",  dtype="f", attributes=lon_atts,  dims=("lon",)),
             Bunch(data=time, name="time", dtype="f", attributes=time_atts, dims=("time",))]

    io.write(outputfilename, output_data, dtype="f", varname="data", dims=datadims,
             attributes=data_atts, extravars=evars)
def compute_mean_q(wind_option):
    print("Computing Monthly Mean fields")
    qvarlist = ["qv", "theta", "p", "u", "v", "rh"]
    if wind_option == "nowind":
        qvarlist = ["qv", "theta", "p", "rh"]

    full_data = []
    for varname in qvarlist:
        print(varname)
        data = mygis.read_files(varname + "_cesm_*_*.nc", varname, axis=0)
        for month in range(12):
            print("Month " + str(month + 1))
            meanq = np.zeros(data.shape[1:])
            nyears = 0
            month_start = start_day_per_month[month]
            start_point = month_start * times_per_day
            month_end = start_day_per_month[month + 1]
            end_point = month_end * times_per_day
            while start_point < data.shape[0]:
                print(start_point, end_point)
                meanq += data[start_point:end_point, ...].mean(axis=0)
                nyears += 1
                start_point += times_per_day * days_per_year
                end_point += times_per_day * days_per_year
            if nyears > 0:
                meanq /= nyears
            print(nyears)
            mygis.write(qoutputfile.format(month + 1, varname), meanq, varname=varname)
def main(start_date="19900101"):
    """convert a file (or files) from downscaling code to maps"""
    files = find_files(start_date)
    files.sort()
    geo = load_geo(global_basefile)

    times = mygis.read_nc(files[0], "time").data
    ntimes = times.size
    tmp = mygis.read_nc(files[0], "coefficient", returnNCvar=True)
    output_data = np.zeros((tmp.data.shape[1], ntimes, geo.lon.shape[0], geo.lon.shape[1]))
    tmp.ncfile.close()
    print(output_data.shape)

    for f in files:
        print(f)
        data = mygis.read_nc(f, "coefficient").data
        locations = get_xy(f, geo)
        for i in range(len(locations.x)):
            output_data[:, :, locations.y[i], locations.x[i]] = data[0, :, :, i]

    print("Writing output file")
    mygis.write(global_output_file, output_data, varname="coefficient", dtype="d",
                dims=("variable", "time", "latitude", "longitude"),
                extravars=[Bunch(data=times, name="time", dims=("time",), dtype="d",
                                 attributes=Bunch(units="seconds since 1970-01-01 00:00:00.0 0:00")),
                           Bunch(data=geo.lat, name="latitude", dims=("latitude", "longitude"), dtype="f",
                                 attributes=Bunch(units="degrees")),
                           Bunch(data=geo.lon, name="longitude", dims=("latitude", "longitude"), dtype="f",
                                 attributes=Bunch(units="degrees"))])
def temp_stats(dir_name, fulldomain=False):
    if verbose:
        files = glob.glob(dir_name + temp_file_search)
        files.sort()
        print(files[0] + " " + files[-1])

    data = mygis.read_files(dir_name + temp_file_search, "mean_temperature", axis=0, verbose=verbose)
    if not fulldomain:
        data = data[:, 1549:800:-1, :600]

    if verbose: print("mean annual temperature")
    mygis.write(dir_name + "annual_mean_temp.nc", stats.mean(data))

    ndays = data.shape[0]
    dates = compute_dates(ndays)
    curmonth = np.empty(ndays, dtype=bool)
    for month in range(12):
        for i in range(ndays):
            curmonth[i] = (dates[i].month == (month + 1))
        if verbose: print("month{:02}".format(month + 1))
        if verbose: print(" mean")
        mygis.write(dir_name + "month{:02}_mean_temp.nc".format(month + 1),
                    stats.mean(data[curmonth, :, :]))
    return data
def write_pgw(pgw):
    """write pgw ncep data to output files"""
    lat = pgw.geo.lat[:, 0]
    lon = pgw.geo.lon[0, :] + 360

    lat_atts = Bunch(long_name="Latitude", standard_name="latitude", units="degrees_north",
                     axis="Y", actual_range=np.array([lat.min(), lat.max()]))
    lon_atts = Bunch(long_name="Longitude", standard_name="longitude", units="degrees_east",
                     axis="X", actual_range=np.array([lon.min(), lon.max()]))
    time_atts = Bunch(long_name="Time", standard_name="time", units="hours since 1-1-1 00:00:00",
                      axis="T", avg_period="0000-00-00 06:00:00", delta_t="0000-00-00 06:00:00")

    datadims = ("time", "lat", "lon")
    evars = [Bunch(data=lat,  name="lat",  dtype="f", attributes=lat_atts,  dims=("lat",)),
             Bunch(data=lon,  name="lon",  dtype="f", attributes=lon_atts,  dims=("lon",)),
             Bunch(data=None, name="time", dtype="f", attributes=time_atts, dims=("time",))]

    for i in range(len(pgw.data)):
        times = pgw.time[i]
        evars[2].data = times
        evars[2].attributes.actual_range = np.array([times.min(), times.max()])
        mygis.write(pgw.files[i], pgw.data[i], dtype="f", varname=ncep_var,
                    dims=datadims, attributes=data_atts, extravars=evars)
def calc_mean_pressure_levels(gcm="ccsm"):
    calc_date = global_timeing[gcm]
    y0 = global_y0[gcm]
    m0 = global_m0[gcm]
    calc_vert_coord = global_vert_coords[gcm]
    filesearch = global_filesearch

    output_data = [dict() for i in range(12)]
    files = glob.glob(filesearch.format(varname=varnames[0]))
    files.sort()

    for f in files:
        # start with an arbitrary (must be 3D) variable to read timing and 3D coordinates
        base_file = f
        dates = mygis.read_nc(base_file, "time").data
        last_date = calc_date(dates[-1], y0=y0, m0=m0, filename=base_file)
        print(last_date.year)
        # print("{0.year}/{0.month}/{0.day} {0.hour}:{0.minute}:{0.second}".format(last_date))
        if last_date.year >= start_year:
            pressures = calc_vert_coord(base_file)
            for i, d in enumerate(dates):
                curdate = calc_date(d, y0=y0, m0=m0, filename=base_file)
                if curdate.year >= start_year:
                    if "p" in output_data[curdate.month - 1]:
                        output_data[curdate.month - 1]["p"] += pressures[i, ...]
                        output_data[curdate.month - 1]["p_n"] += 1
                    else:
                        output_data[curdate.month - 1]["p"] = np.zeros(pressures[i, ...].shape)
                        output_data[curdate.month - 1]["p"][:] = pressures[i, ...]
                        output_data[curdate.month - 1]["p_n"] = 1

    for i in range(12):
        output_data[i]["p"] /= output_data[i]["p_n"]
        mygis.write(gcm + "_month{0:02}_mean_plevel.nc".format(i + 1),
                    data=output_data[i]["p"], varname="pressure")
def write_icar_file(filename, data):
    """write the output"""
    base_var = data.data.pop()
    if verbose: print("Writing:" + filename)
    mygis.write(filename, base_var.data, varname=base_var.name, dims=base_var.dims,
                attributes=base_var.attributes, global_attributes=data.global_atts,
                extravars=data.data)
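# write_icar_file expects a container with a .data list of variable Bunches and a
# .global_atts attribute; the last entry of .data is written as the primary variable and
# the remaining entries become extravars.  A minimal, hypothetical example of building
# such a container (the names and values here are illustrative, not from the original code):
example = Bunch(global_atts=Bunch(history="example"),
                data=[Bunch(data=np.zeros((10, 10)), name="hgt", dims=("lat", "lon"), dtype="f",
                            attributes=Bunch(units="m")),
                      Bunch(data=np.zeros((4, 10, 10)), name="qv", dims=("time", "lat", "lon"), dtype="f",
                            attributes=Bunch(units="kg kg**-1"))])
# write_icar_file("example_output.nc", example)  # "qv" (the last element) becomes the main variable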
def write_file(date, info, erai):
    """writes ERAi input data to a netcdf file"""
    filename = str(date).replace(" ", "_")
    dims = ("level", "lat", "lon")

    extra_vars = []
    # 3D variables
    #   cloud, ice, qv, u, v, t, p
    # 2D variables
    #   hgt, latent_heat, PBL_height, sensible_heat, sfc_hgt
    #   (sfc_hgt not used currently; should be ~the same as hgt)
    atts = Bunch(long_name="Cloud liquid water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="cloud", data=erai["cloud"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Cloud ice water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="ice", data=erai["ice"], dims=dims, dtype="f", attributes=atts))

    # used as primary variable in io.write
    # atts=Bunch(long_name="Specific Humidity",units="kg kg**-1")
    # extra_vars.append(Bunch(name="qv",data=erai["qv"],dims=dims,dtype="f",attributes=atts))

    atts = Bunch(long_name="U (E/W) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="u", data=erai["u"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="V (N/S) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="v", data=erai["v"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Potential Temperature", units="K")
    extra_vars.append(Bunch(name="theta", data=erai["t"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Pressure", units="Pa")
    extra_vars.append(Bunch(name="p", data=erai["p"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Atmospheric Elevation", units="m")
    extra_vars.append(Bunch(name="z", data=erai["z"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Topographic Height", units="m")
    extra_vars.append(Bunch(name="hgt", data=erai["hgt"], dims=dims[1:], dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Latent Heat flux (positive up)", units="W m**-2")
    extra_vars.append(Bunch(name="latent_heat", data=erai["latent_heat"], dims=dims[1:], dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Sensible Heat flux (positive up)", units="W m**-2")
    extra_vars.append(Bunch(name="sensible_heat", data=erai["sensible_heat"], dims=dims[1:], dtype="f", attributes=atts))

    atts = Bunch(long_name="Planetary Boundary Layer Height", units="m")
    extra_vars.append(Bunch(name="PBL_height", data=erai["PBL_height"], dims=dims[1:], dtype="f", attributes=atts))

    atts = Bunch(long_name="latitude", units="degrees")
    extra_vars.append(Bunch(name="lat", data=info.lat_data, dims=dims[1:], dtype="f", attributes=atts))

    atts = Bunch(long_name="longitude", units="degrees")
    extra_vars.append(Bunch(name="lon", data=info.lon_data, dims=dims[1:], dtype="f", attributes=atts))

    qvatts = Bunch(long_name="Specific Humidity", units="kg kg**-1")

    # write to output file
    mygis.write(filename=filename, varname="qv", data=erai.qv, attributes=qvatts, dtype="f",
                extravars=extra_vars, history=" Produced by erai2icar v." + info.version)
def convert_to_netcdf(filename="INTERCEP.TXT", outputfile="interception_data", data=None):
    """Convert an interceptometer text datafile (or data tuple) into netcdf

    This is separated out a bit from the other routines because it requires a netcdf
    library to be installed, along with my helper module (mygis) to write the netcdf
    file, and I'm guessing most won't use this.
    Also uses my date_fun module to convert the datetime objects into modified julian
    days (easier for netcdf).
    """
    import mygis
    from bunch import Bunch
    import date_fun

    if data is None:
        data = load_data(filename)

    mjd = date_fun.datetime2mjd(data[0])
    extravars = [Bunch(data=mjd, name="mjd", dims=("y",), dtype="d",
                       attributes=Bunch(units="days", description="Modified Julian Day"))]

    desc = "raw voltage read from interceptometer potentiometers and datalogger voltage levels"
    cols = ("0=milliseconds, 1-4=Tree1:W,E,S,N, 5-7,9=Tree2, 8=Tree temperature, "
            "10-13=Tree3, 14,15,16=junk, 17=logger temp, 18=G, 19=Ex(~4.95v)")
    note = """ To convert data to voltage, divide by 32768 and multiply by 6.144
    Also, normalize by the excitation voltage by dividing by the data in column 19 (0 based numbering)
    I'll figure out conversions for Temperature sensors later"""
    attributes = Bunch(description=desc, columns=cols, note=note)

    mygis.write(outputfile, data[1], dtype="h", units="volts*32768/6.144",
                extravars=extravars, attributes=attributes)
def write_outputfile(filename, dataset, mapset, dx):
    """Write the domain file (topography plus grid metadata) to netCDF."""
    globalatts = mapset.projparams
    globalatts.pop("x_0")
    globalatts.pop("y_0")
    globalatts.pop("units")
    globalatts["dx"] = dx

    latvar = Bunch(data=dataset.lat, name="lat", dims=('lat', 'lon'), dtype='f',
                   attributes=Bunch(long_name="latitude", units="degrees"))
    lonvar = Bunch(data=dataset.lon, name="lon", dims=('lat', 'lon'), dtype='f',
                   attributes=Bunch(long_name="longitude", units="degrees"))
    ulatvar = Bunch(data=dataset.ulat, name="lat_u", dims=('lat', 'lon_u'), dtype='f',
                    attributes=Bunch(long_name="latitude_ugrid", units="degrees"))
    ulonvar = Bunch(data=dataset.ulon, name="lon_u", dims=('lat', 'lon_u'), dtype='f',
                    attributes=Bunch(long_name="longitude_ugrid", units="degrees"))
    vlatvar = Bunch(data=dataset.vlat, name="lat_v", dims=('lat_v', 'lon'), dtype='f',
                    attributes=Bunch(long_name="latitude_vgrid", units="degrees"))
    vlonvar = Bunch(data=dataset.vlon, name="lon_v", dims=('lat_v', 'lon'), dtype='f',
                    attributes=Bunch(long_name="longitude_vgrid", units="degrees"))
    landvar = Bunch(data=dataset.xland, name="xland", dims=('lat', 'lon'), dtype='i',
                    attributes=Bunch(long_name="land-sea-mask", units="[0,1]"))

    evars = [latvar, lonvar, ulatvar, ulonvar, vlatvar, vlonvar, landvar]
    # evars=[latvar,lonvar]
    print("Writing: " + filename)
    mygis.write(filename, dataset.topo, varname="HGT", dims=('lat', 'lon'), dtype='f',
                attributes=Bunch(long_name="topography", units="m"),
                extravars=evars, global_attributes=globalatts,
                history="data from make_domain.py")
def main():
    """convert probability coefficients to precip amounts"""
    print("Reading Time data")
    timeseconds = mygis.read_nc(probfile, "time").data
    time = [base_date + datetime.timedelta(i / 86400.0) for i in timeseconds]

    print("Reading Spatial data")
    lat = mygis.read_nc(probfile, "latitude").data
    lon = mygis.read_nc(probfile, "longitude").data

    print("Reading Probability regressions")
    prob = mygis.read_nc(probfile, "coefficient").data
    print("Reading Precip regressions")
    prcp = mygis.read_nc(prcpfile, "coefficient").data

    nx = prob.shape[-1]
    ny = prob.shape[-2]
    nt = prob.shape[-3]
    nv = prob.shape[0]
    nt = len(time)
    # print(nv)
    # print(file_variable,variable_name)
    # nt=30
    print(time[0], time[-1], len(time), nt)

    output_data = np.zeros((nt, ny, nx))
    output_data2 = np.zeros((nt, ny, nx))
    output_data3 = np.zeros((nt, ny, nx))
    output_data4 = np.zeros((nt, ny, nx))

    print("starting")
    # minus 3 for constant, output, residual columns
    # // 2 because we get both the coefficient and the value used for each
    # (integer division so nvars can be used as an array index)
    nvars = (nv - 3) // 2
    for i in range(nt):
        print(" ", i, " / ", nt, end="\r")
        sys.stdout.flush()

        # gefs_data=load_gefs(time[i],geo_file=probfile)
        rand_data = load_rand(i)
        curprec = prcp[nvars + 1, i, ...]
        curprob = prob[nvars + 1, i, ...]

        # for v in range(1,nv):
        #     # gefs_data[v-1]=gefs_data[v-1]*gains[v-1]+offsets[v-1]
        #     curprec+=gefs_data[v-1]*prcp[v,i,...]
        #     curprob+=gefs_data[v-1]*prob[v,i,...]
        # curprec[norm.cdf(rand_data)<curprob]=0
        # output_data[i,...]=curprec
        # output_data2[i,...]=curprob

        curprob /= 200.0
        curprob[curprob < norm.cdf(rand_data)] = 0
        output_data3[i, ...] = curprob

        # anywhere the probability of precip is less than the random number, set precip to 0
        curprec[curprob < norm.cdf(rand_data)] = 0
        # add a random component to the precipitation rescaled by the residuals
        curprec += prcp[nvars + 2, i, ...] * rand_data
        curprec[curprec < 0] = 0
        output_data4[i, ...] = curprec ** (1 / 3.0)

    # mygis.write(output_file,output_data)
    # mygis.write(output_file+"prob",output_data2)
    mygis.write(output_file + "prob_thresh", output_data3)
    mygis.write(output_file + "prec_thresh", output_data4)
def write_interpolated_6hrly(data, z, file_name=None):
    """Write each interpolated 6-hourly variable (except z) to its own netCDF file."""
    outputq_file = "interpolated_{}.nc"
    if file_name is not None:
        outputq_file = "{}_" + file_name

    for k in data.keys():
        if k != "z":
            mygis.write(outputq_file.format(k), data[k], varname=k)
def main(filename, xmin, xmax, ymin, ymax, outputfile, varnames):
    d = mygis.Dataset(filename)
    if varnames is None:
        varnames = d.variables.keys()
    else:
        varnames = varnames.split(",")

    outputvariables = []
    if verbose: print("Reading data")
    for v in varnames:
        if verbose: print(v)
        ncdata = d.variables[v]

        if (xmax is not None) and (v in EW_STAGGER_VARS):
            xmax += 1
        if (ymax is not None) and (v in NS_STAGGER_VARS):
            ymax += 1

        data = subset_data(ncdata, xmin, xmax, ymin, ymax)

        if (xmax is not None) and (v in EW_STAGGER_VARS):
            xmax -= 1
        if (ymax is not None) and (v in NS_STAGGER_VARS):
            ymax -= 1

        atts = Bunch()
        attrlist = ncdata.ncattrs()
        for a in attrlist:
            atts[a] = ncdata.getncattr(a)

        outputvariables.append(Bunch(data=data, name=v, attributes=atts,
                                     dims=ncdata.dimensions, dtype=data.dtype))

    global_atts = Bunch()
    attrlist = d.ncattrs()
    for a in attrlist:
        if a != "history":
            global_atts[a] = d.getncattr(a)

    if "history" in attrlist:
        history = "subset by subset_netcdf.py; " + d.getncattr("history")
    else:
        history = "subset by subset_netcdf.py"

    v = outputvariables[0]
    if len(outputvariables) > 1:
        outputvariables = outputvariables[1:]
    else:
        outputvariables = None

    if verbose: print("Writing output")
    mygis.write(outputfile, v.data, dtype=v.dtype, varname=v.name, dims=v.dims,
                attributes=v.attributes, extravars=outputvariables,
                global_attributes=global_atts, history=history)
def update_files_for_year(year, res, variable, model, mask):
    files = glob.glob(search_dir + model + "/" + variable + "/BCSAR*" + res + "*" + str(year) + "_*")
    files.sort()
    time_info.data = time_gen(year, model)

    data = np.concatenate(myio.read_files(files, variable))
    for i in range(data.shape[0]):
        data[i, ...][mask] = FILL_VALUE

    info = data_info[variable]
    newfilename = files[0].replace(".nc", "")[:-3] + ".nc"
    print(newfilename)

    extra_vars = [lat_info, lon_info, time_info]
    myio.write(newfilename, data, varname=info.name, dtype=info.dtype, dims=info.dims,
               attributes=info.attributes, extravars=extra_vars)
def geo_interpolate(data, geo):
    """geographic (bilinear) interpolation between the CCSM input grid and the NCEP grid"""
    geolut = load_geoLUT(data.geo.lat, data.geo.lon, geo.lat, geo.lon)
    outputdata = np.zeros((data.data.shape[0] + 2, geo.lat.shape[0], geo.lat.shape[1]))

    for i in range(4):
        y = geolut[:, :, i, 0].astype('i')
        x = geolut[:, :, i, 1].astype('i')
        outputdata[1:-1, ...] += np.float32(data.data[:, y, x] * geolut[np.newaxis, :, :, i, 2])

    # wrap around by one month (append January after December) to aid temporal interpolation
    outputdata[-1, ...] = outputdata[1, ...]
    # wrap around by one month (prepend December before January) to aid temporal interpolation
    outputdata[0, ...] = outputdata[-2, ...]

    mygis.write(ncep_var + "_pgw_test_file.nc", outputdata)
    outputdata[outputdata > maxval] = maxval
    outputdata[outputdata < minval] = minval
    return outputdata
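# geo_interpolate assumes load_geoLUT returns a lookup table of shape (ny, nx, 4, 3):
# for each output gridcell, four (y, x, weight) triplets pointing into the source grid,
# with the four weights summing to 1.  A hedged, minimal illustration of applying one
# such table to a single 2D field (load_geoLUT itself is defined elsewhere; this helper
# name is hypothetical):
def apply_geolut_2d(field, geolut):
    """Bilinear-style regridding of a 2D field using a (ny, nx, 4, 3) lookup table."""
    out = np.zeros(geolut.shape[:2], dtype=np.float32)
    for i in range(4):
        y = geolut[:, :, i, 0].astype('i')
        x = geolut[:, :, i, 1].astype('i')
        out += np.float32(field[y, x] * geolut[:, :, i, 2])
    return out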
def main():
    """Compare WRF, SNODAS, and Lidar data on the lidar domain"""
    # from May 1 2010 SNOTEL (2011, 2013 were similar, 2014 was 0.4);
    # at the saddle on May 1 2010 it was 0.4
    snowdensity = 0.35
    snodasyears = [2010, 2004, 2005]

    wdata = [wrf.load("wrf/SWE_daily.nc", extractday=212 + 5 + int(np.round(365.25 * year))) for year in [3, 4]]
    wdata.extend([wrf.load("wrf/SWE_daily.nc", extractday=212 + 20 + int(np.round(365.25 * year))) for year in [3, 4]])
    print(len(wdata))

    sdata = [snodas.load("snodas/SWE_Daily0600UTC_WesternUS_{}.dat".format(year), extractday=125) for year in snodasyears]
    sdata.extend([snodas.load("snodas/SWE_Daily0600UTC_WesternUS_{}.dat".format(year), extractday=140) for year in snodasyears])
    print(len(sdata))
    # sdata=[snodas.load("snodas/SWE_Daily0600UTC_WesternUS_{}.dat".format(year),extractday=120) for year in range(2004,2013)]
    # sdata.insert(0,sdata.pop(6)) # move year 2010 to the beginning of the list

    ldata = lidar.load_fast(loc="lidar/", geofile="snow-on-dem.nc", decimation_factor=10)

    print("Calculating WRF weights")
    try:
        wrfweights = mygis.read_nc("wrf2lidar_weights.nc").data
    except:
        wrfweights = gen_weights(ldata.lat, ldata.lon, wdata[0].lat, wdata[0].lon, mask=(ldata.dem > 1500))
        mygis.write("wrf2lidar_weights.nc", wrfweights)
    # wrfbounds =find_bounds(wrfweights)

    print("Calculating SNODAS weights")
    try:
        snodasweights = mygis.read_nc("snodas2lidar_weights.nc").data
    except:
        snodasweights = gen_weights(ldata.lat, ldata.lon, sdata[0].lat, sdata[0].lon, mask=(ldata.dem > 1500))
        mygis.write("snodas2lidar_weights.nc", snodasweights)
    # snodasbounds =find_bounds(snodasweights)

    wdata[0].lc[wrfweights == 0] = 0
    sdata[0].lc[snodasweights == 0] = 0

    print("Binning by elevations...")
    # note: use dx=lidar_dx because weights are lidar gridcells (not dx=4000)
    wrfbyz = [bin_by_elevation(w.data, w.dem, wdata[0].lc, weights=wrfweights, dz=200, dx=10) for w in wdata]
    print("Binning by elevations...")
    snodasbyz = [bin_by_elevation(s.data, sdata[0].dem, sdata[0].lc, weights=snodasweights, dz=150, dx=10) for s in sdata]  # dx=926
    print("Binning by elevations...")
    lidarbyz = bin_by_elevation(ldata.data * snowdensity, ldata.dem, ldata.lc, dz=100, dx=10)

    print("Plotting")
    plot_volumes(wrfbyz, snodasbyz, lidarbyz)

    snodasyears = [2010, 2004, 2005, 2010.2, 2004.2, 2005.2]
    for i in range(len(snodasbyz)):
        plot_elevation_bands(snodasbyz[i],
                             outputfile="SNODAS_swe_by_z_{}.png".format(snodasyears[i]),
                             title="SNODAS SWE {}".format(snodasyears[i]))
def dtr_stats(dir_name, tmax_data, tmin_data):
    data = tmax_data - tmin_data

    if verbose: print("mean annual diurnal temperature range")
    mygis.write(dir_name + "annual_mean_dtr.nc", stats.mean(data))

    ndays = data.shape[0]
    dates = compute_dates(ndays)
    curmonth = np.empty(ndays, dtype=bool)
    for month in range(12):
        for i in range(ndays):
            curmonth[i] = (dates[i].month == (month + 1))
        if verbose: print("month{:02}".format(month + 1))
        if verbose: print(" mean")
        mygis.write(dir_name + "month{:02}_mean_dtr.nc".format(month + 1),
                    stats.mean(data[curmonth, :, :]))
def main(model, scenario, varname):
    files = glob.glob(model + "/" + scenario + "/day/atmos/day/r1i1p1/latest/" + varname + "/*.nc")
    files.sort()

    atts = mygis.read_atts(files[0], varname)
    gatts = mygis.read_atts(files[0], global_atts=True)

    geo = mygis.read_geo(files[0])
    lat = Bunch(data=geo.lat, name="lat", dtype='f', attributes=geo.latatts, dims=('lat', 'lon'))
    lon = Bunch(data=geo.lon, name="lon", dtype='f', attributes=geo.lonatts, dims=('lat', 'lon'))
    datadims = ('time', 'lat', 'lon')

    start_year = start_years[scenario]
    end_year = end_years[scenario]

    print("Getting time data")
    years, months = read_times(files[0])
    print(files[0].split("/")[-1])
    print(years[0], years[-15:])
    print(months[0], months[-15:])

    print("Reading {} data".format(varname))
    data = mygis.read_nc(files[0], varname, returnNCvar=True)
    output = np.zeros((13, data.data.shape[1], data.data.shape[2]))
    data.ncfile.close()
    n = np.zeros(13)

    for f in files:
        print(" " + f)
        years, months = read_times(f)
        data = mygis.read_nc(f, varname, returnNCvar=True)
        for i in range(data.data.shape[0]):
            if (years[i] > start_year) & (years[i] <= end_year):
                output[months[i] - 1] += data.data[i]
                n[months[i] - 1] += 1
        data.ncfile.close()

    output[-1] = output[:-1].sum(axis=0)
    n[-1] = n[:-1].sum(axis=0)
    if varname == "pr":
        n[-1] /= 365.0
        n[:-1] /= month_lengths[1:]
        n /= 86400.0

    print("writing")
    results = output / n[:, np.newaxis, np.newaxis]
    mygis.write(outputfile.format(model, scenario, varname), results, dims=datadims,
                extravars=[lat, lon], varname=varname, attributes=atts,
                global_attributes=gatts)
def write(filename, data, geo):
    """write DEM data to a file after subsetting to 'good' data"""
    goodpoints = np.where(data > 0)
    ymin = np.min(goodpoints[0])
    ymax = np.max(goodpoints[0]) + 1
    xmin = np.min(goodpoints[1])
    xmax = np.max(goodpoints[1]) + 1

    output_data = data[ymin:ymax, xmin:xmax]
    output_lat = geo.lat[ymin:ymax]
    output_lon = geo.lon[xmin:xmax]

    lat_info.data = output_lat
    lon_info.data = output_lon
    data_info.subset = "[{},{},{},{}]".format(ymin, ymax, xmin, xmax)

    extra_vars = [lat_info, lon_info]
    mygis.write(filename, output_data, varname=data_info.name, dtype=data_info.dtype,
                dims=data_info.dims, attributes=data_info.attributes,
                extravars=extra_vars, history="dem2snodas")
def temp_stats(names, data1, data2, info):
    """Calculate temperature statistics

    names = a list of dataset names
    data1 = a list of tmax datasets
    data2 = a list of tmin datasets
    info  = a structure with
              output_base = output filename 'prefix'; if the dataset name includes
                            'annual' then additional annual statistics are calculated
              year_starts = indices into tmin/tmax for the starting point of each year,
                            used to calculate e.g. interannual variability, growing
                            season length, etc.
    """
    for n, tmax, tmin in zip(names, data1, data2):
        if tmax.min() > 100:
            tmax -= 273.15
        if tmin.min() > 100:
            tmin -= 273.15

        out = info.output_base + "_" + n
        tave = (tmax + tmin) / 2
        dtr = tmax - tmin
        print(out)

        for thisvar, t in zip(["tmin", "tmax", "tave", "dtr"], [tmin, tmax, tave, dtr]):
            print("MAT " + thisvar)
            print(t.shape)
            # note: nyears makes more sense for precip where you want the accumulated precip
            mean_annual_temp = stats.mean(t, nyears=t.shape[0])
            print(mean_annual_temp.mean())
            io.write(out + "_mean_" + thisvar, mean_annual_temp)

            print("interannual " + thisvar)
            interannual = stats.interannual(t, info.year_starts, fun=np.mean)
            print(interannual.mean())
            io.write(out + "_interannual_" + thisvar, interannual)

            hist = np.vstack(stats.histogram(t, precip=False))
            io.write(out + "_histogram_" + thisvar, hist)

        if re.match(".*annual", n):
            frostdays, growing_season = stats.temperature_indicies(tmin, info.year_starts)
            print("Growing season")
            print(growing_season.mean())
            io.write(out + "_growing_season", growing_season)
            print("frostdays")
            print(frostdays.mean())
            io.write(out + "_frostdays", frostdays)
def main(d=None, outputfile="auto_correlations"):
    if d is None:
        d = load_data()
    shape = d.shape

    minlag = 1
    maxlag = 50
    timelags = 4
    rs = np.zeros((maxlag - minlag + 1 + timelags + 1, shape[1], shape[2])) + 1E20

    delta = (shape[1] - maxlag * 2) / 20.0
    current = 0.0
    for i in range(shape[1]):
        if (i - maxlag) >= current:
            current += delta
            print(str(int(np.round(100.0 * (i) / (shape[1] - 1)))) + "%", end=" ")
            sys.stdout.flush()
        for j in range(shape[2]):
            if d[0, i, j] < 1e10:
                for lag in range(minlag, min(shape[1] - i - 1, shape[2] - j - 1, maxlag + 1)):
                    r = 0.0
                    n = 0.0
                    # check for non-fill locations
                    if d[0, i + lag, j] < 1E10:
                        r2, p = stats.pearsonr(d[:, i, j], d[:, i + lag, j])
                        r += r2
                        n += 1
                    if d[0, i, j + lag] < 1E10:
                        r4, p = stats.pearsonr(d[:, i, j], d[:, i, j + lag])
                        r += r4
                        n += 1
                    if n > 0:
                        rs[lag - 1, i, j] = r / n
                for t in range(1, timelags):
                    r, p = stats.pearsonr(d[t:, i, j], d[:-t, i, j])
                    rs[maxlag + t, i, j] = r

    print("...got data.")
    if glob.glob(outputfile + ".nc"):
        os.rename(outputfile + ".nc", outputfile + "_old.nc")
    mygis.write(outputfile, rs)

    if plt is not None:
        vis_data(rs, minlag, maxlag, timelags, outputfile)
def write_output(outputfile, data, geo):
    """Write regridded output with pressure as the primary variable and all other fields as extravars."""
    vardata = [Bunch(data=data[erai_varnames[i]], name=v, dtype="f",
                     attributes=atts[i], dims=dims[i])
               for i, v in enumerate(gcm_varnames)]
    # for v in vardata:
    #     print(v.name,v.dims,v.data.shape)

    vardata.insert(0, Bunch(data=geo.lon, name="lon", dtype="f", attributes=lonatts, dims=londim))
    vardata.insert(0, Bunch(data=geo.lat, name="lat", dtype="f", attributes=latatts, dims=latdim))
    # if geo.p!=None:
    #     vardata.append(Bunch(data=geo.p,name="p",dtype="f",attributes=patts,dims=pdim))

    if len(geo.p.shape) == 1:
        ny, nx = geo.lat.shape
        geo.p = geo.p[:, np.newaxis, np.newaxis].repeat(ny, axis=1).repeat(nx, axis=2)

    mygis.write(outputfile, data=geo.p, varname="p", dtype="f", attributes=patts, dims=pdim,
                history="Regridding performed by cmip/era2gcm.py", extravars=vardata)
def main(pattern="*.tif", ndays=2, verbose=True):
    files, dates = load_info(pattern)
    if verbose: print("Loading data")
    data = load_data(files, dates, verbose=verbose)

    # set up variables we will need
    last_snow = np.zeros(data[0].shape)
    snow_on_again = np.zeros(data[0].shape)
    snow_on = np.empty(data[0].shape, dtype=bool)
    snow_on[:] = True
    last_date = 0

    for date, img in zip(dates, data):
        if date < max_reliable_DSD:
            if verbose:
                print("Processing Day of Year: " + str(date), end=" \r")
                sys.stdout.flush()

            snow_off_now = (img == snow_threshold)
            # points to mark as snow off are those that had snow previously, but don't now
            snow_off_points = np.where(snow_on & snow_off_now)
            # at those points, set DSD as occurring between the current date and the last snow covered date
            last_snow[snow_off_points] = (date + last_date) / 2.0
            # also mark them as no longer having snow
            snow_on[snow_off_points] = False
            # and set the snow on again counter to 0
            snow_on_again[snow_off_points] = 0

            # at points that have snow, add one to the snow on again counter
            snow_on_again[(img < cloud_threshold) & (img > snow_threshold)] += 1
            # after more than n days in a row set snow_on flag again
            snow_on[snow_on_again > ndays] = True

            last_date = date

    if verbose: print("")
    for i in range(nfilter_passes):
        print("Filtering data: pass {}".format(i))
        outputdata = final_spatial_filter(last_snow, verbose)
        last_snow[:] = outputdata[:]

    mygis.write(output_file, outputdata)
def vid2nc(filename="/Users/gutmann/Desktop/IMG_2303.m4v", resolution=(1920, 1080, 3), n=-1,
           outputfile="movie_data.nc"):
    """Read n frames (or all frames if n<=0) from a video file and write them to a netCDF file."""
    import mygis
    import video_reader as vr

    vid = vr.Video_Reader(filename, resolution)

    outputdata = []
    if n > 0:
        for i in range(n):
            outputdata.append(vid.next()[np.newaxis, :, :, :])
    else:
        i = 0
        for v in vid:
            print("\rFrame : {}".format(i), end="")
            sys.stdout.flush()
            outputdata.append(v[np.newaxis, :, :, :])
            i += 1

    outputdata = np.concatenate(outputdata, axis=0)
    mygis.write(outputfile, outputdata)
def main():
    """Combine a series of LIDAR processed netcdf files

    Files should have one variable "data" which is (2,y,x);
    assumes data[0,...] is a minimum and data[1,...] is a maximum.
    When combining files, if two files have data for the same gridcell,
    take the min of data[0,...] from each and the max of data[1,...].
    Output a new file.
    """
    outputfilename = "compiled_lidar.nc"
    files = glob.glob("ot*.nc")

    outputdata = mygis.read_nc(files[0]).data
    for f in files[1:]:
        print(f)
        data = mygis.read_nc(f).data
        process(outputdata, data, index=0, func=np.min)
        process(outputdata, data, index=1, func=np.max)

    mygis.write(outputfilename, outputdata)
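# The process() helper used above is defined elsewhere; from how it is called, it
# presumably combines one layer of two (2, y, x) arrays in place.  A hedged sketch of
# what it might look like (an assumption, not the original implementation, and ignoring
# any fill-value handling the real code may do):
def process(outputdata, data, index=0, func=np.min):
    """Combine layer `index` of two (2, y, x) arrays in place using func (e.g. np.min or np.max)."""
    outputdata[index, ...] = func(np.stack([outputdata[index, ...], data[index, ...]]), axis=0)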
def calc_2d_means(gcm, varname="ps", filesearch=None):
    """Compute monthly mean 2D fields (e.g. ps, sst) for a GCM."""
    calc_date = global_timeing[gcm]
    y0 = global_y0[gcm]
    if varname == "sst":
        y0 = None
    m0 = global_m0[gcm]
    if filesearch is None:
        filesearch = global_filesearch.format(varname=varname)

    output_data = [dict() for i in range(12)]
    files = glob.glob(filesearch)
    files.sort()

    for f in files:
        print(f)
        # start with an arbitrary (must be 3D) variable to read timing and 3D coordinates
        base_file = f
        dates = mygis.read_nc(base_file, "time").data
        last_date = calc_date(dates[-1], y0=y0, m0=m0, filename=base_file)
        if last_date.year >= start_year:
            print("{year}/{month}/{day} {hour}:{minute}:{second}".format(**last_date))
            curdata = mygis.read_nc(f, varname).data
            for i, d in enumerate(dates):
                curdate = calc_date(d, y0=y0, m0=m0, filename=base_file)
                if curdate.year >= start_year:
                    if varname == "sst":
                        # median filter pushes data to cover more land, and removes random
                        # speckling in some datasets (e.g. CNRM!)
                        curdata[i, ...] = med_filt(curdata[i, ...])
                    if varname in output_data[curdate.month - 1]:
                        output_data[curdate.month - 1][varname] += curdata[i, ...]
                        output_data[curdate.month - 1][varname + "_n"] += 1
                    else:
                        output_data[curdate.month - 1][varname] = np.zeros(curdata[i, ...].shape)
                        output_data[curdate.month - 1][varname][:] = curdata[i, ...]
                        output_data[curdate.month - 1][varname + "_n"] = 1

    for i in range(12):
        output_data[i][varname] /= output_data[i][varname + "_n"]
        mygis.write(gcm + "_month{0:02}_mean_{1}.nc".format(i + 1, varname),
                    data=output_data[i][varname], varname=varname)
def lidar2modscag(modscag_file="MODSCAG/fsca2008.dat", lidar_loc="lidar/",
                  lidar_geo="snow-on-dem.nc", decimation_factor=5):
    from wsc import modscag, regrid

    print("loading modscag geo data")
    modscagdata = modscag.load(modscag_file)
    modscagdata.lat = modscagdata.lat[::-1]

    print("loading lidar data")
    data = load_fast(loc=lidar_loc, geofile=lidar_geo, decimation_factor=decimation_factor)

    if glob.glob("lidar2modscag.geolut.pickle"):
        print("reading geoLUT")
        geoLUT_depickler = pickle.Unpickler(open("lidar2modscag.geolut.pickle", "rb"))
        geoLUT = geoLUT_depickler.load()
    else:
        geoLUT = None

    print("computing mean")
    geoLUT, low_res_lidar_mean = regrid.agg(data, modscagdata.lat, modscagdata.lon, geo_lut=geoLUT, agg_func=np.mean)
    print("computing std")
    geoLUT, low_res_lidar_var = regrid.agg(data, modscagdata.lat, modscagdata.lon, geo_lut=geoLUT, agg_func=np.std)
    print("computing max")
    geoLUT, low_res_lidar_max = regrid.agg(data, modscagdata.lat, modscagdata.lon, geo_lut=geoLUT, agg_func=np.max)

    if not glob.glob("lidar2modscag.geolut.pickle"):
        print("writing geolut")
        geoLUT_pickler = pickle.Pickler(open("lidar2modscag.geolut.pickle", "wb"))
        geoLUT_pickler.dump(geoLUT)

    io.write("lidar2modscag_mean.nc", low_res_lidar_mean.data)
    io.write("lidar2modscag_std.nc", low_res_lidar_var.data)
    io.write("lidar2modscag_max.nc", low_res_lidar_max.data)
def main(filesearch, outputfile):
    files = glob.glob(filesearch)
    files.sort()

    d0 = mygis.read_nc(files[0], "rain").data
    outputdata = np.zeros((len(files), d0.shape[1], d0.shape[2]))
    if verbose: print("Outputdata.shape : " + str(outputdata.shape))

    last_data = d0[0] * 0
    lost_rain = d0[0] * 0
    for i, f in enumerate(files):
        if verbose: print(f)
        d = mygis.read_nc(f, "rain").data + lost_rain
        if np.max(d[-1, 5:-5, 5:-5]) < np.max(last_data[5:-5, 5:-5]):
            if verbose: print("Updating lost_rain")
            d -= lost_rain
            lost_rain = update_data(d, last_data)

        outputdata[i] = d[-1] - last_data
        last_data = d[-1]

    if verbose: print("Writing output file")
    mygis.write(outputfile, outputdata, varname="pr")
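# update_data() is defined elsewhere; from its use above it appears to handle resets of the
# accumulated-rain counter: when the new file's accumulation drops below the previous total,
# the previously accumulated amount is carried forward as "lost_rain".  A hedged sketch of
# one plausible implementation (an assumption, not the original code):
def update_data(d, last_data):
    """Return the accumulated rain that was lost where the accumulation counter reset."""
    return np.where(d[-1] < last_data, last_data, 0)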
def write_file(date, info, erai):
    """writes ERAi input data to a netcdf file"""
    filename = str(date).replace(" ", "_")
    dims = ("time", "level", "lat", "lon")
    dims2dt = ("time", "lat", "lon")

    extra_vars = []
    # 3D variables
    #   cloud, ice, qv, u, v, t, p
    # 2D variables
    #   hgt, latent_heat, PBL_height, sensible_heat, sfc_hgt
    #   (sfc_hgt not used currently; should be ~the same as hgt)

    # atts=Bunch(long_name="Cloud liquid water content",units="kg kg**-1", coordinates='latitude longitude')
    # extra_vars.append(Bunch(name="cloud",data=erai["cloud"],dims=dims,dtype="f",attributes=atts))
    #
    # atts=Bunch(long_name="Cloud ice water content",units="kg kg**-1", coordinates='latitude longitude')
    # extra_vars.append(Bunch(name="ice",data=erai["ice"],dims=dims,dtype="f",attributes=atts))

    atts = Bunch(long_name="Relative Humidity", units="[]", coordinates='latitude longitude z time')
    extra_vars.append(Bunch(name="rh", data=erai["rh"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="U (E/W) wind speed", units="m s**-1", coordinates='latitude longitude z time')
    extra_vars.append(Bunch(name="u", data=erai["u"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="V (N/S) wind speed", units="m s**-1", coordinates='latitude longitude z time')
    extra_vars.append(Bunch(name="v", data=erai["v"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Potential Temperature", units="K", coordinates='latitude longitude z time')
    extra_vars.append(Bunch(name="theta", data=erai["t"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Pressure", units="Pa", coordinates='latitude longitude z time')
    extra_vars.append(Bunch(name="p", data=erai["p"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Atmospheric Elevation", units="m", coordinates='latitude longitude', axis="Z")
    extra_vars.append(Bunch(name="z", data=erai["z"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Topographic Height", units="m", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="hgt", data=erai["sfc_hgt"], dims=dims[2:], dtype="f", attributes=atts))

    atts = Bunch(long_name="Total Precipitation", units="kg m**-2", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="pcp", data=erai["precip_total"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Convective Precipitation", units="kg m**-2", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="pcp_conv", data=erai["precip_conv"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface solar radiation (downwards)", units="W m**-2", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="swdown", data=erai["sw"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface longwave radiation (downwards)", units="W m**-2", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="lwdown", data=erai["lw"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Latent Heat flux (positive up)", units="W m**-2", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="latent_heat", data=erai["latent_heat"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Sensible Heat flux (positive up)", units="W m**-2", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="sensible_heat", data=erai["sensible_heat"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Planetary Boundary Layer Height", units="m", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="PBL_height", data=erai["PBL_height"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Skin Temperature", units="K", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="tskin", data=erai["tskin"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Minimum 2m Temperature", units="K", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="t2min", data=erai["tmin"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Maximum 2m Temperature", units="K", coordinates='latitude longitude time')
    extra_vars.append(Bunch(name="t2max", data=erai["tmax"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="latitude", units="degrees", axis='Y')
    extra_vars.append(Bunch(name="latitude", data=info.lat_data, dims=dims[2:], dtype="f", attributes=atts))

    atts = Bunch(long_name="longitude", units="degrees", axis='X')
    extra_vars.append(Bunch(name="longitude", data=info.lon_data, dims=dims[2:], dtype="f", attributes=atts))

    time_since_1900 = date - datetime.datetime(1900, 1, 1, 0, 0, 0)
    time = time_since_1900.days + np.float64(time_since_1900.seconds / 86400.0)
    atts = Bunch(long_name="time", units="days since 1900-01-01", calendar='gregorian', axis="T")
    extra_vars.append(Bunch(name="time", data=time, dims=(dims[0],), dtype="d", attributes=atts))

    # for e in extra_vars:
    #     print(e.name, e.data.shape, e.dims)

    qvatts = Bunch(long_name="Specific Humidity", units="kg kg**-1", coordinates='latitude longitude z time')

    # write to output file
    mygis.write(filename=filename, varname="qv", data=erai.qv, attributes=qvatts, dtype="f", dims=dims,
                extravars=extra_vars, history=" Produced by erai2gard " + info.version)
def write_file(date, info, cesm):
    """writes cesm input data to a netcdf file"""
    filename = info.output_file + str(date).replace(" ", "_")
    dims = ("time", "level", "lat", "lon")
    dims_3d = ("time", "lat", "lon")
    dims_2d = ("lat", "lon")

    extra_vars = []
    # 3D variables (+time)
    #   cloud, ice, qv, u, v, t, p
    # 3D variables (constant in time)
    #   z
    # 2D variables (+time)
    #   latent_heat, PBL_height, sensible_heat
    # 2D variables (constant in time)
    #   hgt, latitude, longitude

    # used as primary variable in io.write
    # atts=Bunch(long_name="Specific Humidity",units="kg kg**-1")
    # extra_vars.append(Bunch(name="qv",data=cesm["qv"],dims=dims,dtype="f",attributes=atts))

    atts = Bunch(long_name="Cloud liquid water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="cloud", data=cesm["cloud"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Cloud ice water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="ice", data=cesm["ice"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="U (E/W) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="u", data=cesm["u"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="V (N/S) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="v", data=cesm["v"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Potential Temperature", units="K")
    extra_vars.append(Bunch(name="theta", data=cesm["t"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Pressure", units="Pa")
    extra_vars.append(Bunch(name="p", data=cesm["p"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Layer thicknesses", units="m")
    extra_vars.append(Bunch(name="dz", data=cesm["dz"].astype("f"), dims=(dims[1],), dtype="f", attributes=atts))

    atts = Bunch(long_name="Layer height", units="m")
    extra_vars.append(Bunch(name="z", data=cesm["z"].astype("f"), dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Topographic Height", units="m")
    print(cesm["hgt"].shape, dims_2d, cesm.qv.shape)
    extra_vars.append(Bunch(name="hgt", data=cesm["hgt"], dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Shortwave Radiation (positive down)", units="W m**-2")
    extra_vars.append(Bunch(name="swdown", data=cesm["sw"], dims=dims_3d, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Longwave Radiation (positive down)", units="W m**-2")
    extra_vars.append(Bunch(name="lwdown", data=cesm["lw"], dims=dims_3d, dtype="f", attributes=atts))

    atts = Bunch(long_name="Skin Temperature", units="K")
    extra_vars.append(Bunch(name="tskin", data=cesm["ts"], dims=dims_3d, dtype="f", attributes=atts))

    atts = Bunch(long_name="latitude", units="degrees")
    extra_vars.append(Bunch(name="lat", data=info.lat_data, dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="longitude", units="degrees")
    extra_vars.append(Bunch(name="lon", data=info.lon_data, dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="xland", units="")
    extra_vars.append(Bunch(name="xland", data=cesm["land"], dims=dims_2d, dtype="f", attributes=atts))

    for k in cesm.keys():
        print(k, cesm[k].shape)

    qvatts = Bunch(long_name="Specific Humidity", units="kg kg**-1")
    print(" ")
    print(" ")
    print("Writing:" + filename)
    print(" ")
    print(" ")

    # write to output file
    mygis.write(filename=filename, varname="qv", data=cesm.qv, dims=dims,
                attributes=qvatts, dtype="f",
                extravars=extra_vars)  # ,history=" Produced by cesm2icar v."+info.version)
def write_file(date, info, cmip):
    """writes cmip input data to a netcdf file"""
    filename = info.output_file + str(date).replace(" ", "_")
    print("Outputting: " + filename)
    dims = ("time", "level", "lat", "lon")
    dims_3d = ("time", "lat", "lon")
    dims_2d = ("lat", "lon")

    extra_vars = []
    # 3D variables (+time)
    #   cloud, ice, qv, u, v, t, p, z
    # 2D variables (constant in time)
    #   hgt, latitude, longitude

    atts = Bunch(long_name="Cloud liquid water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="cloud", data=cmip["cloud"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Cloud ice water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="ice", data=cmip["ice"], dims=dims, dtype="f", attributes=atts))

    # used as primary variable in io.write
    # atts=Bunch(long_name="Specific Humidity",units="kg kg**-1")
    # extra_vars.append(Bunch(name="qv",data=cmip["qv"],dims=dims,dtype="f",attributes=atts))

    atts = Bunch(long_name="U (E/W) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="u", data=cmip["u"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="V (N/S) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="v", data=cmip["v"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Potential Temperature", units="K")
    extra_vars.append(Bunch(name="theta", data=cmip["t"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Pressure", units="Pa")
    extra_vars.append(Bunch(name="p", data=cmip["p"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Layer thicknesses", units="m")
    extra_vars.append(Bunch(name="dz", data=cmip["dz"].astype("f"), dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Layer height", units="m")
    extra_vars.append(Bunch(name="z", data=cmip["z"].astype("f"), dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Topographic Height", units="m")
    # print(cmip["hgt"].shape,dims_2d,cmip.qv.shape)
    extra_vars.append(Bunch(name="hgt", data=cmip["hgt"], dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="latitude", units="degrees")
    extra_vars.append(Bunch(name="lat", data=info.lat_data, dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="longitude", units="degrees")
    extra_vars.append(Bunch(name="lon", data=info.lon_data, dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="xland", units="")
    extra_vars.append(Bunch(name="xland", data=cmip["land"], dims=dims_2d, dtype="f", attributes=atts))

    qvatts = Bunch(long_name="Specific Humidity", units="kg kg**-1")

    # write to output file
    mygis.write(filename=filename, varname="qv", data=cmip.qv, dims=dims,
                attributes=qvatts, dtype="f",
                extravars=extra_vars)  # ,history=" Produced by cmip2icar v."+info.version)
def write_file(date, info, ccsm):
    """writes CCSM input data to a netcdf file"""
    filename = str(date).replace(" ", "_")
    dims = ("time", "level", "lat", "lon")
    dims_3d = ("time", "lat", "lon")
    dims_2d = ("lat", "lon")

    extra_vars = []
    # 3D variables (+time)
    #   cloud, ice, qv, u, v, t, p
    # 3D variables (constant in time)
    #   z
    # 2D variables (+time)
    #   latent_heat, PBL_height, sensible_heat
    # 2D variables (constant in time)
    #   hgt, latitude, longitude

    atts = Bunch(long_name="Cloud liquid water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="cloud", data=ccsm["cloud"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Cloud ice water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="ice", data=ccsm["ice"], dims=dims, dtype="f", attributes=atts))

    # used as primary variable in io.write
    # atts=Bunch(long_name="Specific Humidity",units="kg kg**-1")
    # extra_vars.append(Bunch(name="qv",data=ccsm["qv"],dims=dims,dtype="f",attributes=atts))

    atts = Bunch(long_name="U (E/W) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="u", data=ccsm["u"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="V (N/S) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="v", data=ccsm["v"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Potential Temperature", units="K")
    extra_vars.append(Bunch(name="theta", data=ccsm["t"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Pressure", units="Pa")
    extra_vars.append(Bunch(name="p", data=ccsm["p"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Layer thicknesses", units="m")
    extra_vars.append(Bunch(name="dz", data=ccsm["dz"].astype("f"), dims=(dims[1],), dtype="f", attributes=atts))

    atts = Bunch(long_name="Topographic Height", units="m")
    extra_vars.append(Bunch(name="hgt", data=ccsm["hgt"], dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Latent Heat flux (positive up)", units="W m**-2")
    extra_vars.append(Bunch(name="latent_heat", data=ccsm["latent_heat"], dims=dims_3d, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Sensible Heat flux (positive up)", units="W m**-2")
    extra_vars.append(Bunch(name="sensible_heat", data=ccsm["sensible_heat"], dims=dims_3d, dtype="f", attributes=atts))

    atts = Bunch(long_name="latitude", units="degrees")
    extra_vars.append(Bunch(name="lat", data=info.lat_data, dims=dims_2d, dtype="f", attributes=atts))

    atts = Bunch(long_name="longitude", units="degrees")
    extra_vars.append(Bunch(name="lon", data=info.lon_data, dims=dims_2d, dtype="f", attributes=atts))

    qvatts = Bunch(long_name="Specific Humidity", units="kg kg**-1")

    # write to output file
    mygis.write(filename=filename, varname="qv", data=ccsm.qv, attributes=qvatts, dtype="f",
                extravars=extra_vars)  # ,history=" Produced by ccsm2icar v."+info.version)
def main(inputfile, sounding_file=None):
    """Convert a WRF output file into an ICAR-readable input file (doubling the y dimension)."""
    filename = "icar_" + inputfile
    print(filename)
    yaxis = 2
    yaxis2d = 1

    u = mygis.read_nc(inputfile, "U").data.repeat(2, axis=yaxis)
    # w = mygis.read_nc(inputfile, "W").data.repeat(2, axis=yaxis)
    # V has one extra cell in y, so when doubling the y dimension we have to remove a gridcell
    v = mygis.read_nc(inputfile, "V").data.repeat(2, axis=yaxis)[:, :, :-1, :]

    qv = mygis.read_nc(inputfile, "QVAPOR").data.repeat(2, axis=yaxis)
    qc = mygis.read_nc(inputfile, "QCLOUD").data.repeat(2, axis=yaxis)
    qi = mygis.read_nc(inputfile, "QICE").data.repeat(2, axis=yaxis)
    th = mygis.read_nc(inputfile, "T").data.repeat(2, axis=yaxis)

    pb = mygis.read_nc(inputfile, "PB").data.repeat(2, axis=yaxis)
    p = mygis.read_nc(inputfile, "P").data.repeat(2, axis=yaxis) + pb

    phb = mygis.read_nc(inputfile, "PHB").data.repeat(2, axis=yaxis)
    ph = mygis.read_nc(inputfile, "PH").data.repeat(2, axis=yaxis) + phb

    hgt = mygis.read_nc(inputfile, "HGT").data.repeat(2, axis=yaxis2d)
    land = mygis.read_nc(inputfile, "XLAND").data.repeat(2, axis=yaxis2d)

    nt, nz, ny, nx = qv.shape
    print(nx, ny, nz)
    dims = np.array(qv.shape)

    # g (9.81 m s**-2) is assumed to be defined at module scope
    z = ph / g
    # PH/PHB are on the staggered (nz+1) grid, so the diff gives nz layer thicknesses:
    # dz shape = (time, nz, ny, nx)
    dz = np.diff(z, axis=1)

    # ph/phb are defined between model levels; we want z in the middle of each model level,
    # e.g. z[0]=0, but wrfz[0]=dz[0]/2 where dz[0]=z[1]-z[0]
    wrfz = (z[:, :-1, :, :] + z[:, 1:, :, :]) / 2
    # p = units.z2p(wrfz, 100000)

    # wrfz = np.zeros(dz.shape)
    # wrfz[:, 0, ...] = dz[:, 0, ...] / 2 + hgt
    # for i in range(1, nz):
    #     wrfz[:, i, :, :] = (dz[:, i, :, :] + dz[:, i - 1, :, :]) / 2 + wrfz[:, i - 1, :, :]

    mean_dz = dz[0, ...].mean(axis=1).mean(axis=1)
    print("MEAN LEVELS:")
    print("dz_levels=[")
    for i in range(0, nz, 10):
        curlist = [str(cur) for cur in mean_dz[i:i + 10]]
        print(",".join(curlist) + ",")
    if i + 10 < mean_dz.shape[0]:
        curlist = [str(cur) for cur in mean_dz[i + 10:]]
        print(",".join(curlist) + "]")
    else:
        print("]")

    print("FIRST LEVELS:")
    print("dz_levels=[")
    for i in range(0, nz, 10):
        curlist = [str(cur) for cur in dz[0, i:i + 10, 0, 0]]
        print(",".join(curlist) + ",")
    if i + 10 < dz.shape[1]:
        curlist = [str(cur) for cur in dz[0, i + 10:, 0, 0]]
        print(",".join(curlist) + "]")
    else:
        print("]")

    # dz = np.zeros(dz.shape) + mean_dz[np.newaxis, :, np.newaxis, np.newaxis]
    # make dz uniform along x by copying the x=0 column across the domain
    dz = np.zeros(dz.shape) + dz[:, :, :, np.newaxis, 0]

    # rebuild z on the flattened dz grid: lowest level is half a layer above the terrain
    z = np.zeros(dz.shape)
    z[:, 0, ...] = dz[:, 0, ...] / 2 + hgt
    for i in range(1, nz):
        z[:, i, :, :] = (dz[:, i, :, :] + dz[:, i - 1, :, :]) / 2 + z[:, i - 1, :, :]

    # adjust_p(p, wrfz, z)

    dx = mygis.read_attr(inputfile, "DX")
    if isinstance(dx, np.ndarray):
        dx = dx[0]
    dlon = dx / 111.1
    dlat = dx / 111.1
    lonmin = -110.0
    lonmax = lonmin + nx * dlon
    latmin = 40.0
    latmax = latmin + ny * dlat

    udims = copy(dims)
    udims[-1] += 1
    vdims = copy(dims)
    vdims[-2] += 1

    lon = np.arange(lonmin, lonmax, dlon)[:nx]
    lat = np.arange(latmin, latmax, dlat)[:ny]
    lon, lat = np.meshgrid(lon, lat)

    ulon = np.arange(lonmin - dlon / 2, lonmax + dlon / 2, dlon)[:nx + 1]
    ulat = np.arange(latmin, latmax, dlat)[:ny]
    ulon, ulat = np.meshgrid(ulon, ulat)

    vlon = np.arange(lonmin, lonmax, dlon)[:nx]
    vlat = np.arange(latmin - dlat / 2, latmax + dlat / 2, dlat)[:ny + 1]
    vlon, vlat = np.meshgrid(vlon, vlat)

    lat = lat.reshape((1, ny, nx))
    lon = lon.reshape((1, ny, nx))
    hgt = hgt.reshape((1, ny, nx))

    d3dname = ("t", "z", "y", "x")
    ud3dname = ("t", "z", "y", "xu")
    ud2dname = ("t", "y", "xu")
    vd3dname = ("t", "z", "yv", "x")
    vd2dname = ("t", "yv", "x")
    d2dname = ("t", "y", "x")

    othervars = [
        Bunch(data=v, name="V", dims=vd3dname, dtype="f", attributes=dict(units="m/s", description="Horizontal (y) wind speed")),
        # Bunch(data=w, name="W", dims=d3dname, dtype="f", attributes=dict(units="m/s", description="Vertical wind speed")),
        Bunch(data=qv, name="QVAPOR", dims=d3dname, dtype="f", attributes=dict(units="kg/kg", description="Water vapor mixing ratio")),
        Bunch(data=qc, name="QCLOUD", dims=d3dname, dtype="f", attributes=dict(units="kg/kg", description="Cloud water mixing ratio")),
        Bunch(data=qi, name="QICE", dims=d3dname, dtype="f", attributes=dict(units="kg/kg", description="Cloud ice mixing ratio")),
        Bunch(data=p * 0, name="P", dims=d3dname, dtype="f", attributes=dict(units="Pa", description="Pressure (perturbation)")),
        Bunch(data=p, name="PB", dims=d3dname, dtype="f", attributes=dict(units="Pa", description="Pressure (base)")),
        Bunch(data=th, name="T", dims=d3dname, dtype="f", attributes=dict(units="K", description="Potential temperature")),
        Bunch(data=dz, name="dz", dims=d3dname, dtype="f", attributes=dict(units="m", description="Layer thickness")),
        Bunch(data=z, name="Z", dims=d3dname, dtype="f", attributes=dict(units="m", description="Layer Height AGL (also ASL here)")),
        Bunch(data=wrfz, name="WRFZ", dims=d3dname, dtype="f", attributes=dict(units="m", description="Layer Height AGL (also ASL here) in the WRF input")),
        # Bunch(data=z * 0, name="PH", dims=d3dname, dtype="f", attributes=dict(units="m2/s2", description="Geopotential Height ASL (perturbation)")),
        # Bunch(data=z * g, name="PHB", dims=d3dname, dtype="f", attributes=dict(units="m2/s2", description="Geopotential Height ASL (base)")),
        Bunch(data=lat, name="XLAT", dims=d2dname, dtype="f", attributes=dict(units="deg", description="Latitude")),
        Bunch(data=lon, name="XLONG", dims=d2dname, dtype="f", attributes=dict(units="deg", description="Longitude")),
        Bunch(data=ulat, name="XLAT_U", dims=ud2dname, dtype="f", attributes=dict(units="deg", description="Latitude on U stagger")),
        Bunch(data=ulon, name="XLONG_U", dims=ud2dname, dtype="f", attributes=dict(units="deg", description="Longitude on U stagger")),
        Bunch(data=vlat, name="XLAT_V", dims=vd2dname, dtype="f", attributes=dict(units="deg", description="Latitude on V stagger")),
        Bunch(data=vlon, name="XLONG_V", dims=vd2dname, dtype="f", attributes=dict(units="deg", description="Longitude on V stagger")),
        Bunch(data=hgt, name="HGT", dims=d2dname, dtype="f", attributes=dict(units="m", description="Terrain Elevation")),
        Bunch(data=land, name="XLAND", dims=d2dname, dtype="f", attributes=dict(units="", description="Land Mask [1=land, 2=water]")),
    ]

    fileexists = glob.glob(filename) or glob.glob(filename + ".nc")
    if fileexists:
        print("Removing : " + fileexists[0])
        os.remove(fileexists[0])

    mygis.write(filename, u, varname="U", dims=ud3dname, dtype="f",
                attributes=dict(units="m/s", description="Horizontal (x) wind speed"),
                extravars=othervars)
def main(): filename = "ideal_{}_{}".format(case_study, int(wind_speed)) print(filename) nx, nz, ny = master_dims[case_study] dims = [1, nz, ny, nx] # this is just arbitrary for now dlon = dx / 111.1 dlat = dx / 111.1 lonmin = -110.0 lonmax = lonmin + nx * dlon latmin = 40.0 latmax = latmin + ny * dlat base = Bunch(u=wind_speed, w=0.0, v=0.0, qv=0.0013, qc=0.0, p=100000.0, th=np.arange(273.0, 300, (300 - 273.0) / nz).reshape( (nz, 1, 1)), dz=100.0) base.z = np.arange(0, nz * base.dz, base.dz) if glob.glob("sounding.txt"): update_base(base, "sounding.txt", nz) nz = base.th.size dims = [1, nz, ny, nx] udims = copy(dims) udims[-1] += 1 vdims = copy(dims) vdims[-2] += 1 u = np.zeros(udims, dtype="f") + base.u w = np.zeros(dims, dtype="f") + base.w v = np.zeros(vdims, dtype="f") + base.v qv = np.zeros(dims, dtype="f") + base.qv qc = np.zeros(dims, dtype="f") + base.qc # simple topography = a cosine # coscurve=np.cos(np.arange(dims[3])/dims[3]*2*np.pi+np.pi)+1 # hgt=(coscurve*1000).reshape((1,nx)).repeat(ny,axis=0) hgt = build_topography(case_study, dims) lon = np.arange(lonmin, lonmax, dlon)[:nx] lat = np.arange(latmin, latmax, dlat)[:ny] lon, lat = np.meshgrid(lon, lat) ulon = np.arange(lonmin - dlon / 2, lonmax + dlon / 2, dlon)[:nx + 1] ulat = np.arange(latmin, latmax, dlat)[:ny] ulon, ulat = np.meshgrid(ulon, ulat) vlon = np.arange(lonmin, lonmax, dlon)[:nx] vlat = np.arange(latmin - dlat / 2, latmax + dlat / 2, dlat)[:ny + 1] vlon, vlat = np.meshgrid(vlon, vlat) dz = np.zeros(dims) + base.dz z = np.zeros(dims, dtype="f") + base.z.reshape( (1, nz, 1, 1)) + hgt.reshape((1, 1, ny, nx)) layer1 = (dz[0, :, :] / 2) z[0, :, :] += layer1 for i in range(1, int(nz)): z[:, i, :, :] = z[:, i - 1, :, :] + (dz[:, i - 1, :, :] + dz[:, i, :, :]) / 2.0 p = np.zeros(dims, dtype="f") + base.p adjust_p(p, 0.0, z) th = np.zeros(dims, dtype="f") + base.th lat = lat.reshape((1, ny, nx)) lon = lon.reshape((1, ny, nx)) hgt = hgt.reshape((1, ny, nx)) d3dname = ("t", "z", "y", "x") ud3dname = ("t", "z", "y", "xu") ud2dname = ("t", "y", "xu") vd3dname = ("t", "z", "yv", "x") vd2dname = ("t", "yv", "x") d2dname = ("t", "y", "x") g = 9.81 othervars = [ Bunch(data=v, name="V", dims=vd3dname, dtype="f", attributes=dict(units="m/s", description="Horizontal (y) wind speed")), Bunch(data=w, name="W", dims=d3dname, dtype="f", attributes=dict(units="m/s", description="Vertical wind speed")), Bunch(data=qv, name="QVAPOR", dims=d3dname, dtype="f", attributes=dict(units="kg/kg", description="Water vapor mixing ratio")), Bunch(data=qc, name="QCLOUD", dims=d3dname, dtype="f", attributes=dict(units="kg/kg", description="Cloud water mixing ratio")), Bunch(data=qc, name="QICE", dims=d3dname, dtype="f", attributes=dict(units="kg/kg", description="Cloud ice mixing ratio")), Bunch(data=p * 0, name="P", dims=d3dname, dtype="f", attributes=dict(units="Pa", description="Pressure (perturbation)")), Bunch(data=p, name="PB", dims=d3dname, dtype="f", attributes=dict(units="Pa", description="Pressure (base)")), Bunch(data=th - 300, name="T", dims=d3dname, dtype="f", attributes=dict(units="K", description="Potential temperature")), Bunch(data=dz, name="dz", dims=d3dname, dtype="f", attributes=dict(units="m", description="Layer thickness")), Bunch(data=z, name="Z", dims=d3dname, dtype="f", attributes=dict(units="m", description="Layer Height AGL (also ASL here)")), Bunch(data=z * 0, name="PH", dims=d3dname, dtype="f", attributes=dict( units="m2/s2", description="Geopotential Height ASL (perturbation)")), Bunch(data=z * g, name="PHB", 
dims=d3dname, dtype="f", attributes=dict(units="m2/s2", description="Geopotential Height ASL (base)")), Bunch(data=lat, name="XLAT", dims=d2dname, dtype="f", attributes=dict(units="deg", description="Latitude")), Bunch(data=lon, name="XLONG", dims=d2dname, dtype="f", attributes=dict(units="deg", description="Longitude")), Bunch(data=ulat, name="XLAT_U", dims=ud2dname, dtype="f", attributes=dict(units="deg", description="Latitude on U stagger")), Bunch(data=ulon, name="XLONG_U", dims=ud2dname, dtype="f", attributes=dict(units="deg", description="Longitude on U stagger")), Bunch(data=vlat, name="XLAT_V", dims=vd2dname, dtype="f", attributes=dict(units="deg", description="Latitude on V stagger")), Bunch(data=vlon, name="XLONG_V", dims=vd2dname, dtype="f", attributes=dict(units="deg", description="Longitude on V stagger")), Bunch(data=hgt, name="HGT", dims=d2dname, dtype="f", attributes=dict(units="m", description="Terrain Elevation")) ] fileexists = glob.glob(filename) or glob.glob(filename + ".nc") if fileexists: print("Removing : " + fileexists[0]) os.remove(fileexists[0]) io.write(filename, u, varname="U", dims=ud3dname, dtype="f", attributes=dict(units="m/s", description="Horizontal (x) wind speed"), extravars=othervars)
def write_file(date, info, erai):
    """writes ERAi input data to a netcdf file"""
    filename = str(date).replace(" ", "_")
    dims = ("time", "level", "lat", "lon")
    dims2dt = ("time", "lat", "lon")

    extra_vars = []
    # 3D variables:
    #   cloud, ice, qv, u, v, t, p, z
    # 2D variables:
    #   hgt, latent_heat, PBL_height, sensible_heat, sfc_hgt
    #   (sfc_hgt is not used currently; it should be ~the same as hgt)
    # 1D variables / coordinates:
    #   lat, lon

    atts = Bunch(long_name="Cloud liquid water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="cloud", data=erai["cloud"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Cloud ice water content", units="kg kg**-1")
    extra_vars.append(Bunch(name="ice", data=erai["ice"], dims=dims, dtype="f", attributes=atts))

    # qv is used as the primary variable in the mygis.write call below
    # atts = Bunch(long_name="Specific Humidity", units="kg kg**-1")
    # extra_vars.append(Bunch(name="qv", data=erai["qv"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="U (E/W) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="u", data=erai["u"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="V (N/S) wind speed", units="m s**-1")
    extra_vars.append(Bunch(name="v", data=erai["v"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Potential Temperature", units="K")
    extra_vars.append(Bunch(name="theta", data=erai["t"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Pressure", units="Pa")
    extra_vars.append(Bunch(name="p", data=erai["p"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Atmospheric Elevation", units="m", positive="up")
    extra_vars.append(Bunch(name="z", data=erai["z"], dims=dims, dtype="f", attributes=atts))

    atts = Bunch(long_name="Topographic Height", units="m")
    extra_vars.append(Bunch(name="hgt", data=erai["hgt"], dims=dims[2:], dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface solar radiation (downwards)", units="W m**-2")
    extra_vars.append(Bunch(name="swdown", data=erai["sw"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface longwave radiation (downwards)", units="W m**-2")
    extra_vars.append(Bunch(name="lwdown", data=erai["lw"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Latent Heat flux (positive up)", units="W m**-2")
    extra_vars.append(Bunch(name="latent_heat", data=erai["latent_heat"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Surface Sensible Heat flux (positive up)", units="W m**-2")
    extra_vars.append(Bunch(name="sensible_heat", data=erai["sensible_heat"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Planetary Boundary Layer Height", units="m")
    extra_vars.append(Bunch(name="PBL_height", data=erai["PBL_height"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="Skin Temperature", units="K")
    extra_vars.append(Bunch(name="tskin", data=erai["tskin"], dims=dims2dt, dtype="f", attributes=atts))

    atts = Bunch(long_name="latitude", units="degrees_north")
    extra_vars.append(Bunch(name="lat", data=info.lat_data[:, 0], dims=("lat",), dtype="f", attributes=atts))

    atts = Bunch(long_name="longitude", units="degrees_east")
    extra_vars.append(Bunch(name="lon", data=info.lon_data[0, :], dims=("lon",), dtype="f", attributes=atts))

    time_since_1900 = date - datetime.datetime(1900, 1, 1, 0, 0, 0)
    time = time_since_1900.days + np.float64(time_since_1900.seconds / 86400.0)
    atts = Bunch(long_name="time", units="days since 1900-01-01", calendar="gregorian")
    extra_vars.append(Bunch(name="time", data=time, dims=(dims[0],), dtype="d", attributes=atts))

    qvatts = Bunch(long_name="Specific Humidity", units="kg kg**-1")

    # write to the output file, with specific humidity as the primary variable
    mygis.write(filename=filename, varname="qv", data=erai.qv, attributes=qvatts, dtype="f", dims=dims,
                extravars=extra_vars, history=" Produced by erai2icar v." + info.version)
def write(filename, data):
    """Thin wrapper around mygis.write"""
    mygis.write(filename, data)
def main(): filename="init" nx,nz,ny=(20.,10.,19) dims=[nx,nz,ny] # po=np.log(100000.0) # p1=np.log(50000.0) # dp=(po-p1)/nz # p=np.exp(np.arange(po,p1,-dp)), lonmin=-110.0; lonmax=-100.0; dlon=(lonmax-lonmin)/nx latmin=35.0; latmax=45.0; dlat=(latmax-latmin)/ny base=Bunch(u=10.0,w=0.0,v=0.0, qv=0.0013,qc=0.0, p=100000.0, th=np.arange(273.0,300,(300-273.0)/nz), dz=400.0) base.z=np.arange(0,nz*base.dz,base.dz) if glob.glob("sounding.txt"): update_base(base,"sounding.txt",nz) nz=base.th.size dims=[nx,nz,ny] # base.p=base.p[:nz] u=np.zeros(dims,dtype="f")+base.u w=np.zeros(dims,dtype="f")+base.w v=np.zeros(dims,dtype="f")+base.v qv=np.zeros(dims,dtype="f")+base.qv # qv+=np.sin(np.arange(dims[1])/dims[1]*2*np.pi)+1 qc=np.zeros(dims,dtype="f")+base.qc coscurve=np.cos(np.arange(dims[0])/dims[0]*2*np.pi+np.pi)+1 # p-=coscurve.reshape((nx,1,1))*15000 hgt=(coscurve*1000).reshape((nx,1)).repeat(ny,axis=1) lon=np.arange(lonmin,lonmax,dlon) lat=np.arange(latmin,latmax,dlat) lat,lon=np.meshgrid(lat,lon) #note that this appears "backwards" but these are C-style, fortran will be reversed dz=np.zeros(dims)+base.dz z=np.zeros(dims,dtype="f")+base.z.reshape((1,nz,1))+hgt.reshape((nx,1,ny)) layer1=(dz[:,0,:]/2) z[:,0,:]+=layer1 for i in range(1,int(nz)): z[:,i,:]=z[:,i-1,:]+(dz[:,i-1,:]+dz[:,i,:])/2.0 p=np.zeros(dims,dtype="f")+base.p# .reshape((1,nz,1)) adjust_p(p,0.0,z) th=np.zeros(dims,dtype="f")+base.th.reshape((1,nz,1)) d3dname=("x","z","y") d2dname=("x","y") othervars=[Bunch(data=lat,name="XLAT", dims=d2dname,dtype="f",attributes=dict(units="deg", description="Latitude")), Bunch(data=lon,name="XLONG",dims=d2dname,dtype="f",attributes=dict(units="deg", description="Longitude")), Bunch(data=v, name="v", dims=d3dname,dtype="f",attributes=dict(units="m/s", description="Horizontal (y) wind speed")), Bunch(data=w, name="w", dims=d3dname,dtype="f",attributes=dict(units="m/s", description="Vertical wind speed")), Bunch(data=qv, name="qv", dims=d3dname,dtype="f",attributes=dict(units="kg/kg",description="Water vapor mixing ratio")), Bunch(data=qc, name="qc", dims=d3dname,dtype="f",attributes=dict(units="kg/kg",description="Cloud water mixing ratio")), Bunch(data=p, name="p", dims=d3dname,dtype="f",attributes=dict(units="Pa", description="Pressure")), Bunch(data=th, name="th", dims=d3dname,dtype="f",attributes=dict(units="K", description="Potential temperature")), Bunch(data=dz, name="dz", dims=d3dname,dtype="f",attributes=dict(units="m", description="Layer thickness")), Bunch(data=z, name="z", dims=d3dname,dtype="f",attributes=dict(units="m", description="Layer Height AGL")), Bunch(data=hgt,name="hgt", dims=d2dname,dtype="f",attributes=dict(units="m", description="Terrain Elevation")) ] print(filename) fileexists=glob.glob(filename) or glob.glob(filename+".nc") if fileexists: print("Removing : "+fileexists[0]) os.remove(fileexists[0]) print(u.shape) print(lat.shape) print(hgt.shape) print(lon.shape) io.write(filename, u,varname="u", dims=d3dname,dtype="f",attributes=dict(units="m/s",description="Horizontal (x) wind speed"), extravars=othervars)
def main(): filename = "bc" nx, ny, nz, nt = (20., 20, 10, 24) dims = [nt, nz, ny, nx] lonmin = -110.0 lonmax = -100.0 dlon = (lonmax - lonmin) / nx latmin = 35.0 latmax = 45.0 dlat = (latmax - latmin) / ny base = Bunch(u=10.0, w=0.0, v=0.0, qv=0.0013, qc=0.0, p=100000.0, th=np.arange(273.0, 300, (300 - 273.0) / nz).reshape( (1, nz, 1, 1)), dz=400.0) base.z = np.arange(0, nz * base.dz, base.dz) if glob.glob("sounding.txt"): update_base(base, "sounding.txt", nz) nz = base.th.size dims = [nt, nz, ny, nx] u = np.zeros(dims, dtype="f") + base.u w = np.zeros(dims, dtype="f") + base.w v = np.zeros(dims, dtype="f") + base.v qv = np.zeros(dims, dtype="f") + base.qv qc = np.zeros(dims, dtype="f") + base.qc coscurve = np.cos(np.arange(dims[2]) / dims[2] * 2 * np.pi + np.pi) + 1 hgt = (coscurve * 1000).reshape((1, nx)).repeat(ny, axis=0) lon = np.arange(lonmin, lonmax, dlon) lat = np.arange(latmin, latmax, dlat) lon, lat = np.meshgrid(lon, lat) dz = np.zeros(dims) + base.dz z = np.zeros(dims, dtype="f") + base.z.reshape( (1, nz, 1, 1)) + hgt.reshape((1, 1, ny, nx)) layer1 = (dz[0, 0, :, :] / 2) z[0, 0, :, :] += layer1 for i in range(1, int(nz)): z[:, i, :, :] = z[:, i - 1, :, :] + (dz[:, i - 1, :, :] + dz[:, i, :, :]) / 2.0 p = np.zeros(dims, dtype="f") + base.p adjust_p(p, 0.0, z) th = np.zeros(dims, dtype="f") + base.th d4dname = ("t", "z", "y", "x") d3dname = ("z", "y", "x") d2dname = ("y", "x") othervars = [ Bunch(data=v, name="V", dims=d4dname, dtype="f", attributes=dict(units="m/s", description="Horizontal (y) wind speed")), Bunch(data=w, name="W", dims=d4dname, dtype="f", attributes=dict(units="m/s", description="Vertical wind speed")), Bunch(data=qv, name="QVAPOR", dims=d4dname, dtype="f", attributes=dict(units="kg/kg", description="Water vapor mixing ratio")), Bunch(data=qc, name="QCLOUD", dims=d4dname, dtype="f", attributes=dict(units="kg/kg", description="Cloud water mixing ratio")), Bunch(data=p, name="P", dims=d4dname, dtype="f", attributes=dict(units="Pa", description="Pressure")), Bunch(data=th, name="T", dims=d4dname, dtype="f", attributes=dict(units="K", description="Potential temperature")), Bunch(data=dz, name="dz", dims=d4dname, dtype="f", attributes=dict(units="m", description="Layer thickness")), Bunch(data=z, name="Z", dims=d4dname, dtype="f", attributes=dict(units="m", description="Layer Height AGL")), Bunch(data=lat, name="XLAT", dims=d2dname, dtype="f", attributes=dict(units="deg", description="Latitude")), Bunch(data=lon, name="XLONG", dims=d2dname, dtype="f", attributes=dict(units="deg", description="Longitude")), Bunch(data=hgt, name="HGT", dims=d2dname, dtype="f", attributes=dict(units="m", description="Terrain Elevation")) ] fileexists = glob.glob(filename) or glob.glob(filename + ".nc") if fileexists: print("Removing : " + fileexists[0]) os.remove(fileexists[0]) io.write(filename, u, varname="U", dims=d4dname, dtype="f", attributes=dict(units="m/s", description="Horizontal (x) wind speed"), extravars=othervars)