def main():
    snotel=load_data.cols("snotel_stats.txt")
    ghcn=load_data.cols("ghcn_comparison.txt")

    fig=plt.figure(figsize=(20,15),dpi=50)
    plot_dataset(snotel)
    plt.savefig("snotel_plots.png")
    plt.close()
    
    plt.figure(figsize=(20,15),dpi=50)
    plot_dataset(ghcn)
    plt.savefig("ghcn_plots.png")
    plt.close()
def update_worldfile(worldfile,variable, mapfile,clumpfile,gain=False,delta=False):
	mapdata=load_data.cols(mapfile)
	clumps=load_data.cols(clumpfile)
	
	# set the initial state to be out of a patch
	inpatch=False
	
	# open the outputfile
	fo=open('tempworldfile_'+variable,'w')
	#loop through the input file replacing values as feasible
	with open(worldfile,'r') as f:
		for line in f: 
			# if we have already found that we are within a valid clump/patch
			if inpatch:
				# search for the variable of interest, if it matches, replace
 				# the value and set the state as out of the clump/patch
				if re.match(r'^ *-?[0-9]*\.?[0-9]* *'+variable+' *$', line):
					if delta:
						thisvalue=float(line.split()[0])
						fo.write('                '+str(curvalue+thisvalue)+'     '+variable+'\n')
					elif gain:
						thisvalue=float(line.split()[0])
						fo.write('                '+str(curvalue*thisvalue)+'     '+variable+'\n')
					else:
						fo.write('                '+str(curvalue)+'     '+variable+'\n')
						
					inpatch=False
				else:
					# if we are not at the variable of interest just write the
					# current line to the outputfile
					fo.write(line)
			# if we are on a line that looks like a patch_ID:
			elif re.match(r'^ *[0-9]+ *patch_ID *$', line):
				# get the patchID
				patchID=int(line.split()[0])
				# find the patchID locations in the clump file
				tmp=np.where(clumps==patchID)
				# calculate the mean value at those locations in the mapfile
				curvalue=np.mean(mapdata[tmp])
				# and write the current line to the output file
				fo.write(line)
				# then set the state to be in a patch
				inpatch=True
			else:
				# else we didn't match any special cases, just write the 
				# current line to the output file
				fo.write(line)
			
		
	# close the output file (input file is automagically closed)
	fo.close()
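# Hedged usage sketch (not from the original source; the file names below are
# illustrative placeholders).  update_worldfile walks the worldfile line by line:
# after each "patch_ID" line it looks up that patch's clump in clumpfile, averages
# mapfile over the clump, and substitutes that mean into the next `variable` line.
def _example_update_worldfile():
    update_worldfile('world.state', 'snow_stored',
                     mapfile='swe_map.txt', clumpfile='patch_clumps.txt')
    # gain=True multiplies the existing worldfile value by the mapped mean,
    # delta=True adds it; the edited copy lands in 'tempworldfile_snow_stored'.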
def read_data(directory, ensemble):
    """docstring for read_data"""
    files=glob.glob(directory+"/*{}.txt".format(ensemble))
    outputdata=None
    for f in files:
        try:
            if outputdata is None:
                outputdata=load_data.cols(f)
            else:
                newdata=load_data.cols(f)
                outputdata=np.concatenate([newdata,outputdata])
        except ValueError:
            pass # file probably had nothing in it. 
    return outputdata
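# Hedged usage sketch (directory and ensemble id are illustrative): read_data
# globs "<directory>/*<ensemble>.txt", stacks every file it can parse, and
# returns None if nothing was readable.
def _example_read_data():
    ens = read_data('model_output', 'ens01')
    if ens is not None:
        print(ens.shape)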
def update_base(base, filename, nz):
    data = load_data.cols(filename)
    nz = min(data.shape[0] - 1, nz)
    base.z = data[:nz, 0]
    base.dz = np.diff(data[:nz + 1, 0]).reshape((nz, 1, 1))
    base.th = data[:nz, 1].reshape((nz, 1, 1))
    base.qv = data[:nz, 2].reshape((nz, 1, 1)) / 1000.0
def daily2monthly(dailyfilename, monthlyfilename):
	d=load_data.cols(dailyfilename)
	outputdata=d.copy()
	i=0
	iout=0
	nout=0
	while (i<len(d[:,0])):
		curyear=d[i,2]
		curmonth=d[i,1]
		outputdata[iout,0:3]=np.array([0,curmonth,curyear])
		# zero this month's accumulator before summing the daily rows into it
		outputdata[iout,3:]=0
		nout=0
		in_month=True
		while (i<len(d[:,0])) and in_month:
			in_month=((d[i,2] == curyear) & (d[i,1]==curmonth))
			if in_month:
				outputdata[iout,3:]+=d[i,3:]
				nout+=1
				i+=1
		# convert accumulations to averages
		outputdata[iout,3:]/=nout
		# convert water balance terms back to accumulations instead of averages. 
		outputdata[iout,[5,10,11,12,13,15,16,17,18,30,34]]*=nout
		iout+=1
	
	finaloutput=outputdata[0:iout,:]
	np.savetxt(monthlyfilename,finaloutput,fmt='%f')
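# Hedged usage sketch (file names are illustrative): daily2monthly expects the
# second and third columns of the daily file to hold month and year; it averages
# the remaining columns over each month and then restores the listed
# water-balance columns to monthly accumulations.
def _example_daily2monthly():
    daily2monthly('basin.daily', 'basin.monthly')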
def make_master(filename, masterfile=None):
    
    if masterfile is None:
        masterfile='master'+filename+'.txt'
    spacefile=convertcommas2spaces(filename)
    data=load_data.cols(spacefile)
    os.remove(spacefile)
    
    mjds=fetch_mjd(data)
    data=make_even_dates(mjds,data)
    # np.savetxt(master,data)
    write_masterfile(masterfile,data[:,:9])
def update_base(base, filename, nz):
    """update the base information using data from a sounding file
    
    filename should be a space delimited text file with 3 columns
        height [m], potential temperature [K], and specific humidity [g/kg]"""
    print("Using Sounding from : " + filename)
    data = load_data.cols(filename)
    nz = min(data.shape[0] - 1, nz)
    base.z = data[:nz, 0]
    base.dz = np.diff(data[:nz + 1, 0]).reshape((1, nz, 1, 1))
    base.th = data[:nz, 1].reshape((1, nz, 1, 1))
    base.qv = data[:nz, 2].reshape((1, nz, 1, 1)) / 1000.0
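# Hedged usage sketch: `_ExampleBase` stands in for whatever object the caller
# passes as `base`; the sounding file name is illustrative.
class _ExampleBase(object):
    pass

def _example_update_base():
    base = _ExampleBase()
    update_base(base, 'sounding.txt', nz=20)
    # base.z stays 1D with shape (nz,); base.dz, base.th and base.qv are
    # reshaped to (1, nz, 1, 1), presumably so they broadcast against a
    # 4D (time, z, y, x) model grid.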
def main(filename, outputfile):
    
    d=load_data.cols(filename)
    filldates(d)
    filldata(d)
    writedata(d,outputfile)
def main(datafile, keyfile, badfile=None,topp=False):
    '''
    cleanup soil moisture data, fill bad data, add QC column
    
    to edit: 
        Temperature = [C] or [K]? (currently [C] : line 265 )
        date = excel or mjd or ... (currently excel : line 258)
    
    process_soil takes a datafile and a keyfile as input (and optionally a badfile)
    The data file should be a column formatted file with 
        Column 1 = Modified Julian Day or excel date (must modify code)
        Column 2-n = soil moisture and temperature (others ignored)
    The key file should be a column formatted file too with:
        Column 1: Instrument number (must be a number, not Judd 7-2 or even 7-2)
        Column 2: Soil Moisture Probe Column number (0 based)
        Column 3: Soil Temperature Probe Column number (0 based)
        Column 4: Soil Moisture - Temperature calibration (cm3/cm3/C)
        Column 5: Sensor group, 1=10cm 2=30cm 3=60cm (but could be used for any subsets)
        
    badfile is also column formatted specifying beginning and ending dates of bad data periods
    
    Soil moisture data is first given a rough QC pass that removes spikes and
    values outside predefined thresholds.
    '''
    
    # load the data files or fail
    try:
        data=load_data.cols(datafile,dtype='d')
        keys=load_data.cols(keyfile)
    except IOError: 
        print("Badly formed input data file")
        return
    # data[:,3]=-9999
    # remove any rows in which the date is negative
    tmp=where(data[:,0]>0)[0]
    if len(tmp)<10: 
        print("No valid dates in file")
        return
    data=data[tmp,:]
    
    # create datetime objects for use plotting and printing "pretty"
    dtimes=data[:,0] #if dates are in mjd this is all you need
    # dtimes=date_fun.excel2mjd(dtimes) #if dates are in excel format, convert them to mjd
    # dtimes=make_times(data[:,0:5]) #if dates are in year,month,day,hour,minute convert them to mjd
    dates=date_fun.mjd2datetime(dtimes,roundseconds=True)
    # topp=True
    
    # set up an array to hold all of the soil moisture data
    allsmc=np.zeros((len(dtimes),len(keys[:,0])))
    # loop over all soil moisture columns defined in the keyfile
    for i in range(len(keys[:,0])):
        # grab the current soil moisture and temperature data 
        smc=data[:,keys[i,1]]
        if topp:
            # Topp et al. (1980) empirical polynomial converting dielectric
            # permittivity to volumetric soil moisture [cm3/cm3]
            print("Applying Topp calibration")
            smc=-5.3e-2+2.92e-2*smc-5.5e-4*smc**2+4.3e-6*smc**3
        # data[:,keys[i,2]]-=273.15
        tsoil=data[:,keys[i,2]]
        if np.median(tsoil)>200:
            tsoil-=273.15
        # find where the soil temperature is good and apply a temperature correction
        # to the soil moisture data
        tmp=where(tsoil >-100)[0]
        smc[tmp]=smc[tmp]-((tsoil[tmp]-10)*keys[i,3])
        # perform some minimal cleanup on the soil data (remove spikes and out of bounds)
        cleanup(smc)
        
        remove_frozen(smc,tsoil)
        
        if badfile is not None:
            baddata=load_data.cols(badfile)
            if baddata.shape[1]>3:
                badtimes=np.array([make_times(baddata[:,1:6]),make_times(baddata[:,6:])]).T
            else:
                badtimes=baddata[:,1:]
            baddays=where(baddata[:,0] == keys[i,0])[0]
            if len(baddays)>0:
                for thisday in baddays:
                    remove_baddays(smc,dtimes,badtimes[thisday,:])
        
        allsmc[:,i]=smc
        
    # create a QC mask before fixing the bad soil moisture values
    # allsmc[where(allsmc<0.0005)]=-9999
    allmask=np.ones(allsmc.shape)
    allmask[where(allsmc<0)]=0
    # now fix bad soil moisture values
    fix_badsmc(allsmc)
    # use a median filter to remove small spikes. 
    # smooth_smc(allsmc)
    
    # make all PDF plots
    make_plots(allsmc,dates,keys)
    # write output file (could be moved to a subroutine)
    with open(datafile+'_all.csv','w') as f:
        f.write('   Date      Time  '+',     Soil Temp.,  Moisture, QCflag     '*len(keys[:,0])+'\n')
        f.write('yyyy-mm-dd hh:mm:ss'+',         (C), (cm3/cm3), (0=bad 1=good)'*len(keys[:,0])+'\n')
        
        for j in range(len(allsmc[:,0])):
            f.write(str(dates[j])+',')
            for i in range(len(keys[:,0])):
                f.write('     %8.3f,  %7.3f,   %3i,         ' %(data[j,keys[i,2]],allsmc[j,i],allmask[j,i]))
            f.write('\n')
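# Hedged usage sketch for the soil-moisture main() above (all file names are
# illustrative).  The key file maps each instrument number to its moisture and
# temperature columns, a temperature-calibration slope [cm3/cm3/C] and a sensor
# group, as described in the docstring.
def _example_process_soil():
    main('soil_logger.txt', 'soil_keys.txt', badfile='soil_baddates.txt', topp=True)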
def main(datafile, keyfile, calfile,badfile,inches=False,Farenheight=False,Chimney_2011=False):
    data=load_data.cols(datafile,dtype='d')
    keys=load_data.cols(keyfile)
    if badfile is not None:
        baddata=load_data.cols(badfile)
        badtimes=np.array((make_times(baddata[:,1:6]),make_times(baddata[:,6:]))).T
    
    (vtimes,val)=load_data.cols_date(calfile,year=0,month=1,day=2,hour=3,minute=4)
    
    vtimes=vtimes[1:]
    # dtimes=date_fun.excel2mjd(data[:,0])
    dtimes=data[:,0]
    dates=date_fun.mjd2datetime(dtimes,roundseconds=True)
    
    allsnow=np.zeros((len(dtimes),len(keys[:,0])))
    allmask=np.zeros((len(dtimes),len(keys[:,0])))
    fig=plt.figure()
    
    for i in range(len(keys[:,0])):
        if keys[i,1]>0:
            snow=data[:,keys[i,1]]
        if keys[i,3]>0: 
            snow=data[:,keys[i,3]]/6.02
        if keys[i,2]>0:
            snow=100-data[:,keys[i,2]]
        if keys[i,4]>0:
            if Farenheight:
                print("Working in Farenheight")
                airt=(data[:,keys[i,4]]-32)/1.8+273.15
            else: 
                print("we shouldn't get here for chimney park")
                airt=data[:,keys[i,4]]+273.15
            tmp=np.where(airt<0)
            if len(tmp[0])>0:
                airt[tmp]=9999
                snow[tmp]=-9999
            if not Chimney_2011:
                print("we shouldn't get here for chimney park")
                snow*=sqrt(airt/273.15)
        if inches and not Chimney_2011:
            print("we shouldn't get here for chimney park")
            print("Working in Inches")
            snow*=2.54

        if Chimney_2011:
            print("Correcting Chimney Park")
            snow*=25.4 # "snow" is actually time that was mistakenly converted to inches from mm on the datalogger.
            snow*=0.3314/2 #0.3314 cm/microsecond = speed of sound in air at 0C (air temperature correction follows)
            snow*=sqrt(airt/273.15)
    
        # dists=snow.copy()
        mask=np.ones(len(snow))
        tmp=where(snow<50)
        snow=200-snow
        if len(tmp[0])>0:
            snow[tmp]=-99
            mask[tmp]=0
        
        thesekeys=where(val[0,:] == keys[i,0])
        curval=val[1:,thesekeys]
        curval.shape=(len(curval))
        good_val=where(curval >= 0)
        # this was used for 2010 data from one site where it needed to be cleaned up before calibration
        # if i==1:snow=simple_cleanup(snow,mask) 
        
        snow=apply_calib(dtimes, snow,vtimes[good_val[0]],curval[good_val[0]])
        snow=simple_cleanup(snow,mask)
        tmp=where(snow<-10)
        if len(tmp[0])>0:
            snow[tmp]=np.nan
            plt.plot(dates,snow)
            snow[tmp]=-99
            mask[tmp]=0
        else:
            plt.plot(dates,snow)
        plt.xlabel("Date")
        plt.ylabel("Snow Depth (cm)")
        fig.autofmt_xdate()
        fig.savefig('snow_depth_'+str(int(keys[i,0]))+'.pdf')
        fig.clf()
        
        if badfile is not None:
            baddays=where(baddata[:,0] == keys[i,0])[0]
            if len(baddays)>0:
                for thisday in baddays:
                    remove_baddays(snow,dtimes,badtimes[thisday,:],mask)
        
        allmask[:,i]=mask
        allsnow[:,i]=snow
    
    # allsnow[:,-1]=0
    # allmask[:,-1]=0
    tmp=where(allmask==0)
    if len(tmp[0])>0:allsnow[tmp]=-99
    # fix_badsnow(allsnow[:,:-1],allmask[:,:-1])
    fix_badsnow(allsnow,allmask)
    allsnow[where(allsnow<0)]=0

    # tmp=where(dtimes >julday.mjul_day(2011,6,21,0,0,0))[0]
    # allsnow[tmp[0]:,:]=0
    
    morebaddata=where(~np.isfinite(allsnow))
    if len(morebaddata[0])>0:
        allsnow[morebaddata]=-99
    smooth_snow(allsnow)

    for i in range(len(allsnow[0,:])):
        plt.plot(dates,allsnow[:,i])
        plt.xlabel("Date")
        plt.ylabel("Snow Depth (cm)")
        fig.autofmt_xdate()
        fig.savefig('fixed_snow_depth_'+str(int(keys[i,0]))+'.pdf')
        fig.clf()
    
    
    for i in range(len(keys[:,0])):
        with open('snow_depth_'+str(int(keys[i,0]))+'.txt', 'w') as f:
            f.write('   Date      Time    Snow Depth    QCflag\n')
            f.write('yyyy-mm-dd hh:mm:ss     (cm)    (0=bad,1=good)\n')
            for j in range(len(snow)):
                f.write(str(dates[j])+'      '+str(round(allsnow[j,i]*100)/100.0)+'          '+
                        str(int(allmask[j,i]))+'\n')
    
    with open('snow_depth_all.csv','w') as f:
        f.write('   Date      Time  ')
        for i in range(len(keys[:,0])):
            f.write(', Snow Depth,   QCflag     ')
        f.write('\n')
        
        f.write('yyyy-mm-dd hh:mm:ss')
        for i in range(len(keys[:,0])):
            f.write(',    (cm),   (0=bad 1=good)')
        f.write('\n')

        for j in range(len(snow)):
            f.write(str(dates[j])+',')
            for i in range(len(keys[:,0])):
                f.write('     '+str(round(100*allsnow[j,i])/100.0)+',         '+
                        str(int(allmask[j,i]))+',        ')
            f.write('\n')
    

    
    color_vals=[(1,0,0),(0,1,0),(0,0,1),(1,1,0),(1,0,1),(0,1,1),(0.5,1,0),(0.5,0,1),(0,0.5,1),(1,0.5,0),(1,0,0.5),(0,1,0.5)]
    for i in range(len(keys[:,0])):
        plt.plot(dates,allsnow[:,i],label='Judd-'+str(int(keys[i,0])),lw=0.5,color=color_vals[i])
        
    plt.plot(dates, np.mean(allsnow,axis=1),lw=3.0,label='Mean',color='k')
    # import pdb; pdb.set_trace()
    i=len(keys[:,0])+1
    plt.plot(dates, np.mean(allsnow[:,where(keys[:,5] ==1)[0]],axis=1),lw=2.0,ls='dashed',label='Open',color=color_vals[i])
    i=i+1
    plt.plot(dates, np.mean(allsnow[:,where(keys[:,5] ==2)[0]],axis=1),lw=2.0,ls='dashed',label='Sub-Canopy',color=color_vals[i])
    plt.legend(ncol=4,prop=font_prop(size=11),loc=2)
    plt.xlabel("Date")
    plt.ylabel("Snow Depth (cm)")
    fig.autofmt_xdate()
    plt.ylim(0,200)
    plt.show()
    fig.savefig('plot_summary.pdf')
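# Hedged usage sketch for the snow-depth main() above (file names are
# illustrative).  The calibration file supplies dated manual depth measurements
# per sensor, and badfile lists time periods whose data should be masked out.
def _example_process_snow():
    main('snow_logger.txt', 'snow_keys.txt', 'snow_calibration.txt',
         'snow_baddates.txt', inches=False, Farenheight=True)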
#!/usr/bin/env python
from matplotlib.pyplot import plot,ylabel,legend,clf,figure,xlim,ylim,yscale,xscale
from numpy import zeros
import numpy as np
import load_data, date_fun
import sys

useredgrey=True

prefix='chimney2d_realveg'
green=load_data.cols(prefix+'_basin.daily')
red=load_data.cols(prefix+'_upveg_basin.daily')
grey=load_data.cols('chimney2d_allpine_basin.daily')

greenmjd=date_fun.date2mjd(green[:,2],green[:,1],green[:,0],zeros(len(green[:,0])),zeros(len(green[:,0])),zeros(len(green[:,0])))
greendates=date_fun.mjd2datetime(greenmjd)
cur=red
mjd=date_fun.date2mjd(cur[:,2],cur[:,1],cur[:,0],zeros(len(cur[:,0])),zeros(len(cur[:,0])),zeros(len(cur[:,0])))
reddates=date_fun.mjd2datetime(mjd)
cur=grey
mjd=date_fun.date2mjd(cur[:,2],cur[:,1],cur[:,0],zeros(len(cur[:,0])),zeros(len(cur[:,0])),zeros(len(cur[:,0])))
greydates=date_fun.mjd2datetime(mjd)

curfig=figure()

label1='Real'
label2='Real*3'
label3='Homog'

##############################################
# PLOT Snow Water Equivalent