def l6qc(cf, ds5, z):
    """
    Produce the L6 data structure from the L5 data.

    Deep-copies ds5 into a new structure at nc_level 'L6', records the
    configured L6 processing steps in the global attributes, optionally
    runs the 2d footprint model and finally re-applies the QC checks.

    Parameters:
        cf:  control file object (dict-like)
        ds5: L5 data structure
        z:   counter of L6 processing steps applied so far

    Returns:
        (ds6, z): the L6 data structure and the updated step counter.
    """
    ds6 = copy.deepcopy(ds5)
    ds6.globalattributes['nc_level'] = 'L6'
    # record any offline (external) L6 processing declared in the control file
    if (qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L6_offline') and
            cf['Functions']['L6_offline'] == 'True') and \
            qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L6_keys'):
        try:
            ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', ' + cf['Functions']['L6_keys']
        except KeyError:
            # 'L6Functions' attribute not present yet; start the list
            ds6.globalattributes['L6Functions'] = cf['Functions']['L6_keys']
        z = z + 1
    # run MOST (Buckingham Pi) 2d footprint model (Kljun et al. 2004)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='footprint') and cf['Functions']['footprint'] == 'True':
        try:
            ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', footprint'
        except KeyError:
            ds6.globalattributes['L6Functions'] = 'footprint'
        qcts.do_footprint_2d(cf, ds6, datalevel='L6')
    # roll the L6 function list into the cumulative 'Functions' attribute
    try:
        ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', ' + ds6.globalattributes['L6Functions']
    except KeyError:
        # no L6 functions were recorded for this run
        ds6.globalattributes['Functions'] = ''
    # re-apply the quality control checks (range, diurnal and rules)
    if z > 0:
        log.info(' Doing QC checks on L6 data')
        qcck.do_qcchecks(cf, ds6)
        try:
            ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        except KeyError:
            ds6.globalattributes['L6Functions'] = 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
    return ds6, z
def CoordinateFluxGaps(cf, ds, Fc_in='Fc', Fe_in='Fe', Fh_in='Fh'):
    """
    Co-ordinate gaps in the major fluxes Fc, Fe and Fh.

    Wherever any one of the three flux series is masked (missing), the
    other two are also set to the missing value (-9999) and flagged
    (flag 19), so that all three fluxes have gaps at the same times.

    Parameters:
        cf: control file object
        ds: data structure, modified in place
        Fc_in, Fe_in, Fh_in: names of the flux series in ds; may be
            overridden by cf['FunctionArgs']['gapsvars']
    """
    if not qcutils.cfoptionskey(cf, Key='CoordinateFluxGaps'):
        return
    if qcutils.cfkeycheck(cf, Base='FunctionArgs', ThisOne='gapsvars'):
        # series names supplied in the control file override the defaults
        gapsvars = ast.literal_eval(cf['FunctionArgs']['gapsvars'])
        Fc_in, Fe_in, Fh_in = gapsvars[0], gapsvars[1], gapsvars[2]
    Fc, f = qcutils.GetSeriesasMA(ds, Fc_in)
    Fe, f = qcutils.GetSeriesasMA(ds, Fe_in)
    Fh, f = qcutils.GetSeriesasMA(ds, Fh_in)
    # indices where at least one of the three fluxes is missing
    index = numpy.ma.where((Fc.mask == True) | (Fe.mask == True) | (Fh.mask == True))[0]
    # iterate the gap indices directly (was an index-counter loop)
    for j in index:
        # mask and flag any flux that is not already missing at this time
        if Fc.mask[j] == False:
            Fc.mask[j] = True
            Fc[j] = numpy.float64(-9999)
            ds.series[Fc_in]['Flag'][j] = numpy.int32(19)
        if Fe.mask[j] == False:
            Fe.mask[j] = True
            Fe[j] = numpy.float64(-9999)
            ds.series[Fe_in]['Flag'][j] = numpy.int32(19)
        if Fh.mask[j] == False:
            Fh.mask[j] = True
            Fh[j] = numpy.float64(-9999)
            ds.series[Fh_in]['Flag'][j] = numpy.int32(19)
    ds.series[Fc_in]['Data'] = numpy.ma.filled(Fc, float(-9999))
    ds.series[Fe_in]['Data'] = numpy.ma.filled(Fe, float(-9999))
    ds.series[Fh_in]['Data'] = numpy.ma.filled(Fh, float(-9999))
    log.info(' Finished gap co-ordination')
def CoordinateFluxGaps(cf, ds, Fc_in='Fc', Fe_in='Fe', Fh_in='Fh'):
    """Give Fc, Fe and Fh a common set of gaps.

    Time steps where any one of the three fluxes is masked are set to
    -9999 and flagged 19 in the other two as well, so all three series
    are missing at exactly the same times.
    """
    if not qcutils.cfoptionskey(cf, Key='CoordinateFluxGaps'):
        return
    if qcutils.cfkeycheck(cf, Base='FunctionArgs', ThisOne='gapsvars'):
        # optional override of the series names from the control file
        names = ast.literal_eval(cf['FunctionArgs']['gapsvars'])
        Fc_in, Fe_in, Fh_in = names[0], names[1], names[2]
    Fc, f = qcutils.GetSeriesasMA(ds, Fc_in)
    Fe, f = qcutils.GetSeriesasMA(ds, Fe_in)
    Fh, f = qcutils.GetSeriesasMA(ds, Fh_in)
    # rows where at least one flux is already missing
    gap_rows = numpy.ma.where((Fc.mask == True) | (Fe.mask == True) |
                              (Fh.mask == True))[0]
    # propagate each gap into whichever series are still present there
    for row in gap_rows:
        for series, name in ((Fc, Fc_in), (Fe, Fe_in), (Fh, Fh_in)):
            if series.mask[row] == False:
                series.mask[row] = True
                series[row] = numpy.float64(-9999)
                ds.series[name]['Flag'][row] = numpy.int32(19)
    ds.series[Fc_in]['Data'] = numpy.ma.filled(Fc, float(-9999))
    ds.series[Fe_in]['Data'] = numpy.ma.filled(Fe, float(-9999))
    ds.series[Fh_in]['Data'] = numpy.ma.filled(Fh, float(-9999))
    log.info(' Finished gap co-ordination')
def l6qc(cf, ds5, z):
    # Produce the L6 data structure from the L5 data: deep-copy ds5,
    # record the configured L6 processing steps in the global attributes,
    # optionally run the 2d footprint model and re-apply the QC checks.
    # Returns (ds6, z) where z counts the L6 processing steps applied.
    ds6 = copy.deepcopy(ds5)
    ds6.globalattributes['nc_level'] = 'L6'
    # record any offline (external) L6 processing declared in the control file
    if (qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L6_offline')
            and cf['Functions']['L6_offline'] == 'True') and qcutils.cfkeycheck(
                cf, Base='Functions', ThisOne='L6_keys'):
        try:
            ds6.globalattributes['L6Functions'] = ds6.globalattributes[
                'L6Functions'] + ', ' + cf['Functions']['L6_keys']
        except:
            # 'L6Functions' attribute not present yet; start the list
            ds6.globalattributes['L6Functions'] = cf['Functions']['L6_keys']
        z = z + 1
    # run MOST (Buckingham Pi) 2d footprint model (Kljun et al. 2004)
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='footprint') and cf['Functions']['footprint'] == 'True':
        try:
            ds6.globalattributes['L6Functions'] = ds6.globalattributes[
                'L6Functions'] + ', footprint'
        except:
            ds6.globalattributes['L6Functions'] = 'footprint'
        qcts.do_footprint_2d(cf, ds6, datalevel='L6')
    # roll the L6 function list into the cumulative 'Functions' attribute
    try:
        ds6.globalattributes['Functions'] = ds6.globalattributes[
            'Functions'] + ', ' + ds6.globalattributes['L6Functions']
    except:
        # NOTE(review): falls back to an empty string when either attribute
        # is missing, discarding any existing 'Functions' content — confirm
        ds6.globalattributes['Functions'] = ''
    # re-apply the quality control checks (range, diurnal and rules)
    if z > 0:
        log.info(' Doing QC checks on L6 data')
        qcck.do_qcchecks(cf, ds6)
        try:
            ds6.globalattributes['L6Functions'] = ds6.globalattributes[
                'L6Functions'] + ', do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        except:
            ds6.globalattributes[
                'L6Functions'] = 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
    return ds6, z
def plotxy(cf, nFig, plt_cf, dsa, dsb, si, ei):
    """
    Plot xy scatters of paired series from one or two data structures.

    Series names come from plt_cf['XSeries'] / plt_cf['YSeries'] (string
    representations of lists).  With a single data structure (dsa == dsb)
    one panel with a regression is drawn per pair; with two structures,
    side-by-side panels are drawn with the regression on the second.
    Optionally saves the figure as a PNG named from the figure number and
    the start/end timestamps of the plotted window.

    Parameters:
        cf:     control file object
        nFig:   figure number (may be int or str)
        plt_cf: plot section of the control file
        dsa, dsb: data structures to plot from
        si, ei: start and end indices of the window (ei == -1 means to
                the end of the series)
    """
    SiteName = dsa.globalattributes['site_name']
    PlotDescription = cf['Plots'][str(nFig)]['Title']
    fig = plt.figure(numpy.int32(nFig))
    fig.clf()
    plt.figtext(0.5, 0.95, SiteName + ': ' + PlotDescription, ha='center', size=16)
    XSeries = ast.literal_eval(plt_cf['XSeries'])
    YSeries = ast.literal_eval(plt_cf['YSeries'])
    log.info(' Plotting xy: ' + str(XSeries) + ' v ' + str(YSeries))
    if dsa == dsb:
        # single data structure: one panel per pair, with regression
        for xname, yname in zip(XSeries, YSeries):
            xa, flag, attr = qcutils.GetSeriesasMA(dsa, xname, si=si, ei=ei)
            ya, flag, attr = qcutils.GetSeriesasMA(dsa, yname, si=si, ei=ei)
            xyplot(xa, ya, sub=[1, 1, 1], regr=1, xlabel=xname, ylabel=yname)
    else:
        # two data structures: side-by-side panels, regression on the second
        for xname, yname in zip(XSeries, YSeries):
            xa, flag, attr = qcutils.GetSeriesasMA(dsa, xname, si=si, ei=ei)
            ya, flag, attr = qcutils.GetSeriesasMA(dsa, yname, si=si, ei=ei)
            xb, flag, attr = qcutils.GetSeriesasMA(dsb, xname, si=si, ei=ei)
            yb, flag, attr = qcutils.GetSeriesasMA(dsb, yname, si=si, ei=ei)
            xyplot(xa, ya, sub=[1, 2, 1], xlabel=xname, ylabel=yname)
            xyplot(xb, yb, sub=[1, 2, 2], regr=1, xlabel=xname, ylabel=yname)
    # build start/end timestamp strings for the PNG file name
    STList = []
    ETList = []
    if ei == -1:
        L1XArray = numpy.array(dsa.series['DateTime']['Data'][si:ei])
    else:
        L1XArray = numpy.array(dsa.series['DateTime']['Data'][si:ei + 1])
    for fmt in ['%Y', '_', '%m', '_', '%d', '_', '%H', '%M']:
        STList.append(L1XArray[0].strftime(fmt))
        if ei == -1:
            ETList.append(dsa.series['DateTime']['Data'][-1].strftime(fmt))
        else:
            ETList.append(L1XArray[-1].strftime(fmt))
    if qcutils.cfkeycheck(cf, Base='Output', ThisOne='PNGFile') and cf['Output']['PNGFile'] == 'True':
        log.info(' Generating a PNG file of the plot')
        # str(nFig): nFig may be passed as an integer (see plt.figure above),
        # in which case bare string concatenation would raise TypeError
        PNGFileName = cf['Files']['PNG']['PNGFilePath'] + 'Fig' + str(nFig) + '_' + ''.join(STList) + '-' + ''.join(ETList) + '.png'
        plt.savefig(PNGFileName)
    fig.show()
def CoordinateAh7500AndFcGaps(cf, ds, Fcvar='Fc'):
    '''
    Cleans up Ah_7500_Av based upon Fc gaps to for QA check on Ah_7500_Av v Ah_HMP.

    Time steps where Fc failed QC (flag neither 0 nor 10) have Ah_7500_Av
    set to the missing value and given the corresponding Fc flag, so the
    Ah_7500_Av v Ah_HMP comparison is only made on good Fc data.

    Parameters:
        cf: control file object
        ds: data structure, modified in place
        Fcvar: name of the Fc series; may be overridden via
            cf['FunctionArgs']['AhcheckFc']
    '''
    log.info(' Doing the Ah_7500 check')
    if qcutils.cfkeycheck(cf, Base='FunctionArgs', ThisOne='AhcheckFc'):
        Fclist = ast.literal_eval(cf['FunctionArgs']['AhcheckFc'])
        Fcvar = Fclist[0]
    # index2 Index of bad Fc observations (flag neither 0 nor 10)
    index2 = numpy.where((ds.series[Fcvar]['Flag'] != 0) &
                         (ds.series[Fcvar]['Flag'] != 10))
    # mask Ah_7500_Av wherever Fc is bad and carry the Fc flag across
    ds.series['Ah_7500_Av']['Data'][index2] = numpy.float64(c.missing_value)
    ds.series['Ah_7500_Av']['Flag'][index2] = ds.series[Fcvar]['Flag'][index2]
    # NOTE(review): the original also computed an index of bad Ah_7500_Av
    # observations ('index1') and re-assigned their flags to themselves —
    # a no-op — so that dead code has been removed.
def CoordinateAh7500AndFcGaps(cf, ds, Fcvar='Fc'):
    '''
    Cleans up Ah_7500_Av based upon Fc gaps to for QA check on Ah_7500_Av v Ah_HMP.

    Time steps where Fc failed QC (flag neither 0 nor 10) have Ah_7500_Av
    set to the missing value and given the corresponding Fc flag.  Records
    itself in the global 'Functions' attribute once done.

    Parameters:
        cf: control file object
        ds: data structure, modified in place
        Fcvar: name of the Fc series; may be overridden via
            cf['FunctionArgs']['AhcheckFc']
    '''
    if not qcutils.cfoptionskey(cf, Key='CoordinateAh7500&FcGaps'):
        return
    log.info(' Doing the Ah_7500 check')
    if qcutils.cfkeycheck(cf, Base='FunctionArgs', ThisOne='AhcheckFc'):
        Fclist = ast.literal_eval(cf['FunctionArgs']['AhcheckFc'])
        Fcvar = Fclist[0]
    # index2 Index of bad Fc observations (flag neither 0 nor 10)
    index2 = numpy.where((ds.series[Fcvar]['Flag'] != 0) &
                         (ds.series[Fcvar]['Flag'] != 10))
    # use c.missing_value instead of a hard-coded -9999, consistent with
    # the other versions of this function in this module
    ds.series['Ah_7500_Av']['Data'][index2] = numpy.float64(c.missing_value)
    ds.series['Ah_7500_Av']['Flag'][index2] = ds.series[Fcvar]['Flag'][index2]
    # NOTE(review): the original also computed an index of bad Ah_7500_Av
    # observations ('index1') and re-assigned their flags to themselves —
    # a no-op — so that dead code has been removed.
    if 'CoordinateAh7500AndFcGaps' not in ds.globalattributes['Functions']:
        ds.globalattributes['Functions'] = ds.globalattributes['Functions'] + ',CoordinateAh7500AndFcGaps'
def CoordinateFluxGaps(cf, ds, Fc_in='Fc', Fe_in='Fe', Fh_in='Fh'):
    """
    Co-ordinate gaps in the major fluxes Fc, Fe and Fh using QC flags.

    A flux observation is treated as good when its flag is a multiple of
    10 (0, 10, 20, ...).  Any time step where one flux is good but another
    is bad has the good flux flagged 19 and its data set to the missing
    value, so all three fluxes share the same gaps.

    Parameters:
        cf: control file object
        ds: data structure, modified in place
        Fc_in, Fe_in, Fh_in: names of the flux series in ds; may be
            overridden by cf['FunctionArgs']['gapsvars']
    """
    if qcutils.cfkeycheck(cf, Base='FunctionArgs', ThisOne='gapsvars'):
        # series names supplied in the control file override the defaults
        gapsvars = ast.literal_eval(cf['FunctionArgs']['gapsvars'])
        Fc_in, Fe_in, Fh_in = gapsvars[0], gapsvars[1], gapsvars[2]
    Fc, flagC, a = qcutils.GetSeriesasMA(ds, Fc_in)
    Fe, flagE, a = qcutils.GetSeriesasMA(ds, Fe_in)
    Fh, flagH, a = qcutils.GetSeriesasMA(ds, Fh_in)
    # NOTE(review): an initial combined 'index' of all-bad observations was
    # computed here in the original but never used; removed as dead code.
    # for each flux: good here (flag % 10 == 0) while another flux is bad
    rC_i = numpy.ma.where((numpy.mod(flagC, 10) == 0) & ((numpy.mod(flagE, 10) != 0) | (numpy.mod(flagH, 10) != 0)))[0]
    rE_i = numpy.ma.where((numpy.mod(flagE, 10) == 0) & ((numpy.mod(flagC, 10) != 0) | (numpy.mod(flagH, 10) != 0)))[0]
    rH_i = numpy.ma.where((numpy.mod(flagH, 10) == 0) & ((numpy.mod(flagC, 10) != 0) | (numpy.mod(flagE, 10) != 0)))[0]
    # flag 19: datum removed to co-ordinate gaps across the fluxes
    ds.series[Fc_in]['Flag'][rC_i] = numpy.int32(19)
    ds.series[Fe_in]['Flag'][rE_i] = numpy.int32(19)
    ds.series[Fh_in]['Flag'][rH_i] = numpy.int32(19)
    flux_series = [Fc_in, Fe_in, Fh_in]
    for ThisOne in flux_series:
        # blank out the data wherever flag 19 was just applied
        index = numpy.where(ds.series[ThisOne]['Flag'] == 19)[0]
        ds.series[ThisOne]['Data'][index] = numpy.float64(c.missing_value)
    log.info(' Finished gap co-ordination')
def CoordinateAh7500AndFcGaps(cf, ds, Fcvar='Fc'):
    '''Cleans up Ah_7500_Av based upon Fc gaps to for QA check on Ah_7500_Av v Ah_HMP.'''
    if not qcutils.cfoptionskey(cf, Key='CoordinateAh7500&FcGaps'):
        return
    log.info(' Doing the Ah_7500 check')
    if qcutils.cfkeycheck(cf, Base='FunctionArgs', ThisOne='AhcheckFc'):
        # the Fc series name can be overridden from the control file
        Fclist = ast.literal_eval(cf['FunctionArgs']['AhcheckFc'])
        Fcvar = Fclist[0]
    # index1 Index of bad Ah_7500_Av observations
    index1 = numpy.where((ds.series['Ah_7500_Av']['Flag'] != 0)
                         & (ds.series['Ah_7500_Av']['Flag'] != 10))
    # index2 Index of bad Fc observations
    index2 = numpy.where((ds.series[Fcvar]['Flag'] != 0)
                         & (ds.series[Fcvar]['Flag'] != 10))
    # mask Ah_7500_Av wherever Fc failed QC and carry the Fc flag across
    ds.series['Ah_7500_Av']['Data'][index2] = numpy.float64(-9999)
    ds.series['Ah_7500_Av']['Flag'][index2] = ds.series[Fcvar]['Flag'][index2]
    # NOTE(review): the self-assignment below is a no-op; presumably a
    # remnant of earlier logic — confirm intent before removing
    ds.series['Ah_7500_Av']['Flag'][index1] = ds.series['Ah_7500_Av']['Flag'][
        index1]
    # record this function in the global attribute list (once only)
    if 'CoordinateAh7500AndFcGaps' not in ds.globalattributes['Functions']:
        ds.globalattributes['Functions'] = ds.globalattributes[
            'Functions'] + ',CoordinateAh7500AndFcGaps'
def plottimeseries(cf, nFig, dsa, dsb, si, ei):
    # Plot each series in the plot-setup list as a time series panel:
    # left axis from dsa, right axis from dsb, plus a diurnal-average
    # inset and a histogram of the QC flags for the dsb series.
    # Optionally saves the figure as a PNG named from the figure number
    # and the start/end timestamps of the plotted window.
    #   cf:      control file object
    #   nFig:    figure number / key into cf['Plots']
    #   dsa/dsb: data structures (e.g. two processing levels)
    #   si, ei:  start and end indices of the window (ei == -1 => to end)
    SiteName = dsa.globalattributes['site_name']
    Level = dsb.globalattributes['nc_level']
    dt = numpy.int32(dsa.globalattributes['time_step'])
    Month = dsa.series['Month']['Data'][0]
    p = plot_setup(cf, nFig)
    # plot dimensions may be overridden from the control file
    if qcutils.cfkeycheck(cf, Base='PlotSpec',
                          ThisOne='Width') and qcutils.cfkeycheck(
                              cf, Base='PlotSpec', ThisOne='Height'):
        p['PlotWidth'] = numpy.float64(cf['PlotSpec']['Width'])
        p['PlotHeight'] = numpy.float64(cf['PlotSpec']['Height'])
    log.info(' Plotting series: ' + str(p['SeriesList']))
    L1XArray = numpy.array(dsa.series['DateTime']['Data'][si:ei])
    L2XArray = numpy.array(dsb.series['DateTime']['Data'][si:ei])
    p['XAxMin'] = min(L2XArray)
    p['XAxMax'] = max(L2XArray)
    p['loc'], p['fmt'] = get_ticks(p['XAxMin'], p['XAxMax'])
    plt.ioff()
    fig = plt.figure(numpy.int32(nFig),
                     figsize=(p['PlotWidth'], p['PlotHeight']))
    fig.clf()
    plt.figtext(0.5, 0.95, SiteName + ': ' + p['PlotDescription'],
                ha='center', size=16)
    # one row of panels per series, stacked vertically
    for ThisOne, n in zip(p['SeriesList'], range(p['nGraphs'])):
        if ThisOne in dsa.series.keys():
            aflag = dsa.series[ThisOne]['Flag']
            p['Units'] = dsa.series[ThisOne]['Attr']['units']
            p['YAxOrg'] = p['ts_YAxOrg'] + n * p['yaxOrgOffset']
            L1YArray, p['nRecs'], p['nNotM'], p['nMskd'] = get_yarray(
                dsa, ThisOne, si=si, ei=ei)
            # check the control file to see if the Y axis minima have been specified
            nSer = p['SeriesList'].index(ThisOne)
            p['LYAxMax'], p['LYAxMin'] = get_yaxislimitsfromcf(
                cf, nFig, 'YLMax', 'YLMin', nSer, L1YArray)
            plot_onetimeseries_left(fig, n, ThisOne, L1XArray, L1YArray, p)
        if ThisOne in dsb.series.keys():
            bflag = dsb.series[ThisOne]['Flag']
            p['Units'] = dsb.series[ThisOne]['Attr']['units']
            p['YAxOrg'] = p['ts_YAxOrg'] + n * p['yaxOrgOffset']
            #Plot the Level 2 data series on the same X axis but with the scale on the right Y axis.
            L2YArray, p['nRecs'], p['nNotM'], p['nMskd'] = get_yarray(
                dsb, ThisOne, si=si, ei=ei)
            # check the control file to see if the Y axis minima have been specified
            nSer = p['SeriesList'].index(ThisOne)
            p['RYAxMax'], p['RYAxMin'] = get_yaxislimitsfromcf(
                cf, nFig, 'YRMax', 'YRMin', nSer, L2YArray)
            plot_onetimeseries_right(fig, n, ThisOne, L2XArray, L2YArray, p)
            #Plot the diurnal averages.
            Num2, Hr2, Av2, Sd2, Mx2, Mn2 = qcutils.get_diurnalstats(
                dsb.series['Hdh']['Data'][si:ei],
                dsb.series[ThisOne]['Data'][si:ei], dt)
            # mask the missing-value sentinel before plotting
            Av2 = numpy.ma.masked_where(Av2 == c.missing_value, Av2)
            Sd2 = numpy.ma.masked_where(Sd2 == c.missing_value, Sd2)
            Mx2 = numpy.ma.masked_where(Mx2 == c.missing_value, Mx2)
            Mn2 = numpy.ma.masked_where(Mn2 == c.missing_value, Mn2)
            hr2_ax = fig.add_axes([
                p['hr1_XAxOrg'], p['YAxOrg'], p['hr2_XAxLen'], p['ts_YAxLen']
            ])
            # NOTE(review): Axes.hold was removed in matplotlib 3.x —
            # this code presumably targets an older matplotlib; confirm
            hr2_ax.hold(True)
            hr2_ax.plot(Hr2, Av2, 'y-', Hr2, Mx2, 'r-', Hr2, Mn2, 'b-')
            section = qcutils.get_cfsection(cf, series=ThisOne, mode='quiet')
            if len(section) != 0:
                if 'DiurnalCheck' in cf[section][ThisOne].keys():
                    # overlay the +/- NumSd band used by the diurnal QC check
                    NSdarr = numpy.array(eval(
                        cf[section][ThisOne]['DiurnalCheck']['NumSd']),
                                         dtype=numpy.float64)
                    nSd = NSdarr[Month - 1]
                    hr2_ax.plot(Hr2, Av2 + nSd * Sd2, 'r.', Hr2,
                                Av2 - nSd * Sd2, 'b.')
            plt.xlim(0, 24)
            plt.xticks([0, 6, 12, 18, 24])
            # only the bottom panel keeps its x axis labels
            if n == 0:
                hr2_ax.set_xlabel('Hour', visible=True)
            else:
                hr2_ax.set_xlabel('', visible=False)
                plt.setp(hr2_ax.get_xticklabels(), visible=False)
            #if n > 0: plt.setp(hr2_ax.get_xticklabels(), visible=False)
            # vertical lines to show frequency distribution of flags
            bins = numpy.arange(0.5, 23.5)
            ind = bins[:len(bins) - 1] + 0.5
            index = numpy.where(numpy.mod(
                bflag, 10) == 0)  # find the elements with flag = 0, 10, 20 etc
            bflag[index] = 0  # set them all to 0
            hist, bin_edges = numpy.histogram(bflag, bins=bins)
            ymin = hist * 0
            delta = 0.01 * (numpy.max(hist) - numpy.min(hist))
            bar_ax = fig.add_axes([
                p['hr2_XAxOrg'], p['YAxOrg'], p['bar_XAxLen'], p['ts_YAxLen']
            ])
            bar_ax.set_ylim(0, numpy.max(hist))
            bar_ax.vlines(ind, ymin, hist)
            # label each bar that holds more than 5% of the observations
            for i, j in zip(ind, hist):
                if j > 0.05 * numpy.max(hist):
                    bar_ax.text(i, j + delta, str(numpy.int32(i)),
                                ha='center', size='small')
            if n == 0:
                bar_ax.set_xlabel('Flag', visible=True)
            else:
                bar_ax.set_xlabel('', visible=False)
                plt.setp(bar_ax.get_xticklabels(), visible=False)
            #if n > 0: plt.setp(bar_ax.get_xticklabels(), visible=False)
        else:
            log.error(' plttimeseries: series ' + ThisOne +
                      ' not in data structure')
    # build start/end timestamp strings for the PNG file name
    STList = []
    ETList = []
    if ei == -1:
        L1XArray = numpy.array(dsa.series['DateTime']['Data'][si:ei])
    else:
        L1XArray = numpy.array(dsa.series['DateTime']['Data'][si:ei + 1])
    for fmt in ['%Y', '_', '%m', '_', '%d', '_', '%H', '%M']:
        STList.append(L1XArray[0].strftime(fmt))
        if ei == -1:
            ETList.append(dsa.series['DateTime']['Data'][-1].strftime(fmt))
        else:
            ETList.append(L1XArray[-1].strftime(fmt))
    if qcutils.cfkeycheck(
            cf, Base='Output',
            ThisOne='PNGFile') and cf['Output']['PNGFile'] == 'True':
        log.info(' Generating a PNG file of the plot')
        # NOTE(review): 'Fig' + nFig requires nFig to be a string here,
        # although it is converted via numpy.int32 above — confirm callers
        PNGFileName = cf['Files']['PNG'][
            'PNGFilePath'] + 'Fig' + nFig + '_' + ''.join(
                STList) + '-' + ''.join(ETList) + '.png'
        plt.savefig(PNGFileName)
    fig.show()
def l4to6qc(cf, ds3, AttrLevel, InLevel, OutLevel):
    """
        Fill gaps in met data from other sources
        Integrate SOLO-ANN gap filled fluxes performed externally
        Generates L4 from L3 data
        Generates daily sums excel workbook

        Variable Series:
            Meteorological (MList): Ah_EC, Cc_7500_Av, ps, Ta_EC, Ws_CSAT, Wd_CSAT
            Radiation (RList): Fld, Flu, Fn, Fsd, Fsu
            Soil water content (SwsList): all variables containing Sws in variable name
            Soil (SList): Fg, Ts, SwsList
            Turbulent fluxes (FList): Fc_wpl, Fe_wpl, Fh, ustar
            Output (OList): MList, RList, SList, FList

        Parameters loaded from control file:
            zmd: z-d
            z0: roughness height

        Functions performed:
            qcts.AddMetVars
            qcts.ComputeDailySums
            qcts.InterpolateOverMissing (OList for gaps shorter than 3 observations, OList gaps shorter than 7 observations)
            qcts.GapFillFromAlternate (MList, RList)
            qcts.GapFillFromClimatology (Ah_EC, Fn, Fg, ps, Ta_EC, Ws_CSAT, OList)
            qcts.GapFillFromRatios (Fe, Fh, Fc)
            qcts.ReplaceOnDiff (Ws_CSAT, ustar)
            qcts.UstarFromFh
            qcts.ReplaceWhereMissing (Ustar)
            qcck.do_qcchecks
        """
    if AttrLevel == 'False':
        ds3.globalattributes['Functions'] = ''
        AttrLevel = InLevel
    # check to ensure L4 functions are defined in controlfile
    # x, y, z count the gapfilling/partitioning steps applied at L4/L5/L6
    if qcutils.cfkeycheck(cf, Base='Functions'):
        x = 0
        y = 0
        z = 0
    else:
        # nothing to do: return an L3-level copy with a note in the attributes
        log.error('FunctionList not found in control file')
        ds3x = copy.deepcopy(ds3)
        ds3x.globalattributes['nc_level'] = 'L3'
        ds3x.globalattributes['L4Functions'] = 'No L4-L6 functions applied'
        return ds3x
    # handle meta-data and import L4-L6 from external process
    if InLevel == 'L3':
        ds3x = copy.deepcopy(ds3)
    else:
        # read the externally processed file and back-fill any global
        # attributes and series attributes missing from it using ds3
        infilename = qcio.get_infilename_from_cf(cf, InLevel)
        ds3x = qcio.nc_read_series(infilename)
        for ThisOne in ds3.globalattributes.keys():
            if ThisOne not in ds3x.globalattributes.keys():
                ds3x.globalattributes[ThisOne] = ds3.globalattributes[ThisOne]
        for ThisOne in ds3.series.keys():
            if ThisOne in ds3x.series.keys():
                for attr in ds3.series[ThisOne]['Attr'].keys():
                    if attr not in ['ancillary_variables', 'long_name',
                                    'standard_name', 'units']:
                        ds3x.series[ThisOne]['Attr'][attr] = ds3.series[ThisOne]['Attr'][attr]
    ds3x.globalattributes['nc_level'] = AttrLevel
    ds3x.globalattributes['EPDversion'] = sys.version
    ds3x.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3x.globalattributes['controlfile_name'] = cf['controlfile_name']
    # preserve the external file's xl_* provenance attributes under an
    # xlL<level>_ prefix, then restore the ds3 values as the current ones
    if OutLevel == 'L6':
        ds3x.globalattributes['xlL6_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL6_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL6_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    elif OutLevel == 'L5':
        ds3x.globalattributes['xlL5_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL5_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL5_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    elif OutLevel == 'L4':
        ds3x.globalattributes['xlL4_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL4_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL4_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    qcutils.prepOzFluxVars(cf, ds3x)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1], NEE [umol m-2 s-1] and NEP = - NEE
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='convertFc') and cf['Functions']['convertFc'] == 'True':
        try:
            ds3x.globalattributes['L4Functions'] = ds3x.globalattributes['L4Functions'] + ', convertFc'
        except:
            # 'L4Functions' not present yet; start the list
            ds3x.globalattributes['L4Functions'] = 'convertFc'
        if 'Fc_co2' in ds3x.series.keys():
            qcts.ConvertFc(cf, ds3x, Fco2_in='Fc_co2')
        else:
            qcts.ConvertFc(cf, ds3x)
    ds4x = copy.deepcopy(ds3x)
    # restore the un-gapfilled fluxes from ds3 before L4 processing
    # NOTE(review): 'Fc_c' appears twice in the list below — harmless but
    # presumably unintended; confirm
    for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_co2', 'Fc_c', 'Fe', 'Fh']:
        if ThisOne in ds4x.series.keys() and ThisOne in ds3.series.keys():
            ds4x.series[ThisOne] = ds3.series[ThisOne].copy()
    # blank out any partitioning products carried in from the external file
    for ThisOne in ['GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day',
                    'CE_NEEmax', 'ER_bio', 'PD', 'ER_n', 'ER_LRF']:
        if ThisOne in ds4x.series.keys():
            ds4x.series[ThisOne]['Data'] = numpy.ones(len(ds4x.series[ThisOne]['Data']), dtype=numpy.float64) * numpy.float64(c.missing_value)
            ds4x.series[ThisOne]['Flag'] = numpy.ones(len(ds4x.series[ThisOne]['Data']), dtype=numpy.int32)
    # L4: gap fill the meteorological and radiation data
    if InLevel == 'L4' or AttrLevel == 'L3':
        ds4, x = l4qc(cf, ds4x, InLevel, x)
        qcutils.get_coverage_individual(ds4)
        qcutils.get_coverage_groups(ds4)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds4)
    # L5: gap fill the fluxes
    if OutLevel == 'L5' or OutLevel == 'L6':
        try:
            ds4y = copy.deepcopy(ds4)
        except:
            # ds4 not created above (no L4 pass); continue from ds4x
            ds4y = copy.deepcopy(ds4x)
        # carry the (possibly externally gap filled) fluxes from ds3x
        for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_c', 'Fc_co2', 'Fc_c', 'Fe', 'Fh']:
            var, var_flag, var_attr = qcutils.GetSeriesasMA(ds3x, ThisOne)
            qcutils.CreateSeries(ds4y, ThisOne, var, Flag=var_flag, Attr=var_attr)
            ds4y.series[ThisOne]['Attr']['long_name'] = var_attr['long_name']
        ds5, y = l5qc(cf, ds4y, y)
        qcutils.get_coverage_individual(ds5)
        qcutils.get_coverage_groups(ds5)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds5)
        # L6: partitioning
        if OutLevel == 'L6':
            ds5z = copy.deepcopy(ds5)
            # carry any externally computed partitioning products across
            for ThisOne in ['GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day',
                            'CE_NEEmax', 'ER_bio', 'PD', 'ER_n', 'ER_LRF']:
                if ThisOne in ds3x.series.keys():
                    ds5z.series[ThisOne] = ds3x.series[ThisOne].copy()
            ds6, z = l6qc(cf, ds5z, z)
            qcutils.get_coverage_individual(ds6)
            qcutils.get_coverage_groups(ds6)
            if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
                qcio.get_seriesstats(cf, ds6)
    # calculate daily statistics
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Sums'):
        if cf['Functions']['Sums'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', Sums'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', Sums'
            except:
                ds6.globalattributes['L6Functions'] = 'Sums'
            qcts.do_sums(cf, ds6)
        elif cf['Functions']['Sums'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', Sums'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', Sums'
            except:
                ds5.globalattributes['L5Functions'] = 'Sums'
            qcts.do_sums(cf, ds5)
        elif cf['Functions']['Sums'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', Sums'
            try:
                # NOTE(review): reads 'L5Functions' where the parallel
                # branches read their own level — looks like a typo for
                # 'L4Functions'; confirm before changing
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L5Functions'] + ', Sums'
            except:
                ds4.globalattributes['L4Functions'] = 'Sums'
            qcts.do_sums(cf, ds4)
    # compute climatology
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='climatology'):
        if cf['Functions']['climatology'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', climatology'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', climatology'
            except:
                ds6.globalattributes['L6Functions'] = 'climatology'
            qcts.do_climatology(cf, ds6)
        elif cf['Functions']['climatology'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', climatology'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', climatology'
            except:
                ds5.globalattributes['L5Functions'] = 'climatology'
            qcts.do_climatology(cf, ds5)
        elif cf['Functions']['climatology'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', climatology'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', climatology'
            except:
                ds4.globalattributes['L4Functions'] = 'climatology'
            qcts.do_climatology(cf, ds4)
    # record 'no further gapfilling' notes for any level whose counter is
    # still zero, then return the structures appropriate to OutLevel
    if OutLevel == 'L4' and (InLevel == 'L3' or InLevel == 'L4'):
        if x == 0:
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
            log.warn(' L4: no record of gapfilling functions')
        return ds4
    elif OutLevel == 'L5':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        if y == 0:
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except:
                ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
            log.warn(' L5: no record of gapfilling functions')
        return ds4, ds5
    elif OutLevel == 'L6':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds6.globalattributes['L4Functions'] = ds6.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds6.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        if y == 0:
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
                try:
                    ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
                except:
                    ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
                log.warn(' L5: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds6.globalattributes['L5Functions'] = ds6.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except:
                ds6.globalattributes['L5Functions'] = 'No further L5 gapfilling'
        if z == 0:
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L6 partitioning'
            try:
                # NOTE(review): reads ds5.globalattributes here while
                # writing to ds6 — looks like a typo for ds6; confirm
                ds6.globalattributes['L6Functions'] = ds5.globalattributes['L6Functions'] + ', No further L6 partitioning'
            except:
                ds6.globalattributes['L6Functions'] = 'No further L6 partitioning'
            log.warn(' L6: no record of gapfilling functions')
        return ds4, ds5, ds6
def _lvl_tag(ds, level, name):
    # Append *name* to the '<level>Functions' global attribute (e.g.
    # 'L4Functions'), creating the attribute if it does not exist yet.
    key = level + 'Functions'
    try:
        ds.globalattributes[key] = ds.globalattributes[key] + ', ' + name
    except KeyError:
        ds.globalattributes[key] = name

def l4to6qc(cf, ds3, AttrLevel, InLevel, OutLevel):
    """
    Fill gaps in met data from other sources and integrate externally
    (SOLO-ANN) gap-filled fluxes.  Generates L4 from L3 data and, depending
    on OutLevel, the L5 and L6 data sets as well; daily-sums workbooks and
    climatologies are produced on request.

    Parameters:
        cf:        control file object; [Functions] section selects the work done
        ds3:       L3 data structure (never modified except for the Functions
                   attribute reset when AttrLevel == 'False')
        AttrLevel: level whose attributes the output should carry, or the
                   string 'False' to reset and inherit InLevel
        InLevel:   level of the input data ('L3', 'L4', 'L5')
        OutLevel:  level of the requested output ('L4', 'L5' or 'L6')

    Returns:
        ds3x              when no [Functions] section exists in the control file
        ds4               when OutLevel == 'L4'
        (ds4, ds5)        when OutLevel == 'L5'
        (ds4, ds5, ds6)   when OutLevel == 'L6'

    Functions performed (per the control file): qcts.AddMetVars,
    qcts.ComputeDailySums, qcts.InterpolateOverMissing,
    qcts.GapFillFromAlternate, qcts.GapFillFromClimatology,
    qcts.GapFillFromRatios, qcts.ReplaceOnDiff, qcts.UstarFromFh,
    qcts.ReplaceWhereMissing, qcck.do_qcchecks.
    """
    if AttrLevel == 'False':
        ds3.globalattributes['Functions'] = ''
        AttrLevel = InLevel
    # check to ensure L4 functions are defined in the control file
    if qcutils.cfkeycheck(cf, Base='Functions'):
        x = 0
        y = 0
        z = 0
    else:
        log.error('FunctionList not found in control file')
        ds3x = copy.deepcopy(ds3)
        ds3x.globalattributes['nc_level'] = 'L3'
        ds3x.globalattributes['L4Functions'] = 'No L4-L6 functions applied'
        return ds3x
    # handle meta-data and import L4-L6 from external process
    if InLevel == 'L3':
        ds3x = copy.deepcopy(ds3)
    else:
        infilename = qcio.get_infilename_from_cf(cf, InLevel)
        ds3x = qcio.nc_read_series(infilename)
        # carry missing global attributes over from the L3 data
        for ThisOne in ds3.globalattributes.keys():
            if ThisOne not in ds3x.globalattributes.keys():
                ds3x.globalattributes[ThisOne] = ds3.globalattributes[ThisOne]
        # refresh series attributes from L3, keeping the imported file's
        # naming/units metadata untouched
        for ThisOne in ds3.series.keys():
            if ThisOne in ds3x.series.keys():
                for attr in ds3.series[ThisOne]['Attr'].keys():
                    if attr not in ['ancillary_variables', 'long_name',
                                    'standard_name', 'units']:
                        ds3x.series[ThisOne]['Attr'][attr] = ds3.series[ThisOne]['Attr'][attr]
    ds3x.globalattributes['nc_level'] = AttrLevel
    ds3x.globalattributes['EPDversion'] = sys.version
    ds3x.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3x.globalattributes['controlfile_name'] = cf['controlfile_name']
    # preserve the imported file's xl_* metadata under level-specific names
    # and restore the L3 values (the three original branches were identical
    # apart from the level name)
    if OutLevel in ['L4', 'L5', 'L6']:
        for attr in ['datemode', 'filename', 'moddatetime']:
            ds3x.globalattributes['xl%s_%s' % (OutLevel, attr)] = ds3x.globalattributes['xl_' + attr]
            ds3x.globalattributes['xl_' + attr] = ds3.globalattributes['xl_' + attr]
    qcutils.prepOzFluxVars(cf, ds3x)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1],
    # NEE [umol m-2 s-1] and NEP = -NEE
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='convertFc') and cf['Functions']['convertFc'] == 'True':
        _lvl_tag(ds3x, 'L4', 'convertFc')
        if 'Fc_co2' in ds3x.series.keys():
            qcts.ConvertFc(cf, ds3x, Fco2_in='Fc_co2')
        else:
            qcts.ConvertFc(cf, ds3x)
    ds4x = copy.deepcopy(ds3x)
    # restore the original L3 fluxes before the L4 gap filling
    # (duplicate 'Fc_c' entry removed from the original list)
    for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_co2', 'Fc_c', 'Fe', 'Fh']:
        if ThisOne in ds4x.series.keys() and ThisOne in ds3.series.keys():
            ds4x.series[ThisOne] = ds3.series[ThisOne].copy()
    # blank out any partitioning products carried in from the external file
    for ThisOne in ['GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day', 'CE_NEEmax',
                    'ER_bio', 'PD', 'ER_n', 'ER_LRF']:
        if ThisOne in ds4x.series.keys():
            ds4x.series[ThisOne]['Data'] = numpy.ones(
                len(ds4x.series[ThisOne]['Data']),
                dtype=numpy.float64) * numpy.float64(c.missing_value)
            ds4x.series[ThisOne]['Flag'] = numpy.ones(
                len(ds4x.series[ThisOne]['Data']), dtype=numpy.int32)
    if InLevel == 'L4' or AttrLevel == 'L3':
        ds4, x = l4qc(cf, ds4x, InLevel, x)
        qcutils.get_coverage_individual(ds4)
        qcutils.get_coverage_groups(ds4)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds4)
    if OutLevel == 'L5' or OutLevel == 'L6':
        # ds4 only exists when the L4 step ran; otherwise start from the
        # pre-L4 copy (bug fix: bare except narrowed to NameError)
        try:
            ds4y = copy.deepcopy(ds4)
        except NameError:
            ds4y = copy.deepcopy(ds4x)
        for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_c', 'Fc_co2', 'Fe', 'Fh']:
            var, var_flag, var_attr = qcutils.GetSeriesasMA(ds3x, ThisOne)
            qcutils.CreateSeries(ds4y, ThisOne, var, Flag=var_flag, Attr=var_attr)
            ds4y.series[ThisOne]['Attr']['long_name'] = var_attr['long_name']
        ds5, y = l5qc(cf, ds4y, y)
        qcutils.get_coverage_individual(ds5)
        qcutils.get_coverage_groups(ds5)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds5)
        if OutLevel == 'L6':
            ds5z = copy.deepcopy(ds5)
            # bring the partitioning products back in from the imported file
            for ThisOne in ['GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day',
                            'CE_NEEmax', 'ER_bio', 'PD', 'ER_n', 'ER_LRF']:
                if ThisOne in ds3x.series.keys():
                    ds5z.series[ThisOne] = ds3x.series[ThisOne].copy()
            ds6, z = l6qc(cf, ds5z, z)
            qcutils.get_coverage_individual(ds6)
            qcutils.get_coverage_groups(ds6)
            if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
                qcio.get_seriesstats(cf, ds6)
    # calculate daily statistics
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Sums'):
        if cf['Functions']['Sums'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', Sums'
            _lvl_tag(ds6, 'L6', 'Sums')
            qcts.do_sums(cf, ds6)
        elif cf['Functions']['Sums'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', Sums'
            _lvl_tag(ds5, 'L5', 'Sums')
            qcts.do_sums(cf, ds5)
        elif cf['Functions']['Sums'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', Sums'
            # bug fix: this branch previously read 'L5Functions', so the bare
            # except clobbered the accumulated L4 function list with 'Sums'
            _lvl_tag(ds4, 'L4', 'Sums')
            qcts.do_sums(cf, ds4)
    # compute climatology
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='climatology'):
        if cf['Functions']['climatology'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', climatology'
            _lvl_tag(ds6, 'L6', 'climatology')
            qcts.do_climatology(cf, ds6)
        elif cf['Functions']['climatology'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', climatology'
            _lvl_tag(ds5, 'L5', 'climatology')
            qcts.do_climatology(cf, ds5)
        elif cf['Functions']['climatology'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', climatology'
            _lvl_tag(ds4, 'L4', 'climatology')
            qcts.do_climatology(cf, ds4)
    # record where no gapfilling/partitioning was done and return the data
    # sets appropriate for the requested output level
    # NOTE(review): ds4/ds5/ds6 are only bound when the corresponding level
    # was processed; an InLevel/OutLevel combination that skips a level will
    # raise NameError at the return, as in the original code -- confirm the
    # supported combinations with the callers
    if OutLevel == 'L4' and (InLevel == 'L3' or InLevel == 'L4'):
        if x == 0:
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
            _lvl_tag(ds4, 'L4', 'No further L4 gapfilling')
            log.warn(' L4: no record of gapfilling functions')
        return ds4
    elif OutLevel == 'L5':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                _lvl_tag(ds4, 'L4', 'No further L4 gapfilling')
                log.warn(' L4: no record of gapfilling functions')
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
            _lvl_tag(ds5, 'L4', 'No further L4 gapfilling')
        if y == 0:
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
            _lvl_tag(ds5, 'L5', 'No further L5 gapfilling')
            log.warn(' L5: no record of gapfilling functions')
        return ds4, ds5
    elif OutLevel == 'L6':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                _lvl_tag(ds4, 'L4', 'No further L4 gapfilling')
                log.warn(' L4: no record of gapfilling functions')
            if InLevel in ['L3', 'L4', 'L5']:
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
                _lvl_tag(ds5, 'L4', 'No further L4 gapfilling')
                log.warn(' L4: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L4 gapfilling'
            _lvl_tag(ds6, 'L4', 'No further L4 gapfilling')
        if y == 0:
            if InLevel in ['L3', 'L4', 'L5']:
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
                _lvl_tag(ds5, 'L5', 'No further L5 gapfilling')
                log.warn(' L5: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L5 gapfilling'
            _lvl_tag(ds6, 'L5', 'No further L5 gapfilling')
        if z == 0:
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L6 partitioning'
            # bug fix: this branch previously read ds5's 'L6Functions' when
            # tagging ds6
            _lvl_tag(ds6, 'L6', 'No further L6 partitioning')
            log.warn(' L6: no record of gapfilling functions')
        return ds4, ds5, ds6
def l4qc(cf, ds3, InLevel, x):
    """
    Produce the L4 data set (gap-filled meteorology) from L3 data.

    Parameters:
        cf:       control file object; the [Functions] section selects the
                  processing steps applied here
        ds3:      L3 data structure; deep-copied, never modified
        InLevel:  level of the input data; kept for interface compatibility
                  with callers (not used in the body)
        x:        counter of L4 processing steps; incremented here and used
                  to decide whether the QC checks must be re-run

    Returns:
        (ds4, x): the new L4 data structure and the updated step counter
    """
    ds4 = copy.deepcopy(ds3)
    ds4.globalattributes['nc_level'] = 'L4'
    # record any offline (externally applied) L4 functions named in the
    # control file
    if ((qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L4_offline') and
         cf['Functions']['L4_offline'] == 'True') and
            qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L4_keys')):
        try:
            ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', ' + cf['Functions']['L4_keys']
        except KeyError:
            ds4.globalattributes['L4Functions'] = cf['Functions']['L4_keys']
        x = x + 1
    # determine HMP Ah if not output by datalogger
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateAh') and cf['Functions']['CalculateAh'] == 'True':
        try:
            ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', CalculateAh'
        except KeyError:
            ds4.globalattributes['L4Functions'] = 'CalculateAh'
        log.info(' Adding HMP Ah to database')
        qcts.CalculateAhHMP(cf, ds4)
    # add relevant meteorological values to L4 data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateMetVars') and cf['Functions']['CalculateMetVars'] == 'True':
        try:
            ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', CalculateMetVars'
        except KeyError:
            ds4.globalattributes['L4Functions'] = 'CalculateMetVars'
        log.info(' Adding standard met variables to database')
        qcts.CalculateMeteorologicalVariables(ds4)
    # merge CSAT and wind sentry wind speed
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='MergeSeriesWS') and cf['Functions']['MergeSeriesWS'] == 'True':
        try:
            ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', MergeSeriesWS'
        except KeyError:
            ds4.globalattributes['L4Functions'] = 'MergeSeriesWS'
        qcts.MergeSeries(cf, ds4, 'Ws', [0, 10])
    # linear interpolation to fill missing values over gaps of 1 hour
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='InterpolateOverMissing') and cf['Functions']['InterpolateOverMissing'] == 'True':
        try:
            ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', InterpolateOverMissing'
        except KeyError:
            ds4.globalattributes['L4Functions'] = 'InterpolateOverMissing'
        log.info(' Gap filling by linear interpolation to fill missing values over gaps of 1 hour')
        for ThisOne in cf['InterpolateVars'].keys():
            qcts.InterpolateOverMissing(cf, ds4, series=ThisOne, maxlen=2)
        x = x + 1
    # re-apply the quality control checks (range, diurnal and rules)
    if x > 0:
        log.info(' Doing QC checks on L4 data')
        qcck.do_qcchecks(cf, ds4)
        try:
            ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        except KeyError:
            ds4.globalattributes['L4Functions'] = 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        # interpolate over any remaining gaps up to 3 hours in length
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='InterpolateOverMissing') and cf['Functions']['InterpolateOverMissing'] == 'True':
            for ThisOne in cf['InterpolateVars'].keys():
                qcts.InterpolateOverMissing(cf, ds4, series=ThisOne, maxlen=6)
    # bug fix: this concatenation was unguarded and raised KeyError when no
    # L4 function had been recorded (x == 0); guard it like the sibling
    # l3qc/l5qc functions do
    try:
        ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', ' + ds4.globalattributes['L4Functions']
    except KeyError:
        pass
    return ds4, x
def l5qc(cf, ds4, y):
    """
    Produce the L5 data set (u*, ET, Penman-Monteith and available energy)
    from L4 data.

    NOTE(review): this function is defined twice in this module; the later
    definition shadows this one at import time -- consider removing one.

    Parameters:
        cf:  control file object; [Functions] section selects the work done
        ds4: L4 data structure; deep-copied, never modified
        y:   counter of L5 processing steps; incremented here and used to
             decide whether the QC checks must be re-run

    Returns:
        (ds5, y): the new L5 data structure and the updated step counter
    """
    ds5 = copy.deepcopy(ds4)
    ds5.globalattributes['nc_level'] = 'L5'
    # record any offline (externally applied) L5 functions named in the
    # control file
    if ((qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L5_offline') and
         cf['Functions']['L5_offline'] == 'True') and
            qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L5_keys')):
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', ' + cf['Functions']['L5_keys']
        except KeyError:
            ds5.globalattributes['L5Functions'] = cf['Functions']['L5_keys']
        y = y + 1
    # calculate u* from Fh and corrected wind speed
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='UstarFromFh') and cf['Functions']['UstarFromFh'] == 'True':
        # bug fix: the original read ds4's 'L5Functions' (dropping tags
        # already accumulated on ds5) and its except wrote the fallback tag
        # onto ds4, the input data set
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', UstarFromFh'
        except KeyError:
            ds5.globalattributes['L5Functions'] = 'UstarFromFh'
        qcts.UstarFromFh(cf, ds5)
        y = y + 1
    # calculate ET at observation interval
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateET') and cf['Functions']['CalculateET'] == 'True':
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', CalculateET'
        except KeyError:
            ds5.globalattributes['L5Functions'] = 'CalculateET'
        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds5, 'L5')
    # calculate rst, rc and Gst, Gc from Penman-Monteith inversion
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='PenmanMonteith') and cf['Functions']['PenmanMonteith'] == 'True':
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', PenmanMonteith'
        except KeyError:
            ds5.globalattributes['L5Functions'] = 'PenmanMonteith'
        qcts.do_PenmanMonteith(cf, ds5)
    # re-calculate the available energy from L5 (gapfilled) fluxes
    try:
        ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', CalculateAvailableEnergy'
    except KeyError:
        # bug fix: the fallback previously assigned to the undefined name
        # 'ds', raising NameError whenever 'L5Functions' was missing
        ds5.globalattributes['L5Functions'] = 'CalculateAvailableEnergy'
    qcts.CalculateAvailableEnergy(ds5)
    # re-apply the quality control checks (range, diurnal and rules)
    if y > 0:
        log.info(' Doing QC checks on L5 data')
        qcck.do_qcchecks(cf, ds5)
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        except KeyError:
            ds5.globalattributes['L5Functions'] = 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
    try:
        ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', ' + ds5.globalattributes['L5Functions']
    except KeyError:
        # no L5 functions recorded; leave 'Functions' unchanged
        pass
    return ds5, y
def _l3_tag(ds3, name):
    # Append *name* to the 'L3Functions' global attribute, creating the
    # attribute if it does not exist yet.
    try:
        ds3.globalattributes['L3Functions'] = ds3.globalattributes['L3Functions'] + ', ' + name
    except KeyError:
        ds3.globalattributes['L3Functions'] = name

def l3qc(cf, ds2):
    """
    Corrections: generates L3 from L2 data.

    Functions performed (selected via the control file [Functions] section):
        qcts.AddMetVars (optional)
        qcts.CorrectSWC (optional*)
        qcck.do_linear (all sites)
        qcts.MergeSeries Ah_EC (optional)x
        qcts.TaFromTv (optional)
        qcts.MergeSeries Ta_EC (optional)x
        qcts.CoordRotation2D (all sites)
        qcts.MassmanApprox / qcts.Massman (optional*)y
        qcts.CalculateFluxes (used if Massman not optioned)x
        qcts.CalculateFluxesRM (used if Massman optioned)y
        qcts.FhvtoFh (all sites)
        qcts.Fe_WPL / qcts.Fc_WPL (WPL computed on fluxes, Campbell algorithm)+x
        qcts.Fe_WPLcov / qcts.Fc_WPLcov (WPL computed on covariances, WPL80)+y
        qcts.CalculateNetRadiation (optional)
        qcts.MergeSeries Fsd, Fn (optional)
        qcts.InterpolateOverMissing (optional)
        AverageSeriesByElements (optional)
        qcts.CorrectFgForStorage (all sites)
        qcts.CalculateAvailableEnergy (optional)
        qcck.do_qcchecks (all sites)
        qcck.gaps (optional)

    *: requires ancillary measurements for parameterisation
    +: each site requires one pair, either Fe_WPL & Fc_WPL (default) or
       Fe_WPLcov & Fc_WPLcov
    x, y: options required together as a set

    Returns the new L3 data structure ds3; ds2 is never modified.
    """
    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    ds3.globalattributes['nc_level'] = 'L3'
    ds3.globalattributes['EPDversion'] = sys.version
    ds3.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # calculate NDVI
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='NDVI') and cf['Functions']['NDVI'] == 'True':
        _l3_tag(ds3, 'calculateNDVI')
        log.info(' Calculating NDVI from component reflectances ...')
        qcts.CalculateNDVI(cf, ds3)
    # bypass soil temperature correction for Sws (when Ts bad)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='BypassSwsTcorr') and cf['Functions']['BypassSwsTcorr'] == 'True':
        _l3_tag(ds3, 'BypassSwsTcorr')
        log.info(' Re-computing Sws without temperature correction ...')
        qcts.BypassTcorr(cf, ds3)
    # correct measured soil water content using empirical relationship to
    # collected samples
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CorrectSWC') and cf['Functions']['CorrectSWC'] == 'True':
        _l3_tag(ds3, 'CorrectSWC')
        log.info(' Correcting soil moisture data ...')
        qcts.CorrectSWC(cf, ds3)
    # apply linear corrections to the data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        _l3_tag(ds3, 'do_linear')
        log.info(' Applying linear corrections ...')
        qcck.do_linear(cf, ds3)
    # determine HMP Ah if not output by datalogger
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateAh') and cf['Functions']['CalculateAh'] == 'True':
        _l3_tag(ds3, 'CalculateAh')
        log.info(' Adding HMP Ah to database')
        qcts.CalculateAhHMP(cf, ds3)
    # merge the HMP and corrected 7500 data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='MergeSeriesAhTa') and cf['Functions']['MergeSeriesAhTa'] == 'True':
        _l3_tag(ds3, 'MergeSeriesAhTaCc')
        qcts.MergeSeries(cf, ds3, 'Ah', [0, 10])
        qcts.MergeSeries(cf, ds3, 'Cc', [0, 10])
        # get the air temperature from the CSAT virtual temperature
        _l3_tag(ds3, 'TaFromTv')
        qcts.TaFromTv(cf, ds3)
        # merge the HMP and corrected CSAT data
        qcts.MergeSeries(cf, ds3, 'Ta', [0, 10])
    # add relevant meteorological values to L3 data
    if ((qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True') or
            (qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateMetVars') and cf['Functions']['CalculateMetVars'] == 'True')):
        _l3_tag(ds3, 'CalculateMetVars')
        log.info(' Adding standard met variables to database')
        qcts.CalculateMeteorologicalVariables(ds3)
    # do the 2D coordinate rotation
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        _l3_tag(ds3, 'CoordRotation2D')
        qcts.CoordRotation2D(cf, ds3)
    # do the Massman frequency attenuation correction
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        _l3_tag(ds3, 'Massman')
        qcts.MassmanStandard(cf, ds3)
    # calculate the fluxes
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        _l3_tag(ds3, 'CalculateFluxes')
        qcts.CalculateFluxes(cf, ds3)
    # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        _l3_tag(ds3, 'FhvtoFh')
        qcts.FhvtoFh(cf, ds3)
    # correct the H2O & CO2 flux due to effects of flux on density measurements
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='WPLcov') and cf['Functions']['WPLcov'] == 'True':
            _l3_tag(ds3, 'WPLcov')
            qcts.do_WPL(cf, ds3, cov='True')
        else:
            _l3_tag(ds3, 'WPL')
            qcts.do_WPL(cf, ds3)
    # calculate the net radiation from the Kipp and Zonen CNR1
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateNetRadiation') and cf['Functions']['CalculateNetRadiation'] == 'True':
        _l3_tag(ds3, 'CalculateNetRadiation')
        qcts.MergeSeries(cf, ds3, 'Fsd', [0, 10])
        qcts.CalculateNetRadiation(ds3, 'Fn_KZ', 'Fsd', 'Fsu', 'Fld', 'Flu')
        qcts.MergeSeries(cf, ds3, 'Fn', [0, 10])
    # combine wind speed from the CSAT and the Wind Sentry
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='MergeSeriesWS') and cf['Functions']['MergeSeriesWS'] == 'True':
        _l3_tag(ds3, 'MergeSeriesWS')
        qcts.MergeSeries(cf, ds3, 'Ws', [0, 10])
    # average the soil temperature and soil moisture data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        # NOTE(review): the membership test assumes 'L3Functions' already
        # exists; with Corrections enabled it was set by do_linear above
        if 'SoilAverage' not in ds3.globalattributes['L3Functions']:
            _l3_tag(ds3, 'SoilAverage')
        qcts.AverageSeriesByElementsI(cf, ds3, 'Ts')
        qcts.AverageSeriesByElementsI(cf, ds3, 'Sws')
    # correct the measured soil heat flux for storage in the soil layer above
    # the sensor
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        _l3_tag(ds3, 'CorrectFgForStorage')
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='IndividualFgCorrection') and cf['Functions']['IndividualFgCorrection'] == 'True':
            qcts.CorrectIndividualFgForStorage(cf, ds3)
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
        else:
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
            qcts.CorrectGroupFgForStorage(cf, ds3)
    # calculate the available energy
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateAvailableEnergy') and cf['Functions']['CalculateAvailableEnergy'] == 'True':
        _l3_tag(ds3, 'CalculateAvailableEnergy')
        qcts.CalculateAvailableEnergy(ds3)
    # prepare the OzFlux variables; the original if/else ran the same call in
    # both branches, so the DiagnosticMode value is irrelevant here
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='DiagnosticMode'):
        qcutils.prepOzFluxVars(cf, ds3)
    # calculate specific humidity and saturated specific humidity profile
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='qTprofile') and cf['Functions']['qTprofile'] == 'True':
        _l3_tag(ds3, 'qTprofile')
        qcts.CalculateSpecificHumidityProfile(cf, ds3)
    # calculate Penman-Monteith inversion
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='PenmanMonteith') and cf['Functions']['PenmanMonteith'] == 'True':
        _l3_tag(ds3, 'PenmanMonteith')
        qcts.do_PenmanMonteith(cf, ds3)
    # calculate bulk Richardson numbers
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='bulkRichardson') and cf['Functions']['bulkRichardson'] == 'True':
        _l3_tag(ds3, 'bulkRichardson')
        qcts.do_bulkRichardson(cf, ds3)
    # re-apply the quality control checks (range, diurnal and rules)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        ds3.globalattributes['L3Functions'] = ds3.globalattributes['L3Functions'] + ', do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        qcck.do_qcchecks(cf, ds3)
    # apply the ustar filter
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='ustarFilter') and cf['Functions']['ustarFilter'] == 'True':
        _l3_tag(ds3, 'ustarFilter')
        qcts.FilterFcByUstar(cf, ds3)
    # coordinate gaps in the three main fluxes
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CoordinateFluxGaps') and cf['Functions']['CoordinateFluxGaps'] == 'True':
        _l3_tag(ds3, 'CoordinateFluxGaps')
        qcck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        _l3_tag(ds3, 'CoordinateAh7500AndFcGaps')
        qcck.CoordinateAh7500AndFcGaps(cf, ds3)
    # calculate ET at observation interval
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateET') and cf['Functions']['CalculateET'] == 'True':
        _l3_tag(ds3, 'CalculateET')
        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds3, 'L3')
    # run MOST (Buckingham Pi) 2d footprint model (Kljun et al. 2004)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='footprint') and cf['Functions']['footprint'] == 'True':
        _l3_tag(ds3, 'footprint')
        qcts.do_footprint_2d(cf, ds3)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections') and cf['Functions']['Corrections'] == 'True':
        qcio.get_seriesstats(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1],
    # NEE [umol m-2 s-1] and NEP = -NEE
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='convertFc') and cf['Functions']['convertFc'] == 'True':
        _l3_tag(ds3, 'convertFc')
        qcts.ConvertFc(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc [umol m-2 s-1]
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='JasonFc') and cf['Functions']['JasonFc'] == 'True':
        _l3_tag(ds3, 'convertFc (umol only)')
        qcts.ConvertFcJason(cf, ds3)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)
    # compute climatology for L3 data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='climatology') and cf['Functions']['climatology'] == 'True':
        _l3_tag(ds3, 'climatology')
        qcts.do_climatology(cf, ds3)
    # compute daily sums at L3
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Sums') and cf['Functions']['Sums'] == 'L3':
        # bug fix: the original read 'L5Functions' here, so the bare except
        # silently replaced the accumulated L3 function list with 'Sums'
        _l3_tag(ds3, 'Sums')
        qcts.do_sums(cf, ds3)
    try:
        ds3.globalattributes['Functions'] = ds3.globalattributes['Functions'] + ', ' + ds3.globalattributes['L3Functions']
    except KeyError:
        ds3.globalattributes['Functions'] = ds3.globalattributes['L3Functions']
    return ds3
def l5qc(cf, ds5input, y):
    """
    Generate the L5 dataset from the L4 dataset.

    Optional steps (switched from cf['Functions']): u* from Fh
    (qcts.UstarFromFh), ET (qcts.CalculateET), Penman-Monteith inversion
    (qcts.do_PenmanMonteith).  The available energy is always re-computed
    from the (gap-filled) L5 fluxes and, when any data-altering step ran
    (y > 0), the QC checks are re-applied.

    Args:
        cf: control file object (dict-like)
        ds5input: L4 data structure; deep-copied, never modified
        y: counter of data-altering steps; incremented here as steps run
    Returns:
        (ds5, y): the new L5 data structure and the updated counter
    """
    def note(name):
        # Record `name` in the 'L5Functions' global attribute, creating
        # the attribute on first use.
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', ' + name
        except KeyError:
            ds5.globalattributes['L5Functions'] = name

    def enabled(key):
        # True when cf['Functions'][key] exists and is the string 'True'.
        return qcutils.cfkeycheck(cf, Base='Functions', ThisOne=key) and cf['Functions'][key] == 'True'

    ds5 = copy.deepcopy(ds5input)
    ds5.globalattributes['nc_level'] = 'L5'
    if enabled('L5_offline') and qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L5_keys'):
        note(cf['Functions']['L5_keys'])
        y = y + 1
    # calculate u* from Fh and corrected wind speed
    if enabled('UstarFromFh'):
        # BUGFIX: the annotation was previously read from/written to the L4
        # input structure instead of ds5, so it was lost on the returned copy
        note('UstarFromFh')
        qcts.UstarFromFh(cf, ds5)
        y = y + 1
    # calculate ET at observation interval
    if enabled('CalculateET'):
        note('CalculateET')
        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds5, 'L5')
    # calculate rst, rc and Gst, Gc from Penman-Monteith inversion
    if enabled('PenmanMonteith'):
        note('PenmanMonteith')
        qcts.do_PenmanMonteith(cf, ds5)
    # re-calculate the available energy from L5 (gapfilled) fluxes
    # BUGFIX: the fallback branch previously assigned to the undefined name
    # `ds`, raising NameError when 'L5Functions' did not yet exist
    note('CalculateAvailableEnergy')
    qcts.CalculateAvailableEnergy(ds5)
    # re-apply the quality control checks (range, diurnal and rules)
    if y > 0:
        log.info(' Doing QC checks on L5 data')
        qcck.do_qcchecks(cf, ds5)
        note('do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)')
    # roll the level function list into the cumulative 'Functions' attribute.
    # BUGFIX: the fallback previously self-assigned 'Functions', re-raising
    # KeyError when that attribute was missing; fall back to the L5 list
    # (always present: note() above runs unconditionally)
    try:
        ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', ' + ds5.globalattributes['L5Functions']
    except KeyError:
        ds5.globalattributes['Functions'] = ds5.globalattributes['L5Functions']
    return ds5, y
def l3qc(cf, ds2):
    """
    Corrections: generate the L3 dataset from the L2 dataset.

    Each processing step is switched on from the [Functions] section of the
    control file and recorded in the 'L3Functions' global attribute.
    Main steps, in order:
        qcts.CalculateNDVI                          (optional)
        qcts.BypassTcorr                            (optional)
        qcts.CorrectSWC                             (optional*)
        qcck.do_linear                              (Corrections)
        qcts.CalculateAhHMP                         (optional)
        qcts.MergeSeries Ah/Cc/Ta + qcts.TaFromTv   (optional)
        qcts.CalculateMeteorologicalVariables       (Corrections or optional)
        qcts.CoordRotation2D                        (Corrections)
        qcts.MassmanStandard                        (Corrections)
        qcts.CalculateFluxes                        (Corrections)
        qcts.FhvtoFh                                (Corrections)
        qcts.do_WPL (flux or covariance form)       (Corrections)
        qcts.CalculateNetRadiation                  (optional)
        soil averages + Fg storage correction       (Corrections)
        qcts.CalculateAvailableEnergy               (optional)
        qcck.do_qcchecks                            (re-applied)
        ustar filter, gap co-ordination, ET, footprint, Fc conversions,
        WUE, climatology, sums                      (all optional)
    *: requires ancillary measurements for parameterisation

    Args:
        cf: control file object (dict-like)
        ds2: L2 data structure; deep-copied, never modified
    Returns:
        ds3: the new L3 data structure
    """
    def note(name):
        # Record `name` in the 'L3Functions' global attribute, creating
        # the attribute on first use.
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes['L3Functions'] + ', ' + name
        except KeyError:
            ds3.globalattributes['L3Functions'] = name

    def enabled(key, value='True'):
        # True when cf['Functions'][key] exists and equals `value`.
        return qcutils.cfkeycheck(cf, Base='Functions', ThisOne=key) and cf['Functions'][key] == value

    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    ds3.globalattributes['nc_level'] = 'L3'
    ds3.globalattributes['EPDversion'] = sys.version
    ds3.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # calculate NDVI
    if enabled('NDVI'):
        note('calculateNDVI')
        log.info(' Calculating NDVI from component reflectances ...')
        qcts.CalculateNDVI(cf, ds3)
    # bypass soil temperature correction for Sws (when Ts bad)
    if enabled('BypassSwsTcorr'):
        note('BypassSwsTcorr')
        log.info(' Re-computing Sws without temperature correction ...')
        qcts.BypassTcorr(cf, ds3)
    # correct measured soil water content using empirical relationship to collected samples
    if enabled('CorrectSWC'):
        note('CorrectSWC')
        log.info(' Correcting soil moisture data ...')
        qcts.CorrectSWC(cf, ds3)
    # apply linear corrections to the data
    if enabled('Corrections'):
        note('do_linear')
        log.info(' Applying linear corrections ...')
        qcck.do_linear(cf, ds3)
    # determine HMP Ah if not output by datalogger
    if enabled('CalculateAh'):
        note('CalculateAh')
        log.info(' Adding HMP Ah to database')
        qcts.CalculateAhHMP(cf, ds3)
    # merge the HMP and corrected 7500 data
    if enabled('MergeSeriesAhTa'):
        note('MergeSeriesAhTaCc')
        qcts.MergeSeries(cf, ds3, 'Ah', [0, 10])
        qcts.MergeSeries(cf, ds3, 'Cc', [0, 10])
        # get the air temperature from the CSAT virtual temperature
        note('TaFromTv')
        qcts.TaFromTv(cf, ds3)
        # merge the HMP and corrected CSAT data
        qcts.MergeSeries(cf, ds3, 'Ta', [0, 10])
    # add relevant meteorological values to L3 data
    if enabled('Corrections') or enabled('CalculateMetVars'):
        note('CalculateMetVars')
        log.info(' Adding standard met variables to database')
        qcts.CalculateMeteorologicalVariables(ds3)
    # do the 2D coordinate rotation
    if enabled('Corrections'):
        note('CoordRotation2D')
        qcts.CoordRotation2D(cf, ds3)
    # do the Massman frequency attenuation correction
    if enabled('Corrections'):
        note('Massman')
        qcts.MassmanStandard(cf, ds3)
    # calculate the fluxes
    if enabled('Corrections'):
        note('CalculateFluxes')
        qcts.CalculateFluxes(cf, ds3)
    # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
    if enabled('Corrections'):
        note('FhvtoFh')
        qcts.FhvtoFh(cf, ds3)
    # correct the H2O & CO2 flux due to effects of flux on density measurements
    if enabled('Corrections'):
        if enabled('WPLcov'):
            # WPL on kinematic fluxes (covariances), as with WPL80
            note('WPLcov')
            qcts.do_WPL(cf, ds3, cov='True')
        else:
            # WPL on fluxes, as with the Campbell algorithm
            note('WPL')
            qcts.do_WPL(cf, ds3)
    # calculate the net radiation from the Kipp and Zonen CNR1
    if enabled('CalculateNetRadiation'):
        note('CalculateNetRadiation')
        qcts.MergeSeries(cf, ds3, 'Fsd', [0, 10])
        qcts.CalculateNetRadiation(ds3, 'Fn_KZ', 'Fsd', 'Fsu', 'Fld', 'Flu')
        qcts.MergeSeries(cf, ds3, 'Fn', [0, 10])
    # combine wind speed from the CSAT and the Wind Sentry
    if enabled('MergeSeriesWS'):
        note('MergeSeriesWS')
        qcts.MergeSeries(cf, ds3, 'Ws', [0, 10])
    # average the soil temperature and soil moisture data
    if enabled('Corrections'):
        if 'SoilAverage' not in ds3.globalattributes['L3Functions']:
            note('SoilAverage')
        qcts.AverageSeriesByElementsI(cf, ds3, 'Ts')
        qcts.AverageSeriesByElementsI(cf, ds3, 'Sws')
    # correct the measured soil heat flux for storage in the soil layer above the sensor
    if enabled('Corrections'):
        note('CorrectFgForStorage')
        if enabled('IndividualFgCorrection'):
            qcts.CorrectIndividualFgForStorage(cf, ds3)
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
        else:
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
            qcts.CorrectGroupFgForStorage(cf, ds3)
    # calculate the available energy
    if enabled('CalculateAvailableEnergy'):
        note('CalculateAvailableEnergy')
        qcts.CalculateAvailableEnergy(ds3)
    # prepare OzFlux variables.  NOTE: the original branched on the value of
    # 'DiagnosticMode' but both branches made the identical call, so only the
    # presence of the key matters
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='DiagnosticMode'):
        qcutils.prepOzFluxVars(cf, ds3)
    # calculate specific humidity and saturated specific humidity profile
    if enabled('qTprofile'):
        note('qTprofile')
        qcts.CalculateSpecificHumidityProfile(cf, ds3)
    # calculate Penman-Monteith inversion
    if enabled('PenmanMonteith'):
        note('PenmanMonteith')
        qcts.do_PenmanMonteith(cf, ds3)
    # calculate bulk Richardson numbers
    if enabled('bulkRichardson'):
        note('bulkRichardson')
        qcts.do_bulkRichardson(cf, ds3)
    # re-apply the quality control checks (range, diurnal and rules)
    if enabled('Corrections'):
        # BUGFIX: this annotation was previously unguarded and raised
        # KeyError when 'L3Functions' had not been created yet
        note('do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)')
        qcck.do_qcchecks(cf, ds3)
    # quality control checks (range, diurnal and rules) without flux post-processing
    if enabled('QCChecks'):
        qcck.do_qcchecks(cf, ds3)
    # apply the ustar filter
    if enabled('ustarFilter'):
        note('ustarFilter')
        qcts.FilterFcByUstar(cf, ds3)
    # coordinate gaps in the three main fluxes
    if enabled('CoordinateFluxGaps'):
        note('CoordinateFluxGaps')
        qcck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    if enabled('Corrections'):
        note('CoordinateAh7500AndFcGaps')
        qcck.CoordinateAh7500AndFcGaps(cf, ds3)
    # calculate ET at observation interval
    if enabled('CalculateET'):
        note('CalculateET')
        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds3, 'L3')
    # run MOST (Buckingham Pi) 2d footprint model (Kljun et al. 2004)
    if enabled('footprint'):
        note('footprint')
        qcts.do_footprint_2d(cf, ds3)
    if enabled('Corrections'):
        qcio.get_seriesstats(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1],
    # NEE [umol m-2 s-1] and NEP = - NEE
    if enabled('convertFc'):
        note('convertFc')
        qcts.ConvertFc(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc [umol m-2 s-1]
    if enabled('JasonFc'):
        note('convertFc (umol only)')
        qcts.ConvertFcJason(cf, ds3)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)
    # compute water-use efficiency from flux-gradient similarity (appendix A, Scanlon & Sahu 2008)
    if enabled('wue'):
        note('wue')
        log.info(' Calculating water-use efficiency from flux-gradient similarity')
        qcts.CalculateWUEfromSimilarity(cf, ds3)
    # compute climatology for L3 data
    if enabled('climatology'):
        note('climatology')
        qcts.do_climatology(cf, ds3)
    # compute sums at L3
    if enabled('Sums', value='L3'):
        # BUGFIX: the append previously read 'L5Functions' (never set on ds3),
        # so it always failed and clobbered the accumulated list with 'Sums'
        note('Sums')
        qcts.do_sums(cf, ds3)
    # roll the level function list into the cumulative 'Functions' attribute
    try:
        ds3.globalattributes['Functions'] = ds3.globalattributes['Functions'] + ', ' + ds3.globalattributes['L3Functions']
    except KeyError:
        # BUGFIX: the original fallback re-raised KeyError when no L3 step
        # had run ('L3Functions' absent); record an empty list instead
        if 'L3Functions' in ds3.globalattributes:
            ds3.globalattributes['Functions'] = ds3.globalattributes['L3Functions']
        else:
            ds3.globalattributes['Functions'] = ''
    return ds3
def l4qc(cf, ds3, InLevel, x):
    """
    Generate the L4 (gap-filled meteorology) dataset from the L3 dataset.

    Optional steps (switched from cf['Functions']): HMP Ah calculation,
    standard met variables, wind-speed merge, and linear interpolation over
    short gaps (1 hour first pass, up to 3 hours after the QC re-check).

    Args:
        cf: control file object (dict-like)
        ds3: L3 data structure; deep-copied, never modified
        InLevel: input level label; kept for call-compatibility, unused here
        x: counter of data-altering steps; incremented here as steps run
    Returns:
        (ds4, x): the new L4 data structure and the updated counter
    """
    def note(name):
        # Record `name` in the 'L4Functions' global attribute, creating
        # the attribute on first use.
        try:
            ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', ' + name
        except KeyError:
            ds4.globalattributes['L4Functions'] = name

    def enabled(key):
        # True when cf['Functions'][key] exists and is the string 'True'.
        return qcutils.cfkeycheck(cf, Base='Functions', ThisOne=key) and cf['Functions'][key] == 'True'

    ds4 = copy.deepcopy(ds3)
    ds4.globalattributes['nc_level'] = 'L4'
    if enabled('L4_offline') and qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L4_keys'):
        note(cf['Functions']['L4_keys'])
        x = x + 1
    # determine HMP Ah if not output by datalogger
    if enabled('CalculateAh'):
        note('CalculateAh')
        log.info(' Adding HMP Ah to database')
        qcts.CalculateAhHMP(cf, ds4)
    # add relevant meteorological values to L4 data
    if enabled('CalculateMetVars'):
        note('CalculateMetVars')
        log.info(' Adding standard met variables to database')
        qcts.CalculateMeteorologicalVariables(ds4)
    # merge CSAT and wind sentry wind speed
    if enabled('MergeSeriesWS'):
        note('MergeSeriesWS')
        qcts.MergeSeries(cf, ds4, 'Ws', [0, 10])
    # linear interpolation to fill missing values over gaps of 1 hour
    if enabled('InterpolateOverMissing'):
        note('InterpolateOverMissing')
        log.info(' Gap filling by linear interpolation to fill missing values over gaps of 1 hour')
        for ThisOne in cf['InterpolateVars'].keys():
            qcts.InterpolateOverMissing(cf, ds4, series=ThisOne, maxlen=2)
        x = x + 1
    # re-apply the quality control checks (range, diurnal and rules)
    if x > 0:
        log.info(' Doing QC checks on L4 data')
        qcck.do_qcchecks(cf, ds4)
        note('do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)')
    # interpolate over any remaining gaps up to 3 hours in length
    if enabled('InterpolateOverMissing'):
        for ThisOne in cf['InterpolateVars'].keys():
            qcts.InterpolateOverMissing(cf, ds4, series=ThisOne, maxlen=6)
    # roll the level function list into the cumulative 'Functions' attribute.
    # BUGFIX: this was previously unguarded (unlike l3qc/l5qc) and raised
    # KeyError whenever 'Functions' or 'L4Functions' was absent, e.g. when
    # no L4 option was enabled
    try:
        ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', ' + ds4.globalattributes['L4Functions']
    except KeyError:
        if 'L4Functions' in ds4.globalattributes:
            ds4.globalattributes['Functions'] = ds4.globalattributes['L4Functions']
        else:
            ds4.globalattributes['Functions'] = ''
    return ds4, x