def l1qc(cf, ds):
    """Stamp L1 metadata onto the data structure and compute data coverage.

    cf is the control file object, ds the data structure; ds is returned
    with updated global attributes and coverage statistics.
    """
    # provenance: processing level, Python version, QC version history,
    # and which control file drove this run
    for attr, value in (('nc_level', 'L1'),
                        ('EPDversion', sys.version),
                        ('QC_version_history', cfg.__doc__),
                        ('controlfile_name', cf['controlfile_name'])):
        ds.globalattributes[attr] = value
    # percentage of good data, per variable and then per group
    qcutils.get_coverage_individual(ds)
    qcutils.get_coverage_groups(ds)
    return ds
def l1qc(cf, ds):
    """Apply the L1 finishing steps: global attributes plus coverage stats."""
    attrs = ds.globalattributes
    attrs['nc_level'] = 'L1'
    # record the software environment used to produce this file
    attrs['EPDversion'] = sys.version
    attrs['QC_version_history'] = cfg.__doc__
    # keep a record of which control file drove this run
    attrs['controlfile_name'] = cf['controlfile_name']
    # write the percentage of good data: per variable, then per group
    qcutils.get_coverage_individual(ds)
    qcutils.get_coverage_groups(ds)
    return ds
def l6qc(cf, ds5):
    """Run L6 processing: estimate ER and derive NEE, NEP, ET and GPP.

    Builds a new data structure from the L5 one, estimates ecosystem
    respiration by several methods, merges the estimates with the
    observations and partitions the carbon flux.  Returns the L6 data
    structure, or the empty structure when the copy step fails.
    """
    out = qcio.copy_datastructure(cf, ds5)
    # copy_datastructure returns an empty (falsey) structure on error
    if not out:
        return out
    qcutils.UpdateGlobalAttributes(cf, out, "L6")
    # read the L6 options from the control file
    qcrp.ParseL6ControlFile(cf, out)
    # bring in any externally supplied series
    qcgf.ImportSeries(cf, out)
    # make sure Fc is in umol/m2/s before any respiration work
    qcutils.CheckUnits(out, "Fc", "umol/m2/s", convert_units=True)
    # turbulence filter (if requested), then observed ER from filtered Fc
    qcck.ApplyTurbulenceFilter(cf, out)
    qcrp.GetERFromFc2(cf, out)
    # independent ER estimates: SOLO, FFNET, Lloyd-Taylor and Lasslop
    qcrp.ERUsingSOLO(cf, out)
    qcrp.ERUsingFFNET(cf, out)
    qcrp.ERUsingLloydTaylor(cf, out)
    qcrp.ERUsingLasslop(cf, out)
    # combine the modelled ER with the observations
    qcts.MergeSeriesUsingDict(out, merge_order="standard")
    # derived quantities: NEE from Fc and ER, NEP from NEE, ET from Fe
    qcrp.CalculateNEE(cf, out)
    qcrp.CalculateNEP(cf, out)
    qcrp.CalculateET(out)
    # split NEE into GPP and ER
    qcrp.PartitionNEE(cf, out)
    # data coverage statistics, per variable and per group
    qcutils.get_coverage_individual(out)
    qcutils.get_coverage_groups(out)
    # produce the L6 summary output
    qcrp.L6_summary(cf, out)
    return out
def l2qc(cf, ds1):
    """Generate the L2 data structure from L1 by applying the QA/QC checks.

    Checks applied: range/diurnal/exclude-date/exclude-hour QC, CSAT and
    LI-7500 diagnostics, the albedo constraint and linear corrections.
    Returns a new data structure; ds1 is not modified.
    """
    # work on a deep copy so the L1 structure is left untouched
    ds2 = copy.deepcopy(ds1)
    funcs = 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours), CSATcheck, 7500check, albedo'
    ds2.globalattributes['nc_level'] = 'L2'
    ds2.globalattributes['EPDversion'] = sys.version
    ds2.globalattributes['QC_version_history'] = cfg.__doc__
    # both attributes carry the same function list
    ds2.globalattributes['L2Functions'] = funcs
    ds2.globalattributes['Functions'] = funcs
    # record which control file drove this run
    ds2.globalattributes['controlfile_name'] = cf['controlfile_name']
    # range, diurnal, exclude-dates and exclude-hours checks
    qcck.do_qcchecks(cf, ds2)
    # sonic (CSAT) and IRGA (LI-7500) diagnostic checks
    qcck.do_CSATcheck(cf, ds2)
    qcck.do_7500check(cf, ds2)
    # constrain albedo estimates to full sun angles
    qcts.albedo(cf, ds2)
    log.info(' Finished the albedo constraints')
    # linear corrections
    log.info(' Applying linear corrections ...')
    qcck.do_linear(cf, ds2)
    # per-series statistics written to file
    qcio.get_seriesstats(cf, ds2)
    # data coverage, per variable and per group
    qcutils.get_coverage_individual(ds2)
    qcutils.get_coverage_groups(ds2)
    return ds2
def l6qc(cf, ds5):
    """L6 processing: estimate ER, then derive NEE, NEP, ET and GPP.

    A copy of the L5 data structure is processed and returned.  When the
    copy step fails, the empty structure is returned immediately.
    """
    result = qcio.copy_datastructure(cf, ds5)
    if not result:
        # copy_datastructure signals failure with an empty (falsey) structure
        return result
    # level attributes and control file options
    qcutils.UpdateGlobalAttributes(cf, result, "L6")
    qcrp.ParseL6ControlFile(cf, result)
    # optional imports of external series
    qcgf.ImportSeries(cf, result)
    # Fc must be in umol/m2/s from here on
    qcutils.CheckUnits(result, "Fc", "umol/m2/s", convert_units=True)
    # turbulence filter (if requested), then observed ER from filtered Fc
    qcck.ApplyTurbulenceFilter(cf, result)
    qcrp.GetERFromFc2(cf, result)
    # ER models: SOLO, FFNET, Lloyd-Taylor, Lasslop
    qcrp.ERUsingSOLO(cf, result)
    qcrp.ERUsingFFNET(cf, result)
    qcrp.ERUsingLloydTaylor(cf, result)
    qcrp.ERUsingLasslop(cf, result)
    # merge modelled and observed ER
    qcts.MergeSeriesUsingDict(result, merge_order="standard")
    # NEE, NEP, ET and the GPP/ER partition
    qcrp.CalculateNEE(cf, result)
    qcrp.CalculateNEP(cf, result)
    qcrp.CalculateET(result)
    qcrp.PartitionNEE(cf, result)
    # coverage statistics and the L6 summary
    qcutils.get_coverage_individual(result)
    qcutils.get_coverage_groups(result)
    qcrp.L6_summary(cf, result)
    return result
def l5qc(cf, ds4):
    """L5 processing: gap fill the fluxes.

    Copies the L4 data structure, re-applies the QC checks and fills gaps
    in the fluxes using interpolation, SOLO, MDS and climatology, then
    merges the filled series and derives the Monin-Obukhov length.
    Returns the L5 structure; returns early when SOLO requests a quit.
    """
    ds5 = qcio.copy_datastructure(cf, ds4)
    # an empty (falsey) structure signals a failure in copy_datastructure
    if not ds5:
        return ds5
    qcutils.UpdateGlobalAttributes(cf, ds5, "L5")
    ds5.cf = cf
    # alternate-data sources used by the gap fillers
    ds_alt = {}
    # bring in any externally supplied series
    qcgf.ImportSeries(cf, ds5)
    # re-apply the QC checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds5)
    # read the per-variable gap filling configuration from the control file
    for label in qcutils.get_label_list_from_cf(cf):
        qcgf.GapFillParseControlFile(cf, ds5, label, ds_alt)
    # --- gap filling of the fluxes starts here ---
    # turbulence filter (if requested)
    qcck.ApplyTurbulenceFilter(cf, ds5)
    # short gaps: interpolation
    qcgf.GapFillUsingInterpolation(cf, ds5)
    # SOLO neural network; the original (L4) structure is passed alongside
    qcgfSOLO.GapFillUsingSOLO(cf, ds4, ds5)
    if ds5.returncodes["solo"] == "quit":
        return ds5
    # marginal distribution sampling, then climatology as the fallback
    qcgfMDS.GapFillFluxUsingMDS(cf, ds5)
    qcgf.GapFillFromClimatology(ds5)
    # merge the gap filled drivers into single series
    qcts.MergeSeriesUsingDict(ds5, merge_order="standard")
    # Monin-Obukhov length from the merged series
    qcts.CalculateMoninObukhovLength(ds5)
    # data coverage, per variable and per group
    qcutils.get_coverage_individual(ds5)
    qcutils.get_coverage_groups(ds5)
    return ds5
def l2qc(cf, ds1):
    """Produce the L2 data structure from L1 by running the QA/QC checks.

    Applies the range/diurnal/exclude checks, the CSAT and LI-7500
    diagnostics, the albedo constraint and the linear corrections, then
    records series statistics and coverage.  ds1 is left unchanged.
    """
    ds2 = copy.deepcopy(ds1)
    ds2.globalattributes['nc_level'] = 'L2'
    ds2.globalattributes['EPDversion'] = sys.version
    ds2.globalattributes['QC_version_history'] = cfg.__doc__
    # both attributes carry the same function list
    for key in ('L2Functions', 'Functions'):
        ds2.globalattributes[key] = ('do_qccheck(RangeCheck, diurnalcheck, '
                                     'excludedates, excludehours), CSATcheck, 7500check, albedo')
    ds2.globalattributes['controlfile_name'] = cf['controlfile_name']
    # QC checks: range, diurnal, exclude dates, exclude hours
    qcck.do_qcchecks(cf, ds2)
    # instrument diagnostics
    qcck.do_CSATcheck(cf, ds2)
    qcck.do_7500check(cf, ds2)
    # albedo constrained to full sun angles
    qcts.albedo(cf, ds2)
    log.info(' Finished the albedo constraints')
    log.info(' Applying linear corrections ...')
    qcck.do_linear(cf, ds2)
    # series statistics, then coverage per variable and per group
    qcio.get_seriesstats(cf, ds2)
    qcutils.get_coverage_individual(ds2)
    qcutils.get_coverage_groups(ds2)
    return ds2
def l2qc(cf, ds1):
    """Build the L2 data structure from L1 by applying the QA/QC checks.

    Runs the configured QC checks, the sonic and IRGA diagnostics and the
    linear corrections, verifies QC-flag consistency and records series
    statistics and per-variable coverage.  ds1 is not modified.
    """
    # operate on a deep copy so the L1 structure survives intact
    ds2 = copy.deepcopy(ds1)
    qcutils.UpdateGlobalAttributes(cf, ds2, "L2")
    ds2.globalattributes['Functions'] = ''
    # record which control file drove this run
    ds2.globalattributes['controlfile_name'] = cf['controlfile_name']
    # range, diurnal, exclude-dates and exclude-hours checks
    qcck.do_qcchecks(cf, ds2)
    # sonic anemometer then IRGA diagnostics
    qcck.do_SONICcheck(cf, ds2)
    qcck.do_IRGAcheck(cf, ds2)
    # NOTE: the albedo constraint from earlier versions is disabled here
    qcck.do_linear(cf, ds2)
    # missing data and QC flags must agree
    qcutils.CheckQCFlags(ds2)
    # series statistics, then per-variable coverage
    qcio.get_seriesstats(cf, ds2)
    qcutils.get_coverage_individual(ds2)
    return ds2
def l5qc(cf, ds4):
    """L5 processing: gap fill the fluxes (SOLO plus climatology).

    Returns a processed copy of the L4 data structure, the empty
    structure when the initial copy fails, or returns early when SOLO
    requests a quit.
    """
    ds5 = qcio.copy_datastructure(cf, ds4)
    if not ds5:
        # an empty (falsey) structure means copy_datastructure failed
        return ds5
    qcutils.UpdateGlobalAttributes(cf, ds5, "L5")
    ds5.cf = cf
    # holds any alternate-source data used by the gap fillers
    ds_alt = {}
    # bring in any externally supplied series
    qcgf.ImportSeries(cf, ds5)
    # re-apply the QC checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds5)
    # per-variable gap filling configuration
    for name in qcutils.get_label_list_from_cf(cf):
        qcgf.GapFillParseControlFile(cf, ds5, name, ds_alt)
    # --- flux gap filling ---
    # turbulence filter (if requested)
    qcck.ApplyTurbulenceFilter(cf, ds5)
    # SOLO gap filling; the original L4 structure is passed alongside
    qcgf.GapFillUsingSOLO(cf, ds4, ds5)
    if ds5.returncodes["solo"] == "quit":
        return ds5
    # climatology as the fallback fill (MDS/ratio methods are disabled)
    qcgf.GapFillFromClimatology(ds5)
    # merge the gap filled drivers into single series
    qcts.MergeSeriesUsingDict(ds5, merge_order="standard")
    # data coverage, per variable and per group
    qcutils.get_coverage_individual(ds5)
    qcutils.get_coverage_groups(ds5)
    return ds5
def l6qc(cf, ds5):
    """L6 processing (Fre naming): estimate ecosystem respiration and
    partition the carbon flux into GPP and Reco.

    Returns the processed copy of the L5 data structure, or the empty
    structure when the copy step fails.
    """
    ds6 = qcio.copy_datastructure(cf, ds5)
    if not ds6:
        # empty (falsey) structure: copy_datastructure failed
        return ds6
    qcutils.UpdateGlobalAttributes(cf, ds6, "L6")
    # control file options, then any external series imports
    qcrp.ParseL6ControlFile(cf, ds6)
    qcgf.ImportSeries(cf, ds6)
    # night-time, ustar-filtered Fc becomes the observed "Fre"
    qcrp.GetFreFromFc(cf, ds6)
    # four independent Reco estimates
    qcrp.FreUsingSOLO(cf, ds6)
    qcrp.FreUsingFFNET(cf, ds6)
    qcrp.FreUsingLloydTaylor(cf, ds6)
    qcrp.FreUsingLasslop(cf, ds6)
    # merge modelled Reco with the observations
    qcts.MergeSeriesUsingDict(ds6, merge_order="standard")
    # NEE from Fc and Fre, NEP from NEE, ET from Fe
    qcrp.CalculateNEE(cf, ds6)
    qcrp.CalculateNEP(cf, ds6)
    qcrp.CalculateET(ds6)
    # partition NEE into GPP and Reco
    qcrp.PartitionNEE(cf, ds6)
    # coverage statistics and the L6 summary
    qcutils.get_coverage_individual(ds6)
    qcutils.get_coverage_groups(ds6)
    qcrp.L6_summary(cf, ds6)
    return ds6
def l2qc(cf, ds1):
    """Create the L2 data structure by applying the QA/QC checks to L1 data.

    Runs the configured QC checks, the CSAT and IRGA diagnostics and the
    linear corrections, then records series statistics and per-variable
    coverage.  ds1 is left unchanged.
    """
    ds2 = copy.deepcopy(ds1)
    # level attributes; the function list is populated downstream
    qcutils.UpdateGlobalAttributes(cf, ds2, "L2")
    ds2.globalattributes['Functions'] = ''
    ds2.globalattributes['controlfile_name'] = cf['controlfile_name']
    # range, diurnal, exclude-dates and exclude-hours checks
    qcck.do_qcchecks(cf, ds2)
    # CSAT and IRGA diagnostics
    qcck.do_CSATcheck(cf, ds2)
    qcck.do_IRGAcheck(cf, ds2)
    # the albedo constraint is disabled in this version
    qcck.do_linear(cf, ds2)
    # series statistics, then per-variable coverage
    qcio.get_seriesstats(cf, ds2)
    qcutils.get_coverage_individual(ds2)
    return ds2
def _l3_function_enabled(cf, name, value='True'):
    """Return True when [Functions] *name* exists in the control file and equals *value*."""
    return (qcutils.cfkeycheck(cf, Base='Functions', ThisOne=name)
            and cf['Functions'][name] == value)


def _append_l3_function(ds, name):
    """Append *name* to the 'L3Functions' global attribute, creating it when absent."""
    try:
        ds.globalattributes['L3Functions'] = ds.globalattributes['L3Functions'] + ', ' + name
    except KeyError:
        ds.globalattributes['L3Functions'] = name


def l3qc(cf, ds2):
    """Apply the L3 corrections to the L2 data and return a new structure.

    Each step is gated by a [Functions] switch in the control file and is
    recorded in the 'L3Functions' global attribute.  Steps include (in
    order): NDVI, Sws temperature-correction bypass, soil water content
    correction, linear corrections, HMP Ah, Ah/Cc/Ta merging, standard
    met variables, 2D coordinate rotation, Massman correction, flux
    calculation, Fhv->Fh, WPL correction, net radiation, wind speed
    merging, soil averages, Fg storage correction, available energy,
    humidity/PM/Richardson diagnostics, re-applied QC checks, ustar
    filtering, gap coordination, ET, footprint, Fc conversions, coverage
    statistics, climatology and sums.  ds2 is not modified.
    """
    # work on a deep copy so the L2 structure is left untouched
    ds3 = copy.deepcopy(ds2)
    ds3.globalattributes['nc_level'] = 'L3'
    ds3.globalattributes['EPDversion'] = sys.version
    ds3.globalattributes['QC_version_history'] = cfg.__doc__
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # calculate NDVI from component reflectances
    if _l3_function_enabled(cf, 'NDVI'):
        _append_l3_function(ds3, 'calculateNDVI')
        log.info(' Calculating NDVI from component reflectances ...')
        qcts.CalculateNDVI(cf, ds3)
    # bypass soil temperature correction for Sws (when Ts bad)
    if _l3_function_enabled(cf, 'BypassSwsTcorr'):
        _append_l3_function(ds3, 'BypassSwsTcorr')
        log.info(' Re-computing Sws without temperature correction ...')
        qcts.BypassTcorr(cf, ds3)
    # correct measured soil water content against collected samples
    if _l3_function_enabled(cf, 'CorrectSWC'):
        _append_l3_function(ds3, 'CorrectSWC')
        log.info(' Correcting soil moisture data ...')
        qcts.CorrectSWC(cf, ds3)
    # apply linear corrections to the data
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'do_linear')
        log.info(' Applying linear corrections ...')
        qcck.do_linear(cf, ds3)
    # determine HMP Ah if not output by the datalogger
    if _l3_function_enabled(cf, 'CalculateAh'):
        _append_l3_function(ds3, 'CalculateAh')
        log.info(' Adding HMP Ah to database')
        qcts.CalculateAhHMP(cf, ds3)
    # merge the HMP and corrected 7500 data
    if _l3_function_enabled(cf, 'MergeSeriesAhTa'):
        _append_l3_function(ds3, 'MergeSeriesAhTaCc')
        qcts.MergeSeries(cf, ds3, 'Ah', [0, 10])
        qcts.MergeSeries(cf, ds3, 'Cc', [0, 10])
        # air temperature from the CSAT virtual temperature, then merge Ta
        _append_l3_function(ds3, 'TaFromTv')
        qcts.TaFromTv(cf, ds3)
        qcts.MergeSeries(cf, ds3, 'Ta', [0, 10])
    # add standard meteorological variables to the L3 data
    if _l3_function_enabled(cf, 'Corrections') or _l3_function_enabled(cf, 'CalculateMetVars'):
        _append_l3_function(ds3, 'CalculateMetVars')
        log.info(' Adding standard met variables to database')
        qcts.CalculateMeteorologicalVariables(ds3)
    # 2D coordinate rotation
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'CoordRotation2D')
        qcts.CoordRotation2D(cf, ds3)
    # Massman frequency attenuation correction
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'Massman')
        qcts.MassmanStandard(cf, ds3)
    # calculate the fluxes
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'CalculateFluxes')
        qcts.CalculateFluxes(cf, ds3)
    # approximate wT from virtual wT using wA (ref: Campbell OPEC manual)
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'FhvtoFh')
        qcts.FhvtoFh(cf, ds3)
    # WPL density correction of the H2O and CO2 fluxes
    if _l3_function_enabled(cf, 'Corrections'):
        if _l3_function_enabled(cf, 'WPLcov'):
            # WPL applied to the kinematic fluxes (covariances), as in WPL80
            _append_l3_function(ds3, 'WPLcov')
            qcts.do_WPL(cf, ds3, cov='True')
        else:
            # WPL applied to the fluxes, as in the Campbell algorithm
            _append_l3_function(ds3, 'WPL')
            qcts.do_WPL(cf, ds3)
    # net radiation from the Kipp and Zonen CNR1
    if _l3_function_enabled(cf, 'CalculateNetRadiation'):
        _append_l3_function(ds3, 'CalculateNetRadiation')
        qcts.MergeSeries(cf, ds3, 'Fsd', [0, 10])
        qcts.CalculateNetRadiation(ds3, 'Fn_KZ', 'Fsd', 'Fsu', 'Fld', 'Flu')
        qcts.MergeSeries(cf, ds3, 'Fn', [0, 10])
    # combine wind speed from the CSAT and the Wind Sentry
    if _l3_function_enabled(cf, 'MergeSeriesWS'):
        _append_l3_function(ds3, 'MergeSeriesWS')
        qcts.MergeSeries(cf, ds3, 'Ws', [0, 10])
    # average the soil temperature and moisture data
    if _l3_function_enabled(cf, 'Corrections'):
        # .get avoids a KeyError when 'L3Functions' has not been set yet;
        # the membership test avoids recording 'SoilAverage' twice
        if 'SoilAverage' not in ds3.globalattributes.get('L3Functions', ''):
            _append_l3_function(ds3, 'SoilAverage')
        # interpolate over any remaining gaps up to 3 hours in length
        qcts.AverageSeriesByElementsI(cf, ds3, 'Ts')
        qcts.AverageSeriesByElementsI(cf, ds3, 'Sws')
    # correct the measured soil heat flux for storage above the sensor
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'CorrectFgForStorage')
        if _l3_function_enabled(cf, 'IndividualFgCorrection'):
            qcts.CorrectIndividualFgForStorage(cf, ds3)
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
        else:
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
            qcts.CorrectGroupFgForStorage(cf, ds3)
    # calculate the available energy
    if _l3_function_enabled(cf, 'CalculateAvailableEnergy'):
        _append_l3_function(ds3, 'CalculateAvailableEnergy')
        qcts.CalculateAvailableEnergy(ds3)
    # prepare OzFlux variables unless DiagnosticMode is explicitly enabled
    # (original code ran prepOzFluxVars when the key was absent or 'False')
    if (not qcutils.cfkeycheck(cf, Base='Functions', ThisOne='DiagnosticMode')
            or cf['Functions']['DiagnosticMode'] == 'False'):
        qcutils.prepOzFluxVars(cf, ds3)
    # specific humidity and saturated specific humidity profile
    if _l3_function_enabled(cf, 'qTprofile'):
        _append_l3_function(ds3, 'qTprofile')
        qcts.CalculateSpecificHumidityProfile(cf, ds3)
    # Penman-Monteith inversion
    if _l3_function_enabled(cf, 'PenmanMonteith'):
        _append_l3_function(ds3, 'PenmanMonteith')
        qcts.do_PenmanMonteith(cf, ds3)
    # bulk Richardson numbers
    if _l3_function_enabled(cf, 'bulkRichardson'):
        _append_l3_function(ds3, 'bulkRichardson')
        qcts.do_bulkRichardson(cf, ds3)
    # re-apply the quality control checks (range, diurnal and rules)
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)')
        qcck.do_qcchecks(cf, ds3)
    # apply the ustar filter
    if _l3_function_enabled(cf, 'ustarFilter'):
        _append_l3_function(ds3, 'ustarFilter')
        qcts.FilterFcByUstar(cf, ds3)
    # coordinate gaps in the three main fluxes
    if _l3_function_enabled(cf, 'CoordinateFluxGaps'):
        _append_l3_function(ds3, 'CoordinateFluxGaps')
        qcck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    if _l3_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'CoordinateAh7500AndFcGaps')
        qcck.CoordinateAh7500AndFcGaps(cf, ds3)
    # calculate ET at the observation interval
    if _l3_function_enabled(cf, 'CalculateET'):
        _append_l3_function(ds3, 'CalculateET')
        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds3, 'L3')
    # MOST (Buckingham Pi) 2d footprint model (Kljun et al. 2004)
    if _l3_function_enabled(cf, 'footprint'):
        _append_l3_function(ds3, 'footprint')
        qcts.do_footprint_2d(cf, ds3)
    if _l3_function_enabled(cf, 'Corrections'):
        qcio.get_seriesstats(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2, Fc_c, NEE and NEP = -NEE
    if _l3_function_enabled(cf, 'convertFc'):
        _append_l3_function(ds3, 'convertFc')
        qcts.ConvertFc(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc [umol m-2 s-1] only
    if _l3_function_enabled(cf, 'JasonFc'):
        _append_l3_function(ds3, 'convertFc (umol only)')
        qcts.ConvertFcJason(cf, ds3)
    # data coverage, per variable and per group
    qcutils.get_coverage_individual(ds3)
    qcutils.get_coverage_groups(ds3)
    # compute climatology for L3 data
    if _l3_function_enabled(cf, 'climatology'):
        _append_l3_function(ds3, 'climatology')
        qcts.do_climatology(cf, ds3)
    # daily sums; BUG FIX: the original read 'L5Functions' here, which
    # always raised and clobbered the accumulated 'L3Functions' list
    if _l3_function_enabled(cf, 'Sums', value='L3'):
        _append_l3_function(ds3, 'Sums')
        qcts.do_sums(cf, ds3)
    # roll the L3 function list into the cumulative 'Functions' attribute
    try:
        ds3.globalattributes['Functions'] = ds3.globalattributes['Functions'] + ', ' + ds3.globalattributes['L3Functions']
    except KeyError:
        ds3.globalattributes['Functions'] = ds3.globalattributes['L3Functions']
    return ds3
def l4to6qc(cf,ds3,AttrLevel,InLevel,OutLevel):
    """
        Fill gaps in met data from other sources
        Integrate SOLO-ANN gap filled fluxes performed externally
        Generates L4 from L3 data
        Generates daily sums excel workbook

        Variable Series:
            Meteorological (MList): Ah_EC, Cc_7500_Av, ps, Ta_EC, Ws_CSAT, Wd_CSAT
            Radiation (RList): Fld, Flu, Fn, Fsd, Fsu
            Soil water content (SwsList): all variables containing Sws in variable name
            Soil (SList): Fg, Ts, SwsList
            Turbulent fluxes (FList): Fc_wpl, Fe_wpl, Fh, ustar
            Output (OList): MList, RList, SList, FList

        Parameters loaded from control file:
            zmd: z-d
            z0: roughness height

        Functions performed:
            qcts.AddMetVars
            qcts.ComputeDailySums
            qcts.InterpolateOverMissing (OList for gaps shorter than 3 observations, OList gaps shorter than 7 observations)
            qcts.GapFillFromAlternate (MList, RList)
            qcts.GapFillFromClimatology (Ah_EC, Fn, Fg, ps, Ta_EC, Ws_CSAT, OList)
            qcts.GapFillFromRatios (Fe, Fh, Fc)
            qcts.ReplaceOnDiff (Ws_CSAT, ustar)
            qcts.UstarFromFh
            qcts.ReplaceWhereMissing (Ustar)
            qcck.do_qcchecks

        Returns ds4, or (ds4,ds5), or (ds4,ds5,ds6) depending on OutLevel.
        """
    if AttrLevel == 'False':
        ds3.globalattributes['Functions'] = ''
        AttrLevel = InLevel
    # check to ensure L4 functions are defined in controlfile
    if qcutils.cfkeycheck(cf,Base='Functions'):
        # x, y and z record whether any L4, L5 and L6 functions were applied
        x=0
        y=0
        z=0
    else:
        log.error('FunctionList not found in control file')
        ds3x = copy.deepcopy(ds3)
        ds3x.globalattributes['nc_level'] = 'L3'
        ds3x.globalattributes['L4Functions'] = 'No L4-L6 functions applied'
        return ds3x
    # handle meta-data and import L4-L6 from external process
    if InLevel == 'L3':
        ds3x = copy.deepcopy(ds3)
    else:
        infilename = qcio.get_infilename_from_cf(cf,InLevel)
        ds3x = qcio.nc_read_series(infilename)
        # copy global attributes missing from the imported file from the L3 data
        for ThisOne in ds3.globalattributes.keys():
            if ThisOne not in ds3x.globalattributes.keys():
                ds3x.globalattributes[ThisOne] = ds3.globalattributes[ThisOne]
        # copy non-descriptive variable attributes from the L3 data
        for ThisOne in ds3.series.keys():
            if ThisOne in ds3x.series.keys():
                for attr in ds3.series[ThisOne]['Attr'].keys():
                    if attr not in ['ancillary_variables','long_name','standard_name','units']:
                        ds3x.series[ThisOne]['Attr'][attr] = ds3.series[ThisOne]['Attr'][attr]
    ds3x.globalattributes['nc_level'] = AttrLevel
    ds3x.globalattributes['EPDversion'] = sys.version
    ds3x.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3x.globalattributes['controlfile_name'] = cf['controlfile_name']
    # keep the imported file's workbook attributes under xlL?_* and restore the L3 ones
    if OutLevel == 'L6':
        ds3x.globalattributes['xlL6_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL6_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL6_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    elif OutLevel == 'L5':
        ds3x.globalattributes['xlL5_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL5_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL5_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    elif OutLevel == 'L4':
        ds3x.globalattributes['xlL4_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL4_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL4_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    qcutils.prepOzFluxVars(cf,ds3x)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1], NEE [umol m-2 s-1] and NEP = - NEE
    if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='convertFc') and cf['Functions']['convertFc'] == 'True':
        try:
            ds3x.globalattributes['L4Functions'] = ds3x.globalattributes['L4Functions']+', convertFc'
        except:
            ds3x.globalattributes['L4Functions'] = 'convertFc'
        if 'Fc_co2' in ds3x.series.keys():
            qcts.ConvertFc(cf,ds3x,Fco2_in='Fc_co2')
        else:
            qcts.ConvertFc(cf,ds3x)
    ds4x = copy.deepcopy(ds3x)
    # restore the original L3 fluxes before the L4 gap filling is done
    for ThisOne in ['NEE','NEP','Fc','Fc_co2','Fc_c','Fe','Fh']:
        if ThisOne in ds4x.series.keys() and ThisOne in ds3.series.keys():
            ds4x.series[ThisOne] = ds3.series[ThisOne].copy()
    # blank the partitioned series so they are regenerated at L6
    for ThisOne in ['GPP','CE','ER_night','ER_dark','CE_day','CE_NEEmax','ER_bio','PD','ER_n','ER_LRF']:
        if ThisOne in ds4x.series.keys():
            ds4x.series[ThisOne]['Data'] = numpy.ones(len(ds4x.series[ThisOne]['Data']),dtype=numpy.float64) * numpy.float64(c.missing_value)
            ds4x.series[ThisOne]['Flag'] = numpy.ones(len(ds4x.series[ThisOne]['Data']), dtype=numpy.int32)
    if InLevel == 'L4' or AttrLevel == 'L3':
        ds4,x = l4qc(cf,ds4x,InLevel,x)
        qcutils.get_coverage_individual(ds4)
        qcutils.get_coverage_groups(ds4)
        if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf,ds4)
    if OutLevel == 'L5' or OutLevel == 'L6':
        try:
            ds4y = copy.deepcopy(ds4)
        except:
            # ds4 is unbound when the L4 step was skipped; fall back to the pre-L4 copy
            ds4y = copy.deepcopy(ds4x)
        # fixed: 'Fc_c' was listed twice, causing a redundant second pass
        for ThisOne in ['NEE','NEP','Fc','Fc_c','Fc_co2','Fe','Fh']:
            var, var_flag, var_attr = qcutils.GetSeriesasMA(ds3x,ThisOne)
            qcutils.CreateSeries(ds4y,ThisOne,var,Flag=var_flag,Attr=var_attr)
            ds4y.series[ThisOne]['Attr']['long_name'] = var_attr['long_name']
        ds5,y = l5qc(cf,ds4y,y)
        qcutils.get_coverage_individual(ds5)
        qcutils.get_coverage_groups(ds5)
        if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf,ds5)
    if OutLevel == 'L6':
        ds5z = copy.deepcopy(ds5)
        for ThisOne in ['GPP','CE','ER_night','ER_dark','CE_day','CE_NEEmax','ER_bio','PD','ER_n','ER_LRF']:
            if ThisOne in ds3x.series.keys():
                ds5z.series[ThisOne] = ds3x.series[ThisOne].copy()
        ds6,z = l6qc(cf,ds5z,z)
        qcutils.get_coverage_individual(ds6)
        qcutils.get_coverage_groups(ds6)
        if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf,ds6)
    # calculate daily statistics
    if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='Sums'):
        if cf['Functions']['Sums'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions']+', Sums'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions']+', Sums'
            except:
                ds6.globalattributes['L6Functions'] = 'Sums'
            qcts.do_sums(cf,ds6)
        elif cf['Functions']['Sums'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions']+', Sums'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions']+', Sums'
            except:
                ds5.globalattributes['L5Functions'] = 'Sums'
            qcts.do_sums(cf,ds5)
        elif cf['Functions']['Sums'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions']+', Sums'
            try:
                # fixed: read 'L4Functions' (was 'L5Functions', which made this
                # branch always fall through to the except clause)
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions']+', Sums'
            except:
                ds4.globalattributes['L4Functions'] = 'Sums'
            qcts.do_sums(cf,ds4)
    # compute climatology
    if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='climatology'):
        if cf['Functions']['climatology'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions']+', climatology'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions']+', climatology'
            except:
                ds6.globalattributes['L6Functions'] = 'climatology'
            qcts.do_climatology(cf,ds6)
        elif cf['Functions']['climatology'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions']+', climatology'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions']+', climatology'
            except:
                ds5.globalattributes['L5Functions'] = 'climatology'
            qcts.do_climatology(cf,ds5)
        elif cf['Functions']['climatology'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions']+', climatology'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions']+', climatology'
            except:
                ds4.globalattributes['L4Functions'] = 'climatology'
            qcts.do_climatology(cf,ds4)
    # record in the global attributes when no gap filling / partitioning functions
    # ran, then return the data structures appropriate to the requested output level
    if OutLevel == 'L4' and (InLevel == 'L3' or InLevel == 'L4'):
        if x == 0:
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
            log.warn(' L4: no record of gapfilling functions')
        return ds4
    elif OutLevel == 'L5':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        if y == 0:
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except:
                ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
            log.warn(' L5: no record of gapfilling functions')
        return ds4,ds5
    elif OutLevel == 'L6':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds6.globalattributes['L4Functions'] = ds6.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds6.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        if y == 0:
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
                try:
                    ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
                except:
                    ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
                log.warn(' L5: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds6.globalattributes['L5Functions'] = ds6.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except:
                ds6.globalattributes['L5Functions'] = 'No further L5 gapfilling'
        if z == 0:
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L6 partitioning'
            try:
                # fixed: read from ds6 (was ds5, so the appended history was read
                # from the wrong data structure)
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', No further L6 partitioning'
            except:
                ds6.globalattributes['L6Functions'] = 'No further L6 partitioning'
            log.warn(' L6: no record of gapfilling functions')
        return ds4,ds5,ds6
def l3qc(cf,ds2):
    """
        Corrections
        Generates L3 from L2 data

        Takes the control file cf and the L2 data structure ds2, deep-copies
        ds2 into a new L3 data structure ds3, applies the correction sequence
        below in order and returns ds3.  The order of the calls matters: each
        step reads series written by earlier steps.

        Functions performed:
            qcts.AddMetVars (optional)
            qcts.CorrectSWC (optional*)
            qcck.do_linear (all sites)
            qcutils.GetMergeList + qcts.MergeSeries Ah_EC (optional)x
            qcts.TaFromTv (optional)
            qcutils.GetMergeList + qcts.MergeSeries Ta_EC (optional)x
            qcts.CoordRotation2D (all sites)
            qcts.MassmanApprox (optional*)y
            qcts.Massman (optional*)y
            qcts.CalculateFluxes (used if Massman not optioned)x
            qcts.CalculateFluxesRM (used if Massman optioned)y
            qcts.FhvtoFh (all sites)
            qcts.Fe_WPL (WPL computed on fluxes, as with Campbell algorithm)+x
            qcts.Fc_WPL (WPL computed on fluxes, as with Campbell algorithm)+x
            qcts.Fe_WPLcov (WPL computed on kinematic fluxes (ie, covariances), as with WPL80)+y
            qcts.Fc_WPLcov (WPL computed on kinematic fluxes (ie, covariances), as with WPL80)+y
            qcts.CalculateNetRadiation (optional)
            qcutils.GetMergeList + qcts.MergeSeries Fsd (optional)
            qcutils.GetMergeList + qcts.MergeSeries Fn (optional*)
            qcts.InterpolateOverMissing (optional)
            AverageSeriesByElements (optional)
            qcts.CorrectFgForStorage (all sites)
            qcts.Average3SeriesByElements (optional)
            qcts.CalculateAvailableEnergy (optional)
            qcck.do_qcchecks (all sites)
            qcck.gaps (optional)

        *: requires ancillary measurements for parameterisation
        +: each site requires one pair, either Fe_WPL & Fc_WPL (default) or Fe_WPLCov & FcWPLCov
        x: required together in option set
        y: required together in option set
        """
    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf,ds3,"L3")
    # initialise the global attribute to document the functions used
    ds3.globalattributes['Functions'] = ''
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # check to see if we have any imports
    qcgf.ImportSeries(cf,ds3)
    # correct measured soil water content using empirical relationship to collected samples
    qcts.CorrectSWC(cf,ds3)
    # apply linear corrections to the data
    qcck.do_linear(cf,ds3)
    # merge whatever humidities are available
    qcts.MergeHumidities(cf,ds3,convert_units=True)
    # get the air temperature from the CSAT virtual temperature
    qcts.TaFromTv(cf,ds3)
    # merge the HMP and corrected CSAT data
    qcts.MergeSeries(cf,ds3,'Ta',[0,10],convert_units=True)
    qcutils.CheckUnits(ds3,"Ta","C",convert_units=True)
    # calculate humidities (absolute, specific and relative) from whatever is available
    qcts.CalculateHumidities(ds3)
    # merge the 7500 CO2 concentration
    qcts.MergeSeries(cf,ds3,'Cc',[0,10],convert_units=True)
    # PRI - disable CO2 units conversion from whatever to mg/m3
    #     - this step is, as far as I can see, redundant, see qcts.Fc_WPL()
    #qcutils.CheckUnits(ds3,"Cc","mg/m3",convert_units=True)
    # add relevant meteorological values to L3 data
    qcts.CalculateMeteorologicalVariables(ds3)
    # check to see if the user wants to use the fluxes in the L2 file
    if not qcutils.cfoptionskeylogical(cf,Key="UseL2Fluxes",default=False):
        # check the covariance units and change if necessary
        qcts.CheckCovarianceUnits(ds3)
        # do the 2D coordinate rotation
        qcts.CoordRotation2D(cf,ds3)
        # do the Massman frequency attenuation correction
        qcts.MassmanStandard(cf,ds3)
        # calculate the fluxes
        qcts.CalculateFluxes(cf,ds3)
        # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        qcts.FhvtoFh(cf,ds3)
        # correct the H2O & CO2 flux due to effects of flux on density measurements
        qcts.Fe_WPL(cf,ds3)
        qcts.Fc_WPL(cf,ds3)
    # convert CO2 units if required
    qcutils.ConvertCO2Units(cf,ds3,Cc='Cc')
    # calculate Fc storage term - single height only at present
    qcts.CalculateFcStorage(cf,ds3)
    # convert Fc and Fc_storage units if required
    qcutils.ConvertFcUnits(cf,ds3,Fc='Fc',Fc_storage='Fc_storage')
    # correct Fc for storage term - only recommended if storage calculated from profile available
    qcts.CorrectFcForStorage(cf,ds3)
    # merge the incoming shortwave radiation
    qcts.MergeSeries(cf,ds3,'Fsd',[0,10])
    # calculate the net radiation from the Kipp and Zonen CNR1
    qcts.CalculateNetRadiation(cf,ds3,Fn_out='Fn_KZ',Fsd_in='Fsd',Fsu_in='Fsu',Fld_in='Fld',Flu_in='Flu')
    qcts.MergeSeries(cf,ds3,'Fn',[0,10])
    # combine wind speed from the Wind Sentry and the CSAT
    qcts.MergeSeries(cf,ds3,'Ws',[0,10])
    # combine wind direction from the Wind Sentry and the CSAT
    qcts.MergeSeries(cf,ds3,'Wd',[0,10])
    # correct soil heat flux for storage
    #  ... either average the raw ground heat flux, soil temperature and moisture
    #      and then do the correction (OzFlux "standard")
    qcts.AverageSeriesByElements(cf,ds3,'Ts')
    qcts.AverageSeriesByElements(cf,ds3,'Sws')
    if qcutils.cfoptionskeylogical(cf,Key='CorrectIndividualFg'):
        #  ... or correct the individual ground heat flux measurements (James' method)
        qcts.CorrectIndividualFgForStorage(cf,ds3)
        qcts.AverageSeriesByElements(cf,ds3,'Fg')
    else:
        qcts.AverageSeriesByElements(cf,ds3,'Fg')
        qcts.CorrectFgForStorage(cf,ds3,Fg_out='Fg',Fg_in='Fg',Ts_in='Ts',Sws_in='Sws')
    # calculate the available energy
    qcts.CalculateAvailableEnergy(ds3,Fa_out='Fa',Fn_in='Fn',Fg_in='Fg')
    # create new series using MergeSeries or AverageSeries
    qcck.CreateNewSeries(cf,ds3)
    # create a series of daily averaged soil moisture interpolated back to the time step
    #qcts.DailyAverageSws_Interpolated(cf,ds3,Sws_out='Sws_daily',Sws_in='Sws')
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf,ds3)
    # coordinate gaps in the three main fluxes
    qcck.CoordinateFluxGaps(cf,ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    qcck.CoordinateAh7500AndFcGaps(cf,ds3)
    # get the statistics for the QC flags and write these to an Excel spreadsheet
    qcio.get_seriesstats(cf,ds3)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)
    return ds3
def l4qc(cf,ds3):
    """
    Generate the L4 (gap filled meteorological drivers) data structure from L3 data.

    Copies ds3 via qcio.copy_datastructure, re-applies the QC checks, then gap
    fills each series listed in cf["Drivers"] using (in order) interpolation,
    climatology, alternate-site data and SOLO, merges the gap filled variants
    into single series and re-calculates the derived quantities (Fg, Fn, Fa,
    humidities, meteorological variables, wind components).  Returns the L4
    data structure ds4, which is falsy if copy_datastructure failed.

    NOTE(review): another l4to6qc() in this file calls l4qc(cf,ds4x,InLevel,x)
    with four arguments; that caller targets a different (4-argument) variant
    of l4qc, not this one - confirm before consolidating the duplicates.
    """
    # !!! code here to use existing L4 file
    # logic
    # if the L4 doesn't exist
    #  - create ds4 by using copy.deepcopy(ds3)
    # if the L4 does exist and the "UseExistingL4File" option is False
    #  - create ds4 by using copy.deepcopy(ds3)
    # if the L4 does exist and the "UseExistingL4File" option is True
    #  - read the contents of the L4 netCDF file
    #  - check the start and end dates of the L3 and L4 data
    #    - if these are the same then tell the user there is nothing to do
    #  - copy the L3 data to the L4 data structure
    #  - replace the L3 data with the L4 data
    #ds4 = copy.deepcopy(ds3)
    ds4 = qcio.copy_datastructure(cf,ds3)
    # ds4 will be empty (logical false) if an error occurs in copy_datastructure
    # return from this routine if this is the case
    if not ds4: return ds4
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf,ds4,"L4")
    # keep a reference to the control file on the data structure
    ds4.cf = cf
    # calculate the available energy
    if "Fa" not in ds4.series.keys():
        qcts.CalculateAvailableEnergy(ds4,Fa_out='Fa',Fn_in='Fn',Fg_in='Fg')
    # create a dictionary to hold the gap filling data
    ds_alt = {}
    # check to see if we have any imports
    qcgf.ImportSeries(cf,ds4)
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf,ds4)
    # now do the meteorological driver gap filling
    for ThisOne in cf["Drivers"].keys():
        if ThisOne not in ds4.series.keys():
            log.error("Series "+ThisOne+" not in data structure"); continue
        # parse the control file for information on how the user wants to do the gap filling
        qcgf.GapFillParseControlFile(cf,ds4,ThisOne,ds_alt)
    # *** start of the section that does the gap filling of the drivers ***
    # fill short gaps using interpolation
    qcgf.GapFillUsingInterpolation(cf,ds4)
    # gap fill using climatology
    qcgf.GapFillFromClimatology(ds4)
    # do the gap filling using the ACCESS output
    qcgf.GapFillFromAlternate(cf,ds4,ds_alt)
    # the alternate/SOLO GUIs can ask for an early exit
    if ds4.returncodes["alternate"]=="quit": return ds4
    # gap fill using SOLO
    qcgf.GapFillUsingSOLO(cf,ds3,ds4)
    if ds4.returncodes["solo"]=="quit": return ds4
    # merge the first group of gap filled drivers into a single series
    qcts.MergeSeriesUsingDict(ds4,merge_order="prerequisite")
    # re-calculate the ground heat flux but only if requested in control file
    opt = qcutils.get_keyvaluefromcf(cf,["Options"],"CorrectFgForStorage",default="No",mode="quiet")
    if opt.lower()!="no":
        qcts.CorrectFgForStorage(cf,ds4,Fg_out='Fg',Fg_in='Fg_Av',Ts_in='Ts',Sws_in='Sws')
    # re-calculate the net radiation
    qcts.CalculateNetRadiation(cf,ds4,Fn_out='Fn',Fsd_in='Fsd',Fsu_in='Fsu',Fld_in='Fld',Flu_in='Flu')
    # re-calculate the available energy
    qcts.CalculateAvailableEnergy(ds4,Fa_out='Fa',Fn_in='Fn',Fg_in='Fg')
    # merge the second group of gap filled drivers into a single series
    qcts.MergeSeriesUsingDict(ds4,merge_order="standard")
    # re-calculate the water vapour concentrations
    qcts.CalculateHumiditiesAfterGapFill(ds4)
    # re-calculate the meteorological variables
    qcts.CalculateMeteorologicalVariables(ds4)
    # the Tumba rhumba
    qcts.CalculateComponentsFromWsWd(ds4)
    # check for any missing data
    qcutils.get_missingingapfilledseries(ds4)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds4)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds4)
    return ds4
def l4to6qc(cf, ds3, AttrLevel, InLevel, OutLevel):
    """
        Fill gaps in met data from other sources
        Integrate SOLO-ANN gap filled fluxes performed externally
        Generates L4 from L3 data
        Generates daily sums excel workbook

        Variable Series:
            Meteorological (MList): Ah_EC, Cc_7500_Av, ps, Ta_EC, Ws_CSAT, Wd_CSAT
            Radiation (RList): Fld, Flu, Fn, Fsd, Fsu
            Soil water content (SwsList): all variables containing Sws in variable name
            Soil (SList): Fg, Ts, SwsList
            Turbulent fluxes (FList): Fc_wpl, Fe_wpl, Fh, ustar
            Output (OList): MList, RList, SList, FList

        Parameters loaded from control file:
            zmd: z-d
            z0: roughness height

        Functions performed:
            qcts.AddMetVars
            qcts.ComputeDailySums
            qcts.InterpolateOverMissing (OList for gaps shorter than 3 observations, OList gaps shorter than 7 observations)
            qcts.GapFillFromAlternate (MList, RList)
            qcts.GapFillFromClimatology (Ah_EC, Fn, Fg, ps, Ta_EC, Ws_CSAT, OList)
            qcts.GapFillFromRatios (Fe, Fh, Fc)
            qcts.ReplaceOnDiff (Ws_CSAT, ustar)
            qcts.UstarFromFh
            qcts.ReplaceWhereMissing (Ustar)
            qcck.do_qcchecks

        Returns ds4, or (ds4, ds5), or (ds4, ds5, ds6) depending on OutLevel.
        """
    if AttrLevel == 'False':
        ds3.globalattributes['Functions'] = ''
        AttrLevel = InLevel
    # check to ensure L4 functions are defined in controlfile
    if qcutils.cfkeycheck(cf, Base='Functions'):
        # x, y and z record whether any L4, L5 and L6 functions were applied
        x = 0
        y = 0
        z = 0
    else:
        log.error('FunctionList not found in control file')
        ds3x = copy.deepcopy(ds3)
        ds3x.globalattributes['nc_level'] = 'L3'
        ds3x.globalattributes['L4Functions'] = 'No L4-L6 functions applied'
        return ds3x
    # handle meta-data and import L4-L6 from external process
    if InLevel == 'L3':
        ds3x = copy.deepcopy(ds3)
    else:
        infilename = qcio.get_infilename_from_cf(cf, InLevel)
        ds3x = qcio.nc_read_series(infilename)
        # copy global attributes missing from the imported file from the L3 data
        for ThisOne in ds3.globalattributes.keys():
            if ThisOne not in ds3x.globalattributes.keys():
                ds3x.globalattributes[ThisOne] = ds3.globalattributes[ThisOne]
        # copy non-descriptive variable attributes from the L3 data
        for ThisOne in ds3.series.keys():
            if ThisOne in ds3x.series.keys():
                for attr in ds3.series[ThisOne]['Attr'].keys():
                    if attr not in ['ancillary_variables', 'long_name', 'standard_name', 'units']:
                        ds3x.series[ThisOne]['Attr'][attr] = ds3.series[ThisOne]['Attr'][attr]
    ds3x.globalattributes['nc_level'] = AttrLevel
    ds3x.globalattributes['EPDversion'] = sys.version
    ds3x.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3x.globalattributes['controlfile_name'] = cf['controlfile_name']
    # keep the imported file's workbook attributes under xlL?_* and restore the L3 ones
    if OutLevel == 'L6':
        ds3x.globalattributes['xlL6_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL6_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL6_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    elif OutLevel == 'L5':
        ds3x.globalattributes['xlL5_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL5_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL5_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    elif OutLevel == 'L4':
        ds3x.globalattributes['xlL4_datemode'] = ds3x.globalattributes['xl_datemode']
        ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
        ds3x.globalattributes['xlL4_filename'] = ds3x.globalattributes['xl_filename']
        ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
        ds3x.globalattributes['xlL4_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
        ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
    qcutils.prepOzFluxVars(cf, ds3x)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1], NEE [umol m-2 s-1] and NEP = - NEE
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='convertFc') and cf['Functions']['convertFc'] == 'True':
        try:
            ds3x.globalattributes['L4Functions'] = ds3x.globalattributes['L4Functions'] + ', convertFc'
        except:
            ds3x.globalattributes['L4Functions'] = 'convertFc'
        if 'Fc_co2' in ds3x.series.keys():
            qcts.ConvertFc(cf, ds3x, Fco2_in='Fc_co2')
        else:
            qcts.ConvertFc(cf, ds3x)
    ds4x = copy.deepcopy(ds3x)
    # restore the original L3 fluxes before the L4 gap filling is done
    for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_co2', 'Fc_c', 'Fe', 'Fh']:
        if ThisOne in ds4x.series.keys() and ThisOne in ds3.series.keys():
            ds4x.series[ThisOne] = ds3.series[ThisOne].copy()
    # blank the partitioned series so they are regenerated at L6
    for ThisOne in ['GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day', 'CE_NEEmax', 'ER_bio', 'PD', 'ER_n', 'ER_LRF']:
        if ThisOne in ds4x.series.keys():
            ds4x.series[ThisOne]['Data'] = numpy.ones(
                len(ds4x.series[ThisOne]['Data']),
                dtype=numpy.float64) * numpy.float64(c.missing_value)
            ds4x.series[ThisOne]['Flag'] = numpy.ones(
                len(ds4x.series[ThisOne]['Data']), dtype=numpy.int32)
    if InLevel == 'L4' or AttrLevel == 'L3':
        ds4, x = l4qc(cf, ds4x, InLevel, x)
        qcutils.get_coverage_individual(ds4)
        qcutils.get_coverage_groups(ds4)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds4)
    if OutLevel == 'L5' or OutLevel == 'L6':
        try:
            ds4y = copy.deepcopy(ds4)
        except:
            # ds4 is unbound when the L4 step was skipped; fall back to the pre-L4 copy
            ds4y = copy.deepcopy(ds4x)
        # fixed: 'Fc_c' was listed twice, causing a redundant second pass
        for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_c', 'Fc_co2', 'Fe', 'Fh']:
            var, var_flag, var_attr = qcutils.GetSeriesasMA(ds3x, ThisOne)
            qcutils.CreateSeries(ds4y, ThisOne, var, Flag=var_flag, Attr=var_attr)
            ds4y.series[ThisOne]['Attr']['long_name'] = var_attr['long_name']
        ds5, y = l5qc(cf, ds4y, y)
        qcutils.get_coverage_individual(ds5)
        qcutils.get_coverage_groups(ds5)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds5)
    if OutLevel == 'L6':
        ds5z = copy.deepcopy(ds5)
        for ThisOne in ['GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day', 'CE_NEEmax', 'ER_bio', 'PD', 'ER_n', 'ER_LRF']:
            if ThisOne in ds3x.series.keys():
                ds5z.series[ThisOne] = ds3x.series[ThisOne].copy()
        ds6, z = l6qc(cf, ds5z, z)
        qcutils.get_coverage_individual(ds6)
        qcutils.get_coverage_groups(ds6)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds6)
    # calculate daily statistics
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Sums'):
        if cf['Functions']['Sums'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', Sums'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', Sums'
            except:
                ds6.globalattributes['L6Functions'] = 'Sums'
            qcts.do_sums(cf, ds6)
        elif cf['Functions']['Sums'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', Sums'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', Sums'
            except:
                ds5.globalattributes['L5Functions'] = 'Sums'
            qcts.do_sums(cf, ds5)
        elif cf['Functions']['Sums'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', Sums'
            try:
                # fixed: read 'L4Functions' (was 'L5Functions', which made this
                # branch always fall through to the except clause)
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', Sums'
            except:
                ds4.globalattributes['L4Functions'] = 'Sums'
            qcts.do_sums(cf, ds4)
    # compute climatology
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='climatology'):
        if cf['Functions']['climatology'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', climatology'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', climatology'
            except:
                ds6.globalattributes['L6Functions'] = 'climatology'
            qcts.do_climatology(cf, ds6)
        elif cf['Functions']['climatology'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', climatology'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', climatology'
            except:
                ds5.globalattributes['L5Functions'] = 'climatology'
            qcts.do_climatology(cf, ds5)
        elif cf['Functions']['climatology'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', climatology'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', climatology'
            except:
                ds4.globalattributes['L4Functions'] = 'climatology'
            qcts.do_climatology(cf, ds4)
    # record in the global attributes when no gap filling / partitioning functions
    # ran, then return the data structures appropriate to the requested output level
    if OutLevel == 'L4' and (InLevel == 'L3' or InLevel == 'L4'):
        if x == 0:
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
            log.warn(' L4: no record of gapfilling functions')
        return ds4
    elif OutLevel == 'L5':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        if y == 0:
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except:
                ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
            log.warn(' L5: no record of gapfilling functions')
        return ds4, ds5
    elif OutLevel == 'L6':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except:
                    ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn(' L4: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds6.globalattributes['L4Functions'] = ds6.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except:
                ds6.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        if y == 0:
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
                try:
                    ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
                except:
                    ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
                log.warn(' L5: no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds6.globalattributes['L5Functions'] = ds6.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except:
                ds6.globalattributes['L5Functions'] = 'No further L5 gapfilling'
        if z == 0:
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L6 partitioning'
            try:
                # fixed: read from ds6 (was ds5, so the appended history was read
                # from the wrong data structure)
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', No further L6 partitioning'
            except:
                ds6.globalattributes['L6Functions'] = 'No further L6 partitioning'
            log.warn(' L6: no record of gapfilling functions')
        return ds4, ds5, ds6
def l4qc(cf, ds3):
    """
    Gap fill the meteorological drivers (L3 -> L4).

    cf  - control file object; [Drivers] lists the series to gap fill,
          [Options] may request CorrectFgForStorage
    ds3 - L3 data structure

    Returns the L4 data structure.  Returns early (and possibly falsy) if
    copy_datastructure fails or the user quits out of the alternate/SOLO
    gap-filling GUIs.
    """
    # TODO: support re-use of an existing L4 file ("UseExistingL4File"):
    #  - if the L4 file does not exist, or the option is False, deep-copy ds3
    #  - otherwise read the L4 netCDF file, compare L3/L4 date ranges and
    #    overlay the existing L4 data on the L3 data
    ds_l4 = qcio.copy_datastructure(cf, ds3)
    # copy_datastructure returns an empty (falsy) object on error; bail out
    if not ds_l4:
        return ds_l4
    # stamp this data structure as L4
    qcutils.UpdateGlobalAttributes(cf, ds_l4, "L4")
    ds_l4.cf = cf
    # make sure the available energy series exists before gap filling starts
    if "Fa" not in ds_l4.series.keys():
        qcts.CalculateAvailableEnergy(ds_l4, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # container for the alternate (e.g. ACCESS) data used for gap filling
    alternate_data = {}
    # bring in any externally imported series
    qcgf.ImportSeries(cf, ds_l4)
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds_l4)
    # read the gap-filling configuration for every driver in the control file
    for driver in cf["Drivers"].keys():
        if driver not in ds_l4.series.keys():
            log.error("Series " + driver + " not in data structure")
            continue
        qcgf.GapFillParseControlFile(cf, ds_l4, driver, alternate_data)
    # *** driver gap filling ***
    # short gaps: interpolation
    qcgf.GapFillUsingInterpolation(cf, ds_l4)
    # longer gaps: climatology
    qcgf.GapFillFromClimatology(ds_l4)
    # alternate data (e.g. ACCESS model output); user may quit from the GUI
    qcgf.GapFillFromAlternate(cf, ds_l4, alternate_data)
    if ds_l4.returncodes["alternate"] == "quit":
        return ds_l4
    # SOLO neural network; user may quit from the GUI
    qcgf.GapFillUsingSOLO(cf, ds3, ds_l4)
    if ds_l4.returncodes["solo"] == "quit":
        return ds_l4
    # merge the first group of gap filled drivers into a single series
    qcts.MergeSeriesUsingDict(ds_l4, merge_order="prerequisite")
    # re-calculate the ground heat flux, but only if requested in the control file
    fg_option = qcutils.get_keyvaluefromcf(cf, ["Options"], "CorrectFgForStorage",
                                           default="No", mode="quiet")
    if fg_option.lower() != "no":
        qcts.CorrectFgForStorage(cf, ds_l4, Fg_out='Fg', Fg_in='Fg_Av',
                                 Ts_in='Ts', Sws_in='Sws')
    # re-calculate the net radiation
    qcts.CalculateNetRadiation(cf, ds_l4, Fn_out='Fn', Fsd_in='Fsd',
                               Fsu_in='Fsu', Fld_in='Fld', Flu_in='Flu')
    # re-calculate the available energy
    qcts.CalculateAvailableEnergy(ds_l4, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # merge the second group of gap filled drivers into a single series
    qcts.MergeSeriesUsingDict(ds_l4, merge_order="standard")
    # re-calculate the water vapour concentrations
    qcts.CalculateHumiditiesAfterGapFill(ds_l4)
    # re-calculate the meteorological variables
    qcts.CalculateMeteorologicalVariables(ds_l4)
    # wind speed components from speed and direction ("the Tumba rhumba")
    qcts.CalculateComponentsFromWsWd(ds_l4)
    # check for any missing data left after gap filling
    qcutils.get_missingingapfilledseries(ds_l4)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds_l4)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds_l4)
    return ds_l4
def l3qc(cf, ds2):
    """
    Generate L3 (corrected) data from L2 data.

    cf  - control file object
    ds2 - L2 data structure
    Returns ds3, a deep copy of ds2 with the corrections applied.

    Functions performed (legacy legend):
     qcts.CorrectSWC (optional*)
     qcck.do_linear (all sites)
     qcts.TaFromTv + qcts.MergeSeries Ta
     qcts.CoordRotation2D (all sites)
     qcts.MassmanStandard (optional*)
     qcts.CalculateFluxes, qcts.FhvtoFh (all sites)
     qcts.Fe_WPL / qcts.Fc_WPL (WPL computed on fluxes, as with the
       Campbell algorithm)
     qcts.CalculateNetRadiation, MergeSeries Fsd/Fn/Ws/Wd
     qcts.CorrectFgForStorage, qcts.CalculateAvailableEnergy
     qcck.do_qcchecks (all sites)
     *: requires ancillary measurements for parameterisation
    """
    # make a copy of the L2 data so the input data structure is not modified
    ds3 = copy.deepcopy(ds2)
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf, ds3, "L3")
    # initialise the global attribute used to document the functions applied
    ds3.globalattributes['Functions'] = ''
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # check to see if we have any imports
    qcgf.ImportSeries(cf, ds3)
    # correct measured soil water content using empirical relationship to collected samples
    qcts.CorrectSWC(cf, ds3)
    # apply linear corrections to the data
    qcck.do_linear(cf, ds3)
    # merge whatever humidities are available
    qcts.MergeHumidities(cf, ds3, convert_units=True)
    # get the air temperature from the CSAT virtual temperature
    qcts.TaFromTv(cf, ds3)
    # merge the HMP and corrected CSAT data
    # NOTE(review): [0, 10] is presumably the list of acceptable QC flag
    # values passed to MergeSeries - confirm against qcts.MergeSeries
    qcts.MergeSeries(cf, ds3, 'Ta', [0, 10], convert_units=True)
    qcutils.CheckUnits(ds3, "Ta", "C", convert_units=True)
    # calculate humidities (absolute, specific and relative) from whatever is available
    qcts.CalculateHumidities(ds3)
    # merge the 7500 CO2 concentration
    qcts.MergeSeries(cf, ds3, 'Cc', [0, 10], convert_units=True)
    qcutils.CheckUnits(ds3, "Cc", "mg/m3", convert_units=True)
    # add relevant meteorological values to L3 data
    qcts.CalculateMeteorologicalVariables(ds3)
    # check to see if the user wants to use the fluxes in the L2 file
    if not qcutils.cfoptionskeylogical(cf, Key="UseL2Fluxes", default=False):
        # check the covariance units and change if necessary
        qcts.CheckCovarianceUnits(ds3)
        # do the 2D coordinate rotation
        qcts.CoordRotation2D(cf, ds3)
        # do the Massman frequency attenuation correction
        qcts.MassmanStandard(cf, ds3)
        # calculate the fluxes
        qcts.CalculateFluxes(cf, ds3)
        # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        qcts.FhvtoFh(cf, ds3)
        # correct the H2O & CO2 flux due to effects of flux on density measurements
        qcts.Fe_WPL(cf, ds3)
        qcts.Fc_WPL(cf, ds3)
    # convert CO2 units if required
    qcutils.ConvertCO2Units(cf, ds3, Cc='Cc')
    # calculate Fc storage term - single height only at present
    qcts.CalculateFcStorage(cf, ds3)
    # convert Fc and Fc_storage units if required
    qcutils.ConvertFcUnits(cf, ds3, Fc='Fc', Fc_storage='Fc_storage')
    # correct Fc for storage term - only recommended if storage calculated from profile available
    qcts.CorrectFcForStorage(cf, ds3)
    # merge the incoming shortwave radiation
    qcts.MergeSeries(cf, ds3, 'Fsd', [0, 10])
    # calculate the net radiation from the Kipp and Zonen CNR1
    qcts.CalculateNetRadiation(cf, ds3, Fn_out='Fn_KZ', Fsd_in='Fsd',
                               Fsu_in='Fsu', Fld_in='Fld', Flu_in='Flu')
    qcts.MergeSeries(cf, ds3, 'Fn', [0, 10])
    # combine wind speed from the Wind Sentry and the CSAT
    qcts.MergeSeries(cf, ds3, 'Ws', [0, 10])
    # combine wind direction from the Wind Sentry and the CSAT
    qcts.MergeSeries(cf, ds3, 'Wd', [0, 10])
    # correct soil heat flux for storage
    #  ... either average the raw ground heat flux, soil temperature and
    #      moisture and then do the correction (OzFlux "standard")
    qcts.AverageSeriesByElements(cf, ds3, 'Ts')
    qcts.AverageSeriesByElements(cf, ds3, 'Sws')
    if qcutils.cfoptionskeylogical(cf, Key='CorrectIndividualFg'):
        #  ... or correct the individual ground heat flux measurements (James' method)
        qcts.CorrectIndividualFgForStorage(cf, ds3)
        qcts.AverageSeriesByElements(cf, ds3, 'Fg')
    else:
        qcts.AverageSeriesByElements(cf, ds3, 'Fg')
        qcts.CorrectFgForStorage(cf, ds3, Fg_out='Fg', Fg_in='Fg',
                                 Ts_in='Ts', Sws_in='Sws')
    # calculate the available energy
    qcts.CalculateAvailableEnergy(ds3, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # create new series using MergeSeries or AverageSeries
    qcck.CreateNewSeries(cf, ds3)
    # create a series of daily averaged soil moisture interpolated back to the time step
    #qcts.DailyAverageSws_Interpolated(cf,ds3,Sws_out='Sws_daily',Sws_in='Sws')
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds3)
    # coordinate gaps in the three main fluxes
    qcck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    qcck.CoordinateAh7500AndFcGaps(cf, ds3)
    # get the statistics for the QC flags and write these to an Excel spreadsheet
    qcio.get_seriesstats(cf, ds3)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)
    return ds3
def _cf_function_enabled(cf, name):
    # True when cf['Functions'][name] exists and is the string 'True'.
    return (qcutils.cfkeycheck(cf, Base='Functions', ThisOne=name) and
            cf['Functions'][name] == 'True')


def _append_l3_function(ds, name):
    # Append *name* to the 'L3Functions' global attribute, creating the
    # attribute on first use.  Replaces the try/except boilerplate repeated
    # throughout the original implementation.
    try:
        ds.globalattributes['L3Functions'] = ds.globalattributes['L3Functions'] + ', ' + name
    except KeyError:
        ds.globalattributes['L3Functions'] = name


def l3qc(cf, ds2):
    """
    Generate the L3 data structure from L2 data (legacy, [Functions]-driven).

    cf  - control file object; the [Functions] section switches each
          processing step on or off with the strings 'True'/'False'
          (special cases: 'Sums' is compared against 'L3', 'DiagnosticMode'
          against 'False')
    ds2 - L2 data structure

    Returns ds3, a deep copy of ds2 with the requested corrections applied.
    Every step that runs is recorded in the 'L3Functions' global attribute,
    which is folded into the 'Functions' global attribute before returning.

    Bug fixed relative to the original: the 'Sums' step read the
    'L5Functions' attribute when appending to 'L3Functions' (copy/paste
    error), which clobbered the accumulated function list; it now appends
    to 'L3Functions'.
    """
    # make a copy of the L2 data so the input data structure is not modified
    ds3 = copy.deepcopy(ds2)
    ds3.globalattributes['nc_level'] = 'L3'
    ds3.globalattributes['EPDversion'] = sys.version
    ds3.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # calculate NDVI from component reflectances
    if _cf_function_enabled(cf, 'NDVI'):
        _append_l3_function(ds3, 'calculateNDVI')
        log.info(' Calculating NDVI from component reflectances ...')
        qcts.CalculateNDVI(cf, ds3)
    # bypass soil temperature correction for Sws (when Ts bad)
    if _cf_function_enabled(cf, 'BypassSwsTcorr'):
        _append_l3_function(ds3, 'BypassSwsTcorr')
        log.info(' Re-computing Sws without temperature correction ...')
        qcts.BypassTcorr(cf, ds3)
    # correct measured soil water content using empirical relationship to collected samples
    if _cf_function_enabled(cf, 'CorrectSWC'):
        _append_l3_function(ds3, 'CorrectSWC')
        log.info(' Correcting soil moisture data ...')
        qcts.CorrectSWC(cf, ds3)
    # apply linear corrections to the data
    if _cf_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'do_linear')
        log.info(' Applying linear corrections ...')
        qcck.do_linear(cf, ds3)
    # determine HMP Ah if not output by datalogger
    if _cf_function_enabled(cf, 'CalculateAh'):
        _append_l3_function(ds3, 'CalculateAh')
        log.info(' Adding HMP Ah to database')
        qcts.CalculateAhHMP(cf, ds3)
    # merge the HMP and corrected 7500 data
    if _cf_function_enabled(cf, 'MergeSeriesAhTa'):
        _append_l3_function(ds3, 'MergeSeriesAhTaCc')
        qcts.MergeSeries(cf, ds3, 'Ah', [0, 10])
        qcts.MergeSeries(cf, ds3, 'Cc', [0, 10])
    # get the air temperature from the CSAT virtual temperature (always done)
    _append_l3_function(ds3, 'TaFromTv')
    qcts.TaFromTv(cf, ds3)
    # merge the HMP and corrected CSAT data
    qcts.MergeSeries(cf, ds3, 'Ta', [0, 10])
    # add relevant meteorological values to L3 data
    if _cf_function_enabled(cf, 'Corrections') or _cf_function_enabled(cf, 'CalculateMetVars'):
        _append_l3_function(ds3, 'CalculateMetVars')
        log.info(' Adding standard met variables to database')
        qcts.CalculateMeteorologicalVariables(ds3)
    # flux corrections; each sub-step shares the single 'Corrections' switch
    if _cf_function_enabled(cf, 'Corrections'):
        # do the 2D coordinate rotation
        _append_l3_function(ds3, 'CoordRotation2D')
        qcts.CoordRotation2D(cf, ds3)
        # do the Massman frequency attenuation correction
        _append_l3_function(ds3, 'Massman')
        qcts.MassmanStandard(cf, ds3)
        # calculate the fluxes
        _append_l3_function(ds3, 'CalculateFluxes')
        qcts.CalculateFluxes(cf, ds3)
        # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        _append_l3_function(ds3, 'FhvtoFh')
        qcts.FhvtoFh(cf, ds3)
        # correct the H2O & CO2 flux due to effects of flux on density measurements
        if _cf_function_enabled(cf, 'WPLcov'):
            # WPL computed on kinematic fluxes (covariances), as with WPL80
            _append_l3_function(ds3, 'WPLcov')
            qcts.do_WPL(cf, ds3, cov='True')
        else:
            # WPL computed on fluxes, as with the Campbell algorithm
            _append_l3_function(ds3, 'WPL')
            qcts.do_WPL(cf, ds3)
    # calculate the net radiation from the Kipp and Zonen CNR1
    if _cf_function_enabled(cf, 'CalculateNetRadiation'):
        _append_l3_function(ds3, 'CalculateNetRadiation')
        qcts.MergeSeries(cf, ds3, 'Fsd', [0, 10])
        qcts.CalculateNetRadiation(ds3, 'Fn_KZ', 'Fsd', 'Fsu', 'Fld', 'Flu')
        qcts.MergeSeries(cf, ds3, 'Fn', [0, 10])
    # combine wind speed from the CSAT and the Wind Sentry
    if _cf_function_enabled(cf, 'MergeSeriesWS'):
        _append_l3_function(ds3, 'MergeSeriesWS')
        qcts.MergeSeries(cf, ds3, 'Ws', [0, 10])
    # average the soil temperature and moisture data
    if _cf_function_enabled(cf, 'Corrections'):
        # .get() guards against 'L3Functions' not existing yet (the original
        # indexed directly and could raise KeyError here)
        if 'SoilAverage' not in ds3.globalattributes.get('L3Functions', ''):
            _append_l3_function(ds3, 'SoilAverage')
        # interpolate over any remaining gaps up to 3 hours in length
        qcts.AverageSeriesByElementsI(cf, ds3, 'Ts')
        qcts.AverageSeriesByElementsI(cf, ds3, 'Sws')
        # correct the measured soil heat flux for storage in the soil layer above the sensor
        _append_l3_function(ds3, 'CorrectFgForStorage')
        if _cf_function_enabled(cf, 'IndividualFgCorrection'):
            qcts.CorrectIndividualFgForStorage(cf, ds3)
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
        else:
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
            qcts.CorrectGroupFgForStorage(cf, ds3)
    # calculate the available energy
    if _cf_function_enabled(cf, 'CalculateAvailableEnergy'):
        _append_l3_function(ds3, 'CalculateAvailableEnergy')
        qcts.CalculateAvailableEnergy(ds3)
    # prepare OzFlux variables unless DiagnosticMode is explicitly enabled
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='DiagnosticMode'):
        if cf['Functions']['DiagnosticMode'] == 'False':
            qcutils.prepOzFluxVars(cf, ds3)
    else:
        qcutils.prepOzFluxVars(cf, ds3)
    # calculate specific humidity and saturated specific humidity profile
    if _cf_function_enabled(cf, 'qTprofile'):
        _append_l3_function(ds3, 'qTprofile')
        qcts.CalculateSpecificHumidityProfile(cf, ds3)
    # calculate Penman-Monteith inversion
    if _cf_function_enabled(cf, 'PenmanMonteith'):
        _append_l3_function(ds3, 'PenmanMonteith')
        qcts.do_PenmanMonteith(cf, ds3)
    # calculate bulk Richardson numbers
    if _cf_function_enabled(cf, 'bulkRichardson'):
        _append_l3_function(ds3, 'bulkRichardson')
        qcts.do_bulkRichardson(cf, ds3)
    # re-apply the quality control checks (range, diurnal and rules)
    if _cf_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)')
        qcck.do_qcchecks(cf, ds3)
    # quality control checks (range, diurnal and rules) without flux post-processing
    if _cf_function_enabled(cf, 'QCChecks'):
        qcck.do_qcchecks(cf, ds3)
    # apply the ustar filter
    if _cf_function_enabled(cf, 'ustarFilter'):
        _append_l3_function(ds3, 'ustarFilter')
        qcts.FilterFcByUstar(cf, ds3)
    # coordinate gaps in the three main fluxes
    if _cf_function_enabled(cf, 'CoordinateFluxGaps'):
        _append_l3_function(ds3, 'CoordinateFluxGaps')
        qcck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    if _cf_function_enabled(cf, 'Corrections'):
        _append_l3_function(ds3, 'CoordinateAh7500AndFcGaps')
        qcck.CoordinateAh7500AndFcGaps(cf, ds3)
    # calculate ET at observation interval
    if _cf_function_enabled(cf, 'CalculateET'):
        _append_l3_function(ds3, 'CalculateET')
        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds3, 'L3')
    # run MOST (Buckingham Pi) 2d footprint model (Kljun et al. 2004)
    if _cf_function_enabled(cf, 'footprint'):
        _append_l3_function(ds3, 'footprint')
        qcts.do_footprint_2d(cf, ds3)
    if _cf_function_enabled(cf, 'Corrections'):
        qcio.get_seriesstats(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1],
    # NEE [umol m-2 s-1] and NEP = - NEE
    if _cf_function_enabled(cf, 'convertFc'):
        _append_l3_function(ds3, 'convertFc')
        qcts.ConvertFc(cf, ds3)
    # convert Fc [mgCO2 m-2 s-1] to Fc [umol m-2 s-1]
    if _cf_function_enabled(cf, 'JasonFc'):
        _append_l3_function(ds3, 'convertFc (umol only)')
        qcts.ConvertFcJason(cf, ds3)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)
    # compute water-use efficiency from flux-gradient similarity
    # (appendix A, Scanlon & Sahu 2008)
    if _cf_function_enabled(cf, 'wue'):
        _append_l3_function(ds3, 'wue')
        log.info(' Calculating water-use efficiency from flux-gradient similarity')
        qcts.CalculateWUEfromSimilarity(cf, ds3)
    # compute climatology for L3 data
    if _cf_function_enabled(cf, 'climatology'):
        _append_l3_function(ds3, 'climatology')
        qcts.do_climatology(cf, ds3)
    # sums at L3 ('Sums' is compared against 'L3', not 'True')
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Sums') and cf['Functions']['Sums'] == 'L3':
        # BUG FIX: the original read ds3.globalattributes['L5Functions'] here
        _append_l3_function(ds3, 'Sums')
        qcts.do_sums(cf, ds3)
    # fold the accumulated L3 function list into the 'Functions' attribute
    try:
        ds3.globalattributes['Functions'] = ds3.globalattributes['Functions'] + ', ' + ds3.globalattributes['L3Functions']
    except KeyError:
        ds3.globalattributes['Functions'] = ds3.globalattributes['L3Functions']
    return ds3
def l3qc(cf, ds2):
    """
    Build the L3 data structure from L2 data.

    cf  - control file object
    ds2 - L2 data structure

    Returns a deep copy of ds2 with the L3 corrections applied, or None
    when no CO2 label ('CO2' or 'Cc') can be found in the control file.
    """
    # work on a deep copy so the L2 data structure is left untouched
    ds = copy.deepcopy(ds2)
    # stamp the copy as L3 and record which control file produced it
    qcutils.UpdateGlobalAttributes(cf, ds, "L3")
    ds.globalattributes['controlfile_name'] = cf['controlfile_name']
    # bring in any externally imported series
    qcgf.ImportSeries(cf, ds)
    # apply linear corrections to the data
    qcck.do_linear(cf, ds)
    # --- humidities ---
    qcts.MergeHumidities(cf, ds, convert_units=True)
    # --- temperatures ---
    # air temperature from the CSAT virtual temperature, then merge with HMP
    qcts.TaFromTv(cf, ds)
    qcts.MergeSeries(cf, ds, "Ta", convert_units=True)
    qcutils.CheckUnits(ds, "Ta", "C", convert_units=True)
    # absolute, specific and relative humidity from whatever is available
    qcts.CalculateHumidities(ds)
    # --- CO2 concentration ---
    # map whichever CO2 alias the control file uses onto a single label
    # (PRI 09/08/2017: should become a general alias-checking routine run
    # at the start of L3 processing)
    if "CO2" in cf["Variables"]:
        co2_label = "CO2"
    elif "Cc" in cf["Variables"]:
        co2_label = "Cc"
    else:
        msg = "Label for CO2 ('CO2','Cc') not found in control file"
        logger.error(msg)
        return
    qcts.MergeSeries(cf, ds, co2_label, convert_units=True)
    # --- meteorological variables ---
    qcts.CalculateMeteorologicalVariables(ds)
    # --- fluxes from covariances (skipped when the L2 fluxes are reused) ---
    if not qcutils.cfoptionskeylogical(cf, Key="UseL2Fluxes", default=False):
        # covariance units, 2D rotation, Massman correction, flux calculation
        qcts.CheckCovarianceUnits(ds)
        qcts.CoordRotation2D(cf, ds)
        qcts.MassmanStandard(cf, ds)
        qcts.CalculateFluxes(cf, ds)
        # wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        qcts.FhvtoFh(cf, ds)
        # WPL density corrections for the H2O and CO2 fluxes
        qcts.Fe_WPL(cf, ds)
        qcts.Fc_WPL(cf, ds)
    # --- Monin-Obukhov length ---
    qcts.CalculateMoninObukhovLength(ds)
    # --- CO2 and Fc ---
    qcutils.ConvertCO2Units(cf, ds, CO2=co2_label)
    # Fc storage term - single height only at present
    qcts.CalculateFcStorageSinglePoint(cf, ds, Fc_out='Fc_single', CO2_in=co2_label)
    qcutils.ConvertFcUnits(cf, ds)
    # merge every Fc* variable the control file asks to have merged
    for name in cf["Variables"].keys():
        if name.startswith("Fc") and "MergeSeries" in cf["Variables"][name].keys():
            qcts.MergeSeries(cf, ds, name, save_originals=True)
    # storage correction - only recommended with profile storage data
    qcts.CorrectFcForStorage(cf, ds)
    # --- radiation ---
    qcts.MergeSeries(cf, ds, 'Fsd')
    # net radiation from the Kipp and Zonen CNR1
    qcts.CalculateNetRadiation(cf, ds, Fn_out='Fn_KZ', Fsd_in='Fsd',
                               Fsu_in='Fsu', Fld_in='Fld', Flu_in='Flu')
    qcts.MergeSeries(cf, ds, 'Fn')
    # --- wind speed and direction (Wind Sentry and SONIC) ---
    qcts.MergeSeries(cf, ds, 'Ws')
    qcts.MergeSeries(cf, ds, 'Wd')
    # --- soil ---
    qcts.AverageSeriesByElements(cf, ds, 'Ts')
    qcts.AverageSeriesByElements(cf, ds, 'Sws')
    if qcutils.cfoptionskeylogical(cf, Key='CorrectIndividualFg'):
        # correct each ground heat flux sensor first (James' method), then average
        qcts.CorrectIndividualFgForStorage(cf, ds)
        qcts.AverageSeriesByElements(cf, ds, 'Fg')
    else:
        # average first, then correct the average (OzFlux "standard")
        qcts.AverageSeriesByElements(cf, ds, 'Fg')
        qcts.CorrectFgForStorage(cf, ds, Fg_out='Fg', Fg_in='Fg',
                                 Ts_in='Ts', Sws_in='Sws')
    # available energy
    qcts.CalculateAvailableEnergy(ds, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # --- derived series, QC and bookkeeping ---
    qcck.CreateNewSeries(cf, ds)
    qcck.do_qcchecks(cf, ds)
    qcck.CoordinateFluxGaps(cf, ds)
    qcck.CoordinateAh7500AndFcGaps(cf, ds)
    # missing data and QC flags must agree
    qcutils.CheckQCFlags(ds)
    # QC flag statistics to an Excel spreadsheet
    qcio.get_seriesstats(cf, ds)
    # data coverage per variable and per group
    qcutils.get_coverage_individual(ds)
    qcutils.get_coverage_groups(ds)
    return ds