Example no. 1
def climatology(cf):
    nc_filename = qcio.get_infilenamefromcf(cf)
    if not qcutils.file_exists(nc_filename): return
    xl_filename = nc_filename.replace(".nc", "_Climatology.xls")
    xlFile = xlwt.Workbook()
    ds = qcio.nc_read_series(nc_filename)
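    # ds.series maps each variable name to a dictionary that holds the values
    # under the "Data" key; the QC flag and the attributes are what
    # qcutils.GetSeriesasMA returns as "f" and "a" further down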
    # calculate Fa if it is not in the data structure
    if "Fa" not in ds.series.keys():
        if "Fn" in ds.series.keys() and "Fg" in ds.series.keys():
            qcts.CalculateAvailableEnergy(ds,
                                          Fa_out='Fa',
                                          Fn_in='Fn',
                                          Fg_in='Fg')
        else:
            log.warning(" Climatology: Fn or Fg not in data struicture")
    # get the time step
    ts = int(ds.globalattributes['time_step'])
    # get the site name
    SiteName = ds.globalattributes['site_name']
    # get the datetime series
    dt = ds.series['DateTime']['Data']
    Hdh = ds.series['Hdh']['Data']
    Month = ds.series['Month']['Data']
    # get the initial start and end dates
    StartDate = str(dt[0])
    EndDate = str(dt[-1])
    # find the start index of the first whole day (time=00:30)
    si = qcutils.GetDateIndex(dt,
                              StartDate,
                              ts=ts,
                              default=0,
                              match='startnextday')
    # find the end index of the last whole day (time=00:00)
    ei = qcutils.GetDateIndex(dt,
                              EndDate,
                              ts=ts,
                              default=-1,
                              match='endpreviousday')
    # get local views of the datetime, Hdh and Month series
    ldt = dt[si:ei + 1]
    Hdh = Hdh[si:ei + 1]
    Month = Month[si:ei + 1]
    # get the number of time steps in a day and the number of days in the data
    ntsInDay = int(24.0 * 60.0 / float(ts))
    nDays = len(ldt) // ntsInDay
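    # e.g. for a 30 minute time step: ntsInDay = int(24.0*60.0/30.0) = 48, so a
    # whole-days record of 96 time steps gives nDays = 96 // 48 = 2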

    for ThisOne in cf['Variables'].keys():
        if "AltVarName" in cf['Variables'][ThisOne].keys():
            ThisOne = cf['Variables'][ThisOne]["AltVarName"]
        if ThisOne in ds.series.keys():
            log.info(" Doing climatology for " + ThisOne)
            data, f, a = qcutils.GetSeriesasMA(ds, ThisOne, si=si, ei=ei)
            if numpy.ma.count(data) == 0:
                log.warning(" No data for " + ThisOne + ", skipping ...")
                continue
            fmt_str = get_formatstring(cf, ThisOne, fmt_def='')
            xlSheet = xlFile.add_sheet(ThisOne)
            Av_all = do_diurnalstats(Month,
                                     Hdh,
                                     data,
                                     xlSheet,
                                     format_string=fmt_str,
                                     ts=ts)
            # now do it for each day
            # we want to preserve any data that has been truncated by the use of the "startnextday"
            # and "endpreviousday" match options used above.  Here we revisit the start and end indices
            # and adjust these backwards and forwards respectively if data has been truncated.
            nDays_daily = nDays
            ei_daily = ei
            si_daily = si
            sdate = ldt[0]
            edate = ldt[-1]
            # is there data after the current end date?
            if dt[-1] > ldt[-1]:
                # if so, push the end index forward by 1 day so it is included
                ei_daily = ei + ntsInDay
                nDays_daily = nDays_daily + 1
                edate = ldt[-1] + datetime.timedelta(days=1)
            # is there data before the current start date?
            if dt[0] < ldt[0]:
                # if so, push the start index back by 1 day so it is included
                si_daily = si - ntsInDay
                nDays_daily = nDays_daily + 1
                sdate = ldt[0] - datetime.timedelta(days=1)
            # get the data and use the "pad" option to add missing data if required to
            # complete the extra days
            data, f, a = qcutils.GetSeriesasMA(ds,
                                               ThisOne,
                                               si=si_daily,
                                               ei=ei_daily,
                                               mode="pad")
            data_daily = data.reshape(nDays_daily, ntsInDay)
            xlSheet = xlFile.add_sheet(ThisOne + '(day)')
            write_data_1columnpertimestep(xlSheet,
                                          data_daily,
                                          ts,
                                          startdate=sdate,
                                          format_string=fmt_str)
            data_daily_i = do_2dinterpolation(data_daily)
            xlSheet = xlFile.add_sheet(ThisOne + 'i(day)')
            write_data_1columnpertimestep(xlSheet,
                                          data_daily_i,
                                          ts,
                                          startdate=sdate,
                                          format_string=fmt_str)
        elif ThisOne == "EF":
            log.info(" Doing evaporative fraction")
            EF = numpy.ma.zeros([48, 12]) + float(c.missing_value)
            Hdh, f, a = qcutils.GetSeriesasMA(ds, 'Hdh', si=si, ei=ei)
            Fa, f, a = qcutils.GetSeriesasMA(ds, 'Fa', si=si, ei=ei)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fa_Num, Hr, Fa_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fa[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fa_Num > 4) & (Fe_Num > 4))
                EF[:, m - 1][index] = Fe_Av[index] / Fa_Av[index]
            # reject EF values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'EF')
            EF = numpy.ma.filled(
                numpy.ma.masked_where((EF > upr) | (EF < lwr), EF),
                float(c.missing_value))
            # write the EF to the Excel file
            xlSheet = xlFile.add_sheet('EF')
            write_data_1columnpermonth(xlSheet, EF, ts, format_string='0.00')
            # do the 2D interpolation to fill missing EF values
            EFi = do_2dinterpolation(EF)
            xlSheet = xlFile.add_sheet('EFi')
            write_data_1columnpermonth(xlSheet, EFi, ts, format_string='0.00')
            # now do EF for each day
            Fa, f, a = qcutils.GetSeriesasMA(ds, 'Fa', si=si, ei=ei)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            EF = Fe / Fa
            EF = numpy.ma.filled(
                numpy.ma.masked_where((EF > upr) | (EF < lwr), EF),
                float(c.missing_value))
            EF_daily = EF.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('EF(day)')
            write_data_1columnpertimestep(xlSheet,
                                          EF_daily,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
            EFi = do_2dinterpolation(EF_daily)
            xlSheet = xlFile.add_sheet('EFi(day)')
            write_data_1columnpertimestep(xlSheet,
                                          EFi,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
        elif ThisOne == "BR":
            log.info(" Doing Bowen ratio")
            BR = numpy.ma.zeros([48, 12]) + float(c.missing_value)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fh, f, a = qcutils.GetSeriesasMA(ds, 'Fh', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fh_Num, Hr, Fh_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fh[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fh_Num > 4) & (Fe_Num > 4))
                BR[:, m - 1][index] = Fh_Av[index] / Fe_Av[index]
            # reject BR values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'BR')
            BR = numpy.ma.filled(
                numpy.ma.masked_where((BR > upr) | (BR < lwr), BR),
                float(c.missing_value))
            # write the BR to the Excel file
            xlSheet = xlFile.add_sheet('BR')
            write_data_1columnpermonth(xlSheet, BR, ts, format_string='0.00')
            # do the 2D interpolation to fill missing BR values
            BRi = do_2dinterpolation(BR)
            xlSheet = xlFile.add_sheet('BRi')
            write_data_1columnpermonth(xlSheet, BRi, ts, format_string='0.00')
            # now do BR for each day ...
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fh, f, a = qcutils.GetSeriesasMA(ds, 'Fh', si=si, ei=ei)
            BR = Fh / Fe
            BR = numpy.ma.filled(
                numpy.ma.masked_where((BR > upr) | (BR < lwr), BR),
                float(c.missing_value))
            BR_daily = BR.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('BR(day)')
            write_data_1columnpertimestep(xlSheet,
                                          BR_daily,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
            BRi = do_2dinterpolation(BR_daily)
            xlSheet = xlFile.add_sheet('BRi(day)')
            write_data_1columnpertimestep(xlSheet,
                                          BRi,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
        elif ThisOne == "WUE":
            log.info(" Doing ecosystem WUE")
            WUE = numpy.ma.zeros([48, 12]) + float(c.missing_value)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fc, f, a = qcutils.GetSeriesasMA(ds, 'Fc', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fc_Num, Hr, Fc_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fc[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fc_Num > 4) & (Fe_Num > 4))
                WUE[:, m - 1][index] = Fc_Av[index] / Fe_Av[index]
            # reject WUE values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'WUE')
            WUE = numpy.ma.filled(
                numpy.ma.masked_where((WUE > upr) | (WUE < lwr), WUE),
                float(c.missing_value))
            # write the WUE to the Excel file
            xlSheet = xlFile.add_sheet('WUE')
            write_data_1columnpermonth(xlSheet,
                                       WUE,
                                       ts,
                                       format_string='0.00000')
            # do the 2D interpolation to fill missing WUE values
            WUEi = do_2dinterpolation(WUE)
            xlSheet = xlFile.add_sheet('WUEi')
            write_data_1columnpermonth(xlSheet,
                                       WUEi,
                                       ts,
                                       format_string='0.00000')
            # now do WUE for each day ...
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fc, f, a = qcutils.GetSeriesasMA(ds, 'Fc', si=si, ei=ei)
            WUE = Fc / Fe
            WUE = numpy.ma.filled(
                numpy.ma.masked_where((WUE > upr) | (WUE < lwr), WUE),
                float(c.missing_value))
            WUE_daily = WUE.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('WUE(day)')
            write_data_1columnpertimestep(xlSheet,
                                          WUE_daily,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00000')
            WUEi = do_2dinterpolation(WUE_daily)
            xlSheet = xlFile.add_sheet('WUEi(day)')
            write_data_1columnpertimestep(xlSheet,
                                          WUEi,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00000')
        else:
            log.warning(" qcclim.climatology: requested variable " + ThisOne +
                        " not in data structure")
            continue
    log.info(" Saving Excel file " + xl_filename)
    xlFile.save(xl_filename)
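The per-day sheets and diurnal statistics above rest on the same pattern: take a record that covers whole days, reshape it into a (days, time steps per day) matrix and average across the days for each time of day. A minimal, self-contained numpy sketch of that pattern on synthetic half-hourly data (plain numpy only, not the OzFluxQC data structures):

import numpy

ts = 30                                    # time step in minutes
ntsInDay = int(24.0 * 60.0 / float(ts))    # 48 time steps per day
nDays = 10
# synthetic half-hourly record covering whole days, with a few gaps
raw = numpy.random.rand(nDays * ntsInDay)
raw[5::97] = numpy.nan
data = numpy.ma.masked_invalid(raw)
# one row per day, one column per time of day (cf. data.reshape above)
data_daily = data.reshape(nDays, ntsInDay)
# diurnal composite: average over days for each time of day, ignoring gaps
diurnal_mean = numpy.ma.mean(data_daily, axis=0)
print(diurnal_mean.shape)                  # (48,)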
Example no. 2
def l4qc(cf, ds3):

    # !!! code here to use existing L4 file
    # logic
    # if the L4 doesn't exist
    #  - create ds4 by using copy.deepcopy(ds3)
    # if the L4 does exist and the "UseExistingL4File" option is False
    #  - create ds4 by using copy.deepcopy(ds3)
    # if the L4 does exist and the "UseExistingL4File" option is True
    #  - read the contents of the L4 netCDF file
    #  - check the start and end dates of the L3 and L4 data
    #     - if these are the same then tell the user there is nothing to do
    #  - copy the L3 data to the L4 data structure
    #  - replace the L3 data with the L4 data
    #ds4 = copy.deepcopy(ds3)
    ds4 = qcio.copy_datastructure(cf, ds3)
    # ds4 will be empty (logical false) if an error occurs in copy_datastructure
    # return from this routine if this is the case
    if not ds4: return ds4
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf, ds4, "L4")
    ds4.cf = cf
    # calculate the available energy
    if "Fa" not in ds4.series.keys():
        qcts.CalculateAvailableEnergy(ds4, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # create a dictionary to hold the gap filling data
    ds_alt = {}
    # check to see if we have any imports
    qcgf.ImportSeries(cf, ds4)
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds4)
    # now do the meteorological driver gap filling
    for ThisOne in cf["Drivers"].keys():
        if ThisOne not in ds4.series.keys():
            log.error("Series " + ThisOne + " not in data structure")
            continue
        # parse the control file for information on how the user wants to do the gap filling
        qcgf.GapFillParseControlFile(cf, ds4, ThisOne, ds_alt)
    # *** start of the section that does the gap filling of the drivers ***
    # fill short gaps using interpolation
    qcgf.GapFillUsingInterpolation(cf, ds4)
    # gap fill using climatology
    qcgf.GapFillFromClimatology(ds4)
    # do the gap filling using alternate data (e.g. ACCESS output)
    qcgf.GapFillFromAlternate(cf, ds4, ds_alt)
    if ds4.returncodes["alternate"] == "quit": return ds4
    # gap fill using SOLO
    qcgf.GapFillUsingSOLO(cf, ds3, ds4)
    if ds4.returncodes["solo"] == "quit": return ds4
    # merge the first group of gap filled drivers into a single series
    qcts.MergeSeriesUsingDict(ds4, merge_order="prerequisite")
    # re-calculate the ground heat flux but only if requested in control file
    opt = qcutils.get_keyvaluefromcf(cf, ["Options"],
                                     "CorrectFgForStorage",
                                     default="No",
                                     mode="quiet")
    if opt.lower() != "no":
        qcts.CorrectFgForStorage(cf,
                                 ds4,
                                 Fg_out='Fg',
                                 Fg_in='Fg_Av',
                                 Ts_in='Ts',
                                 Sws_in='Sws')
    # re-calculate the net radiation
    qcts.CalculateNetRadiation(cf,
                               ds4,
                               Fn_out='Fn',
                               Fsd_in='Fsd',
                               Fsu_in='Fsu',
                               Fld_in='Fld',
                               Flu_in='Flu')
    # re-calculate the available energy
    qcts.CalculateAvailableEnergy(ds4, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # merge the second group of gap filled drivers into a single series
    qcts.MergeSeriesUsingDict(ds4, merge_order="standard")
    # re-calculate the water vapour concentrations
    qcts.CalculateHumiditiesAfterGapFill(ds4)
    # re-calculate the meteorological variables
    qcts.CalculateMeteorologicalVariables(ds4)
    # the Tumba rhumba: calculate the wind components from wind speed and direction
    qcts.CalculateComponentsFromWsWd(ds4)
    # check for any missing data
    qcutils.get_missingingapfilledseries(ds4)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds4)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds4)

    return ds4
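The comment block at the top of l4qc() spells out the decision that qcio.copy_datastructure() is expected to make. A standalone sketch of that logic, assuming a dict-like cf and hypothetical helpers for the L4 file name and reader (these names are not the OzFluxQC API):

import copy
import os

def copy_or_reuse_l4(cf, ds3, get_l4_filename, read_l4_file):
    """Sketch of the 'UseExistingL4File' logic described in the comments above."""
    l4_name = get_l4_filename(cf)                 # hypothetical helper
    use_existing = cf.get("Options", {}).get("UseExistingL4File", "No") == "Yes"
    if not os.path.exists(l4_name) or not use_existing:
        # no L4 file yet, or the user does not want to reuse it: start from the L3 data
        return copy.deepcopy(ds3)
    # read the existing L4 file and compare its date range with the L3 data
    ds4 = read_l4_file(l4_name)                   # hypothetical helper
    ldt3 = ds3.series["DateTime"]["Data"]
    ldt4 = ds4.series["DateTime"]["Data"]
    if ldt3[0] == ldt4[0] and ldt3[-1] == ldt4[-1]:
        print("L3 and L4 data cover the same period, nothing to do")
    # (the real routine would also copy any new L3 data into the L4 structure here)
    return ds4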
Example no. 3
def l3qc(cf, ds2):
    """
        Corrections
        Generates L3 from L2 data
        
        Functions performed:
            qcts.AddMetVars (optional)
            qcts.CorrectSWC (optional*)
            qcck.do_linear (all sites)
            qcutils.GetMergeList + qcts.MergeSeries Ah_EC (optional)x
            qcts.TaFromTv (optional)
            qcutils.GetMergeList + qcts.MergeSeries Ta_EC (optional)x
            qcts.CoordRotation2D (all sites)
            qcts.MassmanApprox (optional*)y
            qcts.Massman (optional*)y
            qcts.CalculateFluxes (used if Massman not optioned)x
            qcts.CalculateFluxesRM (used if Massman optioned)y
            qcts.FhvtoFh (all sites)
            qcts.Fe_WPL (WPL computed on fluxes, as with Campbell algorithm)+x
            qcts.Fc_WPL (WPL computed on fluxes, as with Campbell algorithm)+x
            qcts.Fe_WPLcov (WPL computed on kinematic fluxes (ie, covariances), as with WPL80)+y
            qcts.Fc_WPLcov (WPL computed on kinematic fluxes (ie, covariances), as with WPL80)+y
            qcts.CalculateNetRadiation (optional)
            qcutils.GetMergeList + qcts.MergeSeries Fsd (optional)
            qcutils.GetMergeList + qcts.MergeSeries Fn (optional*)
            qcts.InterpolateOverMissing (optional)
            AverageSeriesByElements (optional)
            qcts.CorrectFgForStorage (all sites)
            qcts.Average3SeriesByElements (optional)
            qcts.CalculateAvailableEnergy (optional)
            qcck.do_qcchecks (all sites)
            qcck.gaps (optional)
            
            *:  requires ancillary measurements for parameterisation
            +:  each site requires one pair, either Fe_WPL & Fc_WPL (default) or Fe_WPLcov & Fc_WPLcov
            x:  required together in option set
            y:  required together in option set
        """
    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf, ds3, "L3")
    # initialise the global attribute to document the functions used
    ds3.globalattributes['Functions'] = ''
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # check to see if we have any imports
    qcgf.ImportSeries(cf, ds3)
    # correct measured soil water content using empirical relationship to collected samples
    qcts.CorrectSWC(cf, ds3)
    # apply linear corrections to the data
    qcck.do_linear(cf, ds3)
    # merge whatever humidities are available
    qcts.MergeHumidities(cf, ds3, convert_units=True)
    # get the air temperature from the CSAT virtual temperature
    qcts.TaFromTv(cf, ds3)
    # merge the HMP and corrected CSAT data
    qcts.MergeSeries(cf, ds3, 'Ta', [0, 10], convert_units=True)
    qcutils.CheckUnits(ds3, "Ta", "C", convert_units=True)
    # calculate humidities (absolute, specific and relative) from whatever is available
    qcts.CalculateHumidities(ds3)
    # merge the 7500 CO2 concentration
    qcts.MergeSeries(cf, ds3, 'Cc', [0, 10], convert_units=True)
    qcutils.CheckUnits(ds3, "Cc", "mg/m3", convert_units=True)
    # add relevant meteorological values to L3 data
    qcts.CalculateMeteorologicalVariables(ds3)
    # check to see if the user wants to use the fluxes in the L2 file
    if not qcutils.cfoptionskeylogical(cf, Key="UseL2Fluxes", default=False):
        # check the covariance units and change if necessary
        qcts.CheckCovarianceUnits(ds3)
        # do the 2D coordinate rotation
        qcts.CoordRotation2D(cf, ds3)
        # do the Massman frequency attenuation correction
        qcts.MassmanStandard(cf, ds3)
        # calculate the fluxes
        qcts.CalculateFluxes(cf, ds3)
        # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        qcts.FhvtoFh(cf, ds3)
        # correct the H2O & CO2 flux due to effects of flux on density measurements
        qcts.Fe_WPL(cf, ds3)
        qcts.Fc_WPL(cf, ds3)
    # convert CO2 units if required
    qcutils.ConvertCO2Units(cf, ds3, Cc='Cc')
    # calculate Fc storage term - single height only at present
    qcts.CalculateFcStorage(cf, ds3)
    # convert Fc and Fc_storage units if required
    qcutils.ConvertFcUnits(cf, ds3, Fc='Fc', Fc_storage='Fc_storage')
    # correct Fc for storage term - only recommended if storage calculated from profile available
    qcts.CorrectFcForStorage(cf, ds3)
    # merge the incoming shortwave radiation
    qcts.MergeSeries(cf, ds3, 'Fsd', [0, 10])
    # calculate the net radiation from the Kipp and Zonen CNR1
    qcts.CalculateNetRadiation(cf,
                               ds3,
                               Fn_out='Fn_KZ',
                               Fsd_in='Fsd',
                               Fsu_in='Fsu',
                               Fld_in='Fld',
                               Flu_in='Flu')
    qcts.MergeSeries(cf, ds3, 'Fn', [0, 10])
    # combine wind speed from the Wind Sentry and the CSAT
    qcts.MergeSeries(cf, ds3, 'Ws', [0, 10])
    # combine wind direction from the Wind Sentry and the CSAT
    qcts.MergeSeries(cf, ds3, 'Wd', [0, 10])
    # correct soil heat flux for storage
    #    ... either average the raw ground heat flux, soil temperature and moisture
    #        and then do the correction (OzFlux "standard")
    qcts.AverageSeriesByElements(cf, ds3, 'Ts')
    qcts.AverageSeriesByElements(cf, ds3, 'Sws')
    if qcutils.cfoptionskeylogical(cf, Key='CorrectIndividualFg'):
        #    ... or correct the individual ground heat flux measurements (James' method)
        qcts.CorrectIndividualFgForStorage(cf, ds3)
        qcts.AverageSeriesByElements(cf, ds3, 'Fg')
    else:
        qcts.AverageSeriesByElements(cf, ds3, 'Fg')
        qcts.CorrectFgForStorage(cf,
                                 ds3,
                                 Fg_out='Fg',
                                 Fg_in='Fg',
                                 Ts_in='Ts',
                                 Sws_in='Sws')
    # calculate the available energy
    qcts.CalculateAvailableEnergy(ds3, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # create new series using MergeSeries or AverageSeries
    qcck.CreateNewSeries(cf, ds3)
    # create a series of daily averaged soil moisture interpolated back to the time step
    #qcts.DailyAverageSws_Interpolated(cf,ds3,Sws_out='Sws_daily',Sws_in='Sws')
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds3)
    # coordinate gaps in the three main fluxes
    qcck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    qcck.CoordinateAh7500AndFcGaps(cf, ds3)
    # get the statistics for the QC flags and write these to an Excel spreadsheet
    qcio.get_seriesstats(cf, ds3)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)

    return ds3
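For context, l3qc() normally sits between reading the L2 netCDF file and writing the L3 file. A sketch of that call chain, assuming cf and the file names are already set up; only qcio.nc_read_series appears in these examples, the two write calls are assumptions about the qcio API:

ds2 = qcio.nc_read_series(l2_filename)        # read the L2 data
ds3 = l3qc(cf, ds2)                           # apply the L3 corrections above
nc_file = qcio.nc_open_write(l3_filename)     # assumed qcio helper
qcio.nc_write_series(nc_file, ds3)            # assumed qcio helper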
Example no. 4
def l3qc(cf, ds2):
    """
    """
    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf, ds3, "L3")
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # check to see if we have any imports
    qcgf.ImportSeries(cf, ds3)
    # apply linear corrections to the data
    qcck.do_linear(cf, ds3)
    # ************************
    # *** Merge humidities ***
    # ************************
    # merge whatever humidities are available
    qcts.MergeHumidities(cf, ds3, convert_units=True)
    # **************************
    # *** Merge temperatures ***
    # **************************
    # get the air temperature from the CSAT virtual temperature
    qcts.TaFromTv(cf, ds3)
    # merge the HMP and corrected CSAT data
    qcts.MergeSeries(cf, ds3, "Ta", convert_units=True)
    qcutils.CheckUnits(ds3, "Ta", "C", convert_units=True)
    # ***************************
    # *** Calculate humidities ***
    # ***************************
    # calculate humidities (absolute, specific and relative) from whatever is available
    qcts.CalculateHumidities(ds3)
    # ********************************
    # *** Merge CO2 concentrations ***
    # ********************************
    # merge the 7500 CO2 concentration
    # PRI 09/08/2017 possibly the ugliest thing I have done yet
    # This needs to be abstracted to a general alias checking routine at the
    # start of the L3 processing so that possible aliases are mapped to a single
    # set of variable names.
    if "CO2" in cf["Variables"]:
        CO2 = "CO2"
    elif "Cc" in cf["Variables"]:
        CO2 = "Cc"
    else:
        msg = "Label for CO2 ('CO2','Cc') not found in control file"
        logger.error(msg)
        return
    qcts.MergeSeries(cf, ds3, CO2, convert_units=True)
    # ******************************************
    # *** Calculate meteorological variables ***
    # ******************************************
    # Update meteorological variables
    qcts.CalculateMeteorologicalVariables(ds3)
    # *************************************************
    # *** Calculate fluxes from covariances section ***
    # *************************************************
    # check to see if the user wants to use the fluxes in the L2 file
    if not qcutils.cfoptionskeylogical(cf, Key="UseL2Fluxes", default=False):
        # check the covariance units and change if necessary
        qcts.CheckCovarianceUnits(ds3)
        # do the 2D coordinate rotation
        qcts.CoordRotation2D(cf, ds3)
        # do the Massman frequency attenuation correction
        qcts.MassmanStandard(cf, ds3)
        # calculate the fluxes
        qcts.CalculateFluxes(cf, ds3)
        # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        qcts.FhvtoFh(cf, ds3)
        # correct the H2O & CO2 flux due to effects of flux on density measurements
        qcts.Fe_WPL(cf, ds3)
        qcts.Fc_WPL(cf, ds3)
    # **************************************
    # *** Calculate Monin-Obukhov length ***
    # **************************************
    qcts.CalculateMoninObukhovLength(ds3)
    # **************************
    # *** CO2 and Fc section ***
    # **************************
    # convert CO2 units if required
    qcutils.ConvertCO2Units(cf, ds3, CO2=CO2)
    # calculate Fc storage term - single height only at present
    qcts.CalculateFcStorageSinglePoint(cf, ds3, Fc_out='Fc_single', CO2_in=CO2)
    # convert Fc and Fc_storage units if required
    qcutils.ConvertFcUnits(cf, ds3)
    # merge Fc and Fc_storage series if required
    merge_list = [
        label for label in cf["Variables"].keys() if label[0:2] == "Fc"
        and "MergeSeries" in cf["Variables"][label].keys()
    ]
    for label in merge_list:
        qcts.MergeSeries(cf, ds3, label, save_originals=True)
    # correct Fc for storage term - only recommended if storage calculated from profile available
    qcts.CorrectFcForStorage(cf, ds3)
    # *************************
    # *** Radiation section ***
    # *************************
    # merge the incoming shortwave radiation
    qcts.MergeSeries(cf, ds3, 'Fsd')
    # calculate the net radiation from the Kipp and Zonen CNR1
    qcts.CalculateNetRadiation(cf,
                               ds3,
                               Fn_out='Fn_KZ',
                               Fsd_in='Fsd',
                               Fsu_in='Fsu',
                               Fld_in='Fld',
                               Flu_in='Flu')
    qcts.MergeSeries(cf, ds3, 'Fn')
    # ****************************************
    # *** Wind speed and direction section ***
    # ****************************************
    # combine wind speed from the Wind Sentry and the SONIC
    qcts.MergeSeries(cf, ds3, 'Ws')
    # combine wind direction from the Wind Sentry and the SONIC
    qcts.MergeSeries(cf, ds3, 'Wd')
    # ********************
    # *** Soil section ***
    # ********************
    # correct soil heat flux for storage
    #    ... either average the raw ground heat flux, soil temperature and moisture
    #        and then do the correction (OzFlux "standard")
    qcts.AverageSeriesByElements(cf, ds3, 'Ts')
    qcts.AverageSeriesByElements(cf, ds3, 'Sws')
    if qcutils.cfoptionskeylogical(cf, Key='CorrectIndividualFg'):
        #    ... or correct the individual ground heat flux measurements (James' method)
        qcts.CorrectIndividualFgForStorage(cf, ds3)
        qcts.AverageSeriesByElements(cf, ds3, 'Fg')
    else:
        qcts.AverageSeriesByElements(cf, ds3, 'Fg')
        qcts.CorrectFgForStorage(cf,
                                 ds3,
                                 Fg_out='Fg',
                                 Fg_in='Fg',
                                 Ts_in='Ts',
                                 Sws_in='Sws')
    # calculate the available energy
    qcts.CalculateAvailableEnergy(ds3, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # create new series using MergeSeries or AverageSeries
    qcck.CreateNewSeries(cf, ds3)
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds3)
    # coordinate gaps in the three main fluxes
    qcck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    qcck.CoordinateAh7500AndFcGaps(cf, ds3)
    # check missing data and QC flags are consistent
    qcutils.CheckQCFlags(ds3)
    # get the statistics for the QC flags and write these to an Excel spreadsheet
    qcio.get_seriesstats(cf, ds3)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)

    return ds3
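The control file cf used throughout behaves like a nested dictionary of sections. An illustrative Python-dict view of the keys l3qc() reads above; the section contents are placeholders, not a working OzFluxQC control file:

cf = {
    "controlfile_name": "example_L3.txt",
    "Options": {
        "UseL2Fluxes": "No",              # use the covariances, not the L2 fluxes
        "CorrectIndividualFg": "No",      # average Fg first, then correct for storage
    },
    "Variables": {
        "CO2": {"MergeSeries": {}},       # label checked by the CO2/Cc alias test
        "Fc": {"MergeSeries": {}},        # picked up by the Fc merge_list comprehension
    },
}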
Example no. 5
def l3qc(cf, ds2):
    """
        Corrections
        Generates L3 from L2 data
        
        Functions performed:
            qcts.AddMetVars (optional)
            qcts.CorrectSWC (optional*)
            qcck.do_linear (all sites)
            qcutils.GetMergeList + qcts.MergeSeries Ah_EC (optional)x
            qcts.TaFromTv (optional)
            qcutils.GetMergeList + qcts.MergeSeries Ta_EC (optional)x
            qcts.CoordRotation2D (all sites)
            qcts.MassmanApprox (optional*)y
            qcts.Massman (optional*)y
            qcts.CalculateFluxes (used if Massman not optioned)x
            qcts.CalculateFluxesRM (used if Massman optioned)y
            qcts.FhvtoFh (all sites)
            qcts.Fe_WPL (WPL computed on fluxes, as with Campbell algorithm)+x
            qcts.Fc_WPL (WPL computed on fluxes, as with Campbell algorithm)+x
            qcts.Fe_WPLcov (WPL computed on kinematic fluxes (ie, covariances), as with WPL80)+y
            qcts.Fc_WPLcov (WPL computed on kinematic fluxes (ie, covariances), as with WPL80)+y
            qcts.CalculateNetRadiation (optional)
            qcutils.GetMergeList + qcts.MergeSeries Fsd (optional)
            qcutils.GetMergeList + qcts.MergeSeries Fn (optional*)
            qcts.InterpolateOverMissing (optional)
            AverageSeriesByElements (optional)
            qcts.CorrectFgForStorage (all sites)
            qcts.Average3SeriesByElements (optional)
            qcts.CalculateAvailableEnergy (optional)
            qcck.do_qcchecks (all sites)
            qcck.gaps (optional)
            
            *:  requires ancillary measurements for parameterisation
            +:  each site requires one pair, either Fe_WPL & Fc_WPL (default) or Fe_WPLcov & Fc_WPLcov
            x:  required together in option set
            y:  required together in option set
        """
    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    ds3.globalattributes['nc_level'] = 'L3'
    ds3.globalattributes['EPDversion'] = sys.version
    ds3.globalattributes['QC_version_history'] = cfg.__doc__
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']

    # calculate NDVI
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='NDVI') and cf['Functions']['NDVI'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', calculateNDVI'
        except:
            ds3.globalattributes['L3Functions'] = 'calculateNDVI'

        log.info(' Calculating NDVI from component reflectances ...')
        qcts.CalculateNDVI(cf, ds3)

    # bypass soil temperature correction for Sws (when Ts bad)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='BypassSwsTcorr'
                          ) and cf['Functions']['BypassSwsTcorr'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', BypassSwsTcorr'
        except:
            ds3.globalattributes['L3Functions'] = 'BypassSwsTcorr'

        log.info(' Re-computing Sws without temperature correction ...')
        qcts.BypassTcorr(cf, ds3)

    # correct measured soil water content using empirical relationship to collected samples
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='CorrectSWC') and cf['Functions']['CorrectSWC'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CorrectSWC'
        except:
            ds3.globalattributes['L3Functions'] = 'CorrectSWC'

        log.info(' Correcting soil moisture data ...')
        qcts.CorrectSWC(cf, ds3)

    # apply linear corrections to the data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', do_linear'
        except:
            ds3.globalattributes['L3Functions'] = 'do_linear'

        log.info(' Applying linear corrections ...')
        qcck.do_linear(cf, ds3)

    # determine HMP Ah if not output by datalogger
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateAh'
                          ) and cf['Functions']['CalculateAh'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CalculateAh'
        except:
            ds3.globalattributes['L3Functions'] = 'CalculateAh'

        log.info(' Adding HMP Ah to database')
        qcts.CalculateAhHMP(cf, ds3)

    # merge the HMP and corrected 7500 data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='MergeSeriesAhTa'
                          ) and cf['Functions']['MergeSeriesAhTa'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', MergeSeriesAhTaCc'
        except:
            ds3.globalattributes['L3Functions'] = 'MergeSeriesAhTaCc'

        qcts.MergeSeries(cf, ds3, 'Ah', [0, 10])
        qcts.MergeSeries(cf, ds3, 'Cc', [0, 10])

        # get the air temperature from the CSAT virtual temperature
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', TaFromTv'
        except:
            ds3.globalattributes['L3Functions'] = 'TaFromTv'

        qcts.TaFromTv(cf, ds3)

        # merge the HMP and corrected CSAT data
        qcts.MergeSeries(cf, ds3, 'Ta', [0, 10])

    # add relevant meteorological values to L3 data
    if (qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections')
            and cf['Functions']['Corrections']
            == 'True') or (qcutils.cfkeycheck(
                cf, Base='Functions', ThisOne='CalculateMetVars')
                           and cf['Functions']['CalculateMetVars'] == 'True'):
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CalculateMetVars'
        except:
            ds3.globalattributes['L3Functions'] = 'CalculateMetVars'

        log.info(' Adding standard met variables to database')
        qcts.CalculateMeteorologicalVariables(ds3)

    # do the 2D coordinate rotation
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CoordRotation2D'
        except:
            ds3.globalattributes['L3Functions'] = 'CoordRotation2D'

        qcts.CoordRotation2D(cf, ds3)

    # do the Massman frequency attenuation correction
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', Massman'
        except:
            ds3.globalattributes['L3Functions'] = 'Massman'

        qcts.MassmanStandard(cf, ds3)

    # calculate the fluxes
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CalculateFluxes'
        except:
            ds3.globalattributes['L3Functions'] = 'CalculateFluxes'

        qcts.CalculateFluxes(cf, ds3)

    # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', FhvtoFh'
        except:
            ds3.globalattributes['L3Functions'] = 'FhvtoFh'

        qcts.FhvtoFh(cf, ds3)

    # correct the H2O & CO2 flux due to effects of flux on density measurements
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        if qcutils.cfkeycheck(
                cf, Base='Functions',
                ThisOne='WPLcov') and cf['Functions']['WPLcov'] == 'True':
            try:
                ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                    'L3Functions'] + ', WPLcov'
            except:
                ds3.globalattributes['L3Functions'] = 'WPLcov'

            qcts.do_WPL(cf, ds3, cov='True')
        else:
            try:
                ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                    'L3Functions'] + ', WPL'
            except:
                ds3.globalattributes['L3Functions'] = 'WPL'

            qcts.do_WPL(cf, ds3)

    # calculate the net radiation from the Kipp and Zonen CNR1
    if qcutils.cfkeycheck(
            cf, Base='Functions', ThisOne='CalculateNetRadiation'
    ) and cf['Functions']['CalculateNetRadiation'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CalculateNetRadiation'
        except:
            ds3.globalattributes['L3Functions'] = 'CalculateNetRadiation'

        qcts.MergeSeries(cf, ds3, 'Fsd', [0, 10])
        qcts.CalculateNetRadiation(ds3, 'Fn_KZ', 'Fsd', 'Fsu', 'Fld', 'Flu')
        qcts.MergeSeries(cf, ds3, 'Fn', [0, 10])

    # combine wind speed from the CSAT and the Wind Sentry
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='MergeSeriesWS'
                          ) and cf['Functions']['MergeSeriesWS'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', MergeSeriesWS'
        except:
            ds3.globalattributes['L3Functions'] = 'MergeSeriesWS'

        qcts.MergeSeries(cf, ds3, 'Ws', [0, 10])

    # average the soil temperature data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        if 'SoilAverage' not in ds3.globalattributes['L3Functions']:
            try:
                ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                    'L3Functions'] + ', SoilAverage'
            except:
                ds3.globalattributes['L3Functions'] = 'SoilAverage'

        # interpolate over any remaining gaps up to 3 hours in length
        qcts.AverageSeriesByElementsI(cf, ds3, 'Ts')
        qcts.AverageSeriesByElementsI(cf, ds3, 'Sws')

    # correct the measured soil heat flux for storage in the soil layer above the sensor
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CorrectFgForStorage'
        except:
            ds3.globalattributes['L3Functions'] = 'CorrectFgForStorage'

        if qcutils.cfkeycheck(
                cf, Base='Functions', ThisOne='IndividualFgCorrection'
        ) and cf['Functions']['IndividualFgCorrection'] == 'True':
            qcts.CorrectIndividualFgForStorage(cf, ds3)
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
        else:
            qcts.AverageSeriesByElementsI(cf, ds3, 'Fg')
            qcts.CorrectGroupFgForStorage(cf, ds3)

    # calculate the available energy
    if qcutils.cfkeycheck(
            cf, Base='Functions', ThisOne='CalculateAvailableEnergy'
    ) and cf['Functions']['CalculateAvailableEnergy'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CalculateAvailableEnergy'
        except:
            ds3.globalattributes['L3Functions'] = 'CalculateAvailableEnergy'

        qcts.CalculateAvailableEnergy(ds3)

    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='DiagnosticMode'):
        if cf['Functions']['DiagnosticMode'] == 'False':
            qcutils.prepOzFluxVars(cf, ds3)
    else:
        qcutils.prepOzFluxVars(cf, ds3)

    # calculate specific humidity and saturated specific humidity profile
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='qTprofile') and cf['Functions']['qTprofile'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', qTprofile'
        except:
            ds3.globalattributes['L3Functions'] = 'qTprofile'

        qcts.CalculateSpecificHumidityProfile(cf, ds3)

    # calculate Penman-Monteith inversion
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='PenmanMonteith'
                          ) and cf['Functions']['PenmanMonteith'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', PenmanMonteith'
        except:
            ds3.globalattributes['L3Functions'] = 'PenmanMonteith'

        qcts.do_PenmanMonteith(cf, ds3)

    # calculate bulk Richardson numbers
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='bulkRichardson'
                          ) and cf['Functions']['bulkRichardson'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', bulkRichardson'
        except:
            ds3.globalattributes['L3Functions'] = 'bulkRichardson'

        qcts.do_bulkRichardson(cf, ds3)

    # re-apply the quality control checks (range, diurnal and rules)
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        ds3.globalattributes['L3Functions'] = ds3.globalattributes[
            'L3Functions'] + ', do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        qcck.do_qcchecks(cf, ds3)

    # quality control checks (range, diurnal and rules) without flux post-processing
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='QCChecks') and cf['Functions']['QCChecks'] == 'True':
        qcck.do_qcchecks(cf, ds3)

    # apply the ustar filter
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='ustarFilter'
                          ) and cf['Functions']['ustarFilter'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', ustarFilter'
        except:
            ds3.globalattributes['L3Functions'] = 'ustarFilter'

        qcts.FilterFcByUstar(cf, ds3)

    # coordinate gaps in the three main fluxes
    if qcutils.cfkeycheck(
            cf, Base='Functions', ThisOne='CoordinateFluxGaps'
    ) and cf['Functions']['CoordinateFluxGaps'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CoordinateFluxGaps'
        except:
            ds3.globalattributes['L3Functions'] = 'CoordinateFluxGaps'

        qcck.CoordinateFluxGaps(cf, ds3)

    # coordinate gaps in Ah_7500_Av with Fc
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CoordinateAh7500AndFcGaps'
        except:
            ds3.globalattributes['L3Functions'] = 'CoordinateAh7500AndFcGaps'

        qcck.CoordinateAh7500AndFcGaps(cf, ds3)

    # calculate ET at observation interval
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateET'
                          ) and cf['Functions']['CalculateET'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', CalculateET'
        except:
            ds3.globalattributes['L3Functions'] = 'CalculateET'

        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds3, 'L3')

    # run MOST (Buckingham Pi) 2d footprint model (Kljun et al. 2004)
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='footprint') and cf['Functions']['footprint'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', footprint'
        except:
            ds3.globalattributes['L3Functions'] = 'footprint'

        qcts.do_footprint_2d(cf, ds3)

    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Corrections'
                          ) and cf['Functions']['Corrections'] == 'True':
        qcio.get_seriesstats(cf, ds3)

    # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1], NEE [umol m-2 s-1] and NEP = - NEE
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='convertFc') and cf['Functions']['convertFc'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', convertFc'
        except:
            ds3.globalattributes['L3Functions'] = 'convertFc'

        qcts.ConvertFc(cf, ds3)

    # convert Fc [mgCO2 m-2 s-1] to Fc [umol m-2 s-1]
    if qcutils.cfkeycheck(
            cf, Base='Functions',
            ThisOne='JasonFc') and cf['Functions']['JasonFc'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', convertFc (umol only)'
        except:
            ds3.globalattributes['L3Functions'] = 'convertFc (umol only)'

        qcts.ConvertFcJason(cf, ds3)

    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds3)

    # compute water-use efficiency from flux-gradient similarity (appendix A, Scanlon & Sahu 2008)
    if qcutils.cfkeycheck(cf, Base='Functions',
                          ThisOne='wue') and cf['Functions']['wue'] == 'True':
        try:
            ds3.globalattributes[
                'L3Functions'] = ds3.globalattributes['L3Functions'] + ', wue'
        except:
            ds3.globalattributes['L3Functions'] = 'wue'

        log.info(
            ' Calculating water-use efficiency from flux-gradient similarity')
        qcts.CalculateWUEfromSimilarity(cf, ds3)

    # compute climatology for L3 data
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='climatology'
                          ) and cf['Functions']['climatology'] == 'True':
        try:
            ds3.globalattributes['L3Functions'] = ds3.globalattributes[
                'L3Functions'] + ', climatology'
        except:
            ds3.globalattributes['L3Functions'] = 'climatology'

        qcts.do_climatology(cf, ds3)

    if qcutils.cfkeycheck(cf, Base='Functions',
                          ThisOne='Sums') and cf['Functions']['Sums'] == 'L3':
        try:
            ds3.globalattributes[
                'L3Functions'] = ds3.globalattributes['L3Functions'] + ', Sums'
        except:
            ds3.globalattributes['L3Functions'] = 'Sums'

        qcts.do_sums(cf, ds3)

    try:
        ds3.globalattributes['Functions'] = ds3.globalattributes[
            'Functions'] + ', ' + ds3.globalattributes['L3Functions']
    except:
        ds3.globalattributes['Functions'] = ds3.globalattributes['L3Functions']

    return ds3
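Nearly every block in the function above repeats the same try/except to append a name to the 'L3Functions' global attribute. A small helper (a sketch, not part of the OzFluxQC code) that removes the repetition and the bare except:

def append_to_functions(ds, level, name):
    """Append `name` to ds.globalattributes['<level>Functions'], creating it if needed."""
    key = level + 'Functions'
    if key in ds.globalattributes:
        ds.globalattributes[key] = ds.globalattributes[key] + ', ' + name
    else:
        ds.globalattributes[key] = name

# usage, replacing one of the try/except blocks above:
#   append_to_functions(ds3, 'L3', 'CoordRotation2D')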
Example no. 6
def l5qc(cf, ds4, y):
    ds5 = copy.deepcopy(ds4)
    ds5.globalattributes['nc_level'] = 'L5'
    if (qcutils.cfkeycheck(cf, Base='Functions', ThisOne='L5_offline') and
            cf['Functions']['L5_offline'] == 'True') and qcutils.cfkeycheck(
                cf, Base='Functions', ThisOne='L5_keys'):
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes[
                'L5Functions'] + ', ' + cf['Functions']['L5_keys']
        except:
            ds5.globalattributes['L5Functions'] = cf['Functions']['L5_keys']

        y = y + 1

    # calculate u* from Fh and corrected wind speed
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='UstarFromFh'
                          ) and cf['Functions']['UstarFromFh'] == 'True':
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes[
                'L5Functions'] + ', UstarFromFh'
        except:
            ds5.globalattributes['L5Functions'] = 'UstarFromFh'

        qcts.UstarFromFh(cf, ds5)
        y = y + 1

    # calculate ET at observation interval
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='CalculateET'
                          ) and cf['Functions']['CalculateET'] == 'True':
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes[
                'L5Functions'] + ', CalculateET'
        except:
            ds5.globalattributes['L5Functions'] = 'CalculateET'

        log.info(' Calculating ET')
        qcts.CalculateET(cf, ds5, 'L5')

    # calculate rst, rc and Gst, Gc from Penman-Monteith inversion
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='PenmanMonteith'
                          ) and cf['Functions']['PenmanMonteith'] == 'True':
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes[
                'L5Functions'] + ', PenmanMonteith'
        except:
            ds5.globalattributes['L5Functions'] = 'PenmanMonteith'

        qcts.do_PenmanMonteith(cf, ds5)

        # re-calculate the available energy from L5 (gapfilled) fluxes
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes[
                'L5Functions'] + ', CalculateAvailableEnergy'
        except:
            ds5.globalattributes['L5Functions'] = 'CalculateAvailableEnergy'

        qcts.CalculateAvailableEnergy(ds5)

    # re-apply the quality control checks (range, diurnal and rules)
    if y > 0:
        log.info(' Doing QC checks on L5 data')
        qcck.do_qcchecks(cf, ds5)
        try:
            ds5.globalattributes['L5Functions'] = ds5.globalattributes[
                'L5Functions'] + ', do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'
        except:
            ds5.globalattributes[
                'L5Functions'] = 'do_qccheck(RangeCheck, diurnalcheck, excludedates, excludehours)'

    try:
        ds5.globalattributes['Functions'] = ds5.globalattributes[
            'Functions'] + ', ' + ds5.globalattributes['L5Functions']
    except:
        # no L5 functions were recorded, leave the 'Functions' attribute unchanged
        pass

    return ds5, y