Example #1
# project-local helper modules referenced below (imported at the top of qcls.py)
import qcck
import qcgf
import qcio
import qcrp
import qcts
import qcutils

def l6qc(cf, ds5):
    ds6 = qcio.copy_datastructure(cf, ds5)
    # ds6 will be empty (logical false) if an error occurs in copy_datastructure
    # return from this routine if this is the case
    if not ds6: return ds6
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf, ds6, "L6")
    # parse the control file
    qcrp.ParseL6ControlFile(cf, ds6)
    # check to see if we have any imports
    qcgf.ImportSeries(cf, ds6)
    # check units
    qcutils.CheckUnits(ds6, "Fc", "umol/m2/s", convert_units=True)
    # apply the turbulence filter (if requested)
    qcck.ApplyTurbulenceFilter(cf, ds6)
    # filter Fc for night time and ustar threshold, write to ds as "ER"
    qcrp.GetERFromFc2(cf, ds6)
    # estimate ER using SOLO
    qcrp.ERUsingSOLO(cf, ds6)
    # estimate ER using FFNET
    qcrp.ERUsingFFNET(cf, ds6)
    # estimate ER using Lloyd-Taylor
    qcrp.ERUsingLloydTaylor(cf, ds6)
    # estimate ER using Lasslop et al
    qcrp.ERUsingLasslop(cf, ds6)
    # merge the estimates of ER with the observations
    qcts.MergeSeriesUsingDict(ds6, merge_order="standard")
    # calculate NEE from Fc and ER
    qcrp.CalculateNEE(cf, ds6)
    # calculate NEP from NEE
    qcrp.CalculateNEP(cf, ds6)
    # calculate ET from Fe
    qcrp.CalculateET(ds6)
    # partition NEE into GPP and ER
    qcrp.PartitionNEE(cf, ds6)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds6)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds6)
    # do the L6 summary
    qcrp.L6_summary(cf, ds6)

    return ds6
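
For context, the sketch below shows how l6qc might be driven from a script: read an L6 control file and the L5 netCDF file, run the L6 processing, and write the result out. The qcio helper names used here (get_controlfilecontents, get_infilenamefromcf, nc_read_series, nc_open_write, nc_write_series) and the control file path are assumptions based on the same project and may differ between versions.

import qcio
import qcls

# Assumed driver for the l6qc() function above; the qcio helpers and the
# control file path are assumptions, not part of the example.
cf = qcio.get_controlfilecontents("controlfiles/L6.txt")  # hypothetical control file path
infilename = qcio.get_infilenamefromcf(cf)                # input (L5) netCDF named in the control file
ds5 = qcio.nc_read_series(infilename)                     # read the L5 data structure
ds6 = qcls.l6qc(cf, ds5)                                  # run the L6 processing shown above
if ds6:
    outfilename = qcio.get_outfilenamefromcf(cf)          # output (L6) netCDF named in the control file
    ncfile = qcio.nc_open_write(outfilename)              # open the output netCDF file
    qcio.nc_write_series(ncfile, ds6)                     # write the L6 data structure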
Example #2
File: qcls.py  Project: mdekauwe/PyFluxPro
# project-local helper modules referenced below (imported at the top of qcls.py)
import qcck
import qcgf
import qcgfMDS
import qcgfSOLO
import qcio
import qcts
import qcutils

def l5qc(cf, ds4):
    ds5 = qcio.copy_datastructure(cf, ds4)
    # ds5 will be empty (logical false) if an error occurs in copy_datastructure
    # return from this routine if this is the case
    if not ds5:
        return ds5
    # set some attributes for this level
    qcutils.UpdateGlobalAttributes(cf, ds5, "L5")
    # attach the control file to the data structure
    ds5.cf = cf
    # create a dictionary to hold the gap filling data
    ds_alt = {}
    # check to see if we have any imports
    qcgf.ImportSeries(cf, ds5)
    # re-apply the quality control checks (range, diurnal and rules)
    qcck.do_qcchecks(cf, ds5)
    # now do the flux gap filling methods
    label_list = qcutils.get_label_list_from_cf(cf)
    for label in label_list:
        # parse the control file for information on how the user wants to do the gap filling
        qcgf.GapFillParseControlFile(cf, ds5, label, ds_alt)
    # *** start of the section that does the gap filling of the fluxes ***
    # apply the turbulence filter (if requested)
    qcck.ApplyTurbulenceFilter(cf, ds5)
    # fill short gaps using interpolation
    qcgf.GapFillUsingInterpolation(cf, ds5)
    # do the gap filling using SOLO
    qcgfSOLO.GapFillUsingSOLO(cf, ds4, ds5)
    # return early if the SOLO gap filling signalled "quit"
    if ds5.returncodes["solo"] == "quit":
        return ds5
    # gap fill using marginal distribution sampling
    qcgfMDS.GapFillFluxUsingMDS(cf, ds5)
    # gap fill using climatology
    qcgf.GapFillFromClimatology(ds5)
    # merge the gap filled drivers into a single series
    qcts.MergeSeriesUsingDict(ds5, merge_order="standard")
    # calculate Monin-Obukhov length
    qcts.CalculateMoninObukhovLength(ds5)
    # write the percentage of good data as a variable attribute
    qcutils.get_coverage_individual(ds5)
    # write the percentage of good data for groups
    qcutils.get_coverage_groups(ds5)

    return ds5
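
As with l6qc, the sketch below shows how a caller might chain l5qc into the L6 step, including the early-exit check on the SOLO return code that l5qc sets above. The qcio helper names and the control file path are the same assumptions as in the driver sketch after Example #1; the qcls module name simply reflects the file these examples are taken from.

import qcio
import qcls

# cf and ds4 would normally come from the preceding L4 step; the helper names
# and the control file path below are assumptions, not part of the example.
cf = qcio.get_controlfilecontents("controlfiles/L5.txt")   # hypothetical control file path
ds4 = qcio.nc_read_series(qcio.get_infilenamefromcf(cf))   # read the L4 data structure
ds5 = qcls.l5qc(cf, ds4)
# l5qc() returns an empty (falsey) data structure if copy_datastructure fails and
# sets returncodes["solo"] to "quit" if the SOLO gap filling was abandoned;
# only carry on to the L6 processing when neither of these happened.
if ds5 and ds5.returncodes.get("solo", "") != "quit":
    ds6 = qcls.l6qc(cf, ds5)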