def l4qc(main_gui, cf, ds3):
    """
    Perform L4 processing: gap fill the meteorological drivers in an L3
    data structure.

    Copies the L3 data structure, re-applies QC checks, then gap fills the
    drivers by interpolation, climatology and alternate (e.g. ACCESS) data
    as configured in the control file, and recalculates derived quantities.

    Args:
        main_gui: GUI object passed through to the interactive alternate
                  gap-filling routine.
        cf: control file object with [Options], [Drivers] etc. sections.
        ds3: L3 data structure.

    Returns:
        ds4: L4 data structure; may be returned early (and "empty") if
             copying fails or a gap-filling step sets a non-zero return code.
    """
    ds4 = pfp_io.copy_datastructure(cf, ds3)
    # ds4 will be empty (logical false) if an error occurs in copy_datastructure
    # return from this routine if this is the case
    if not ds4:
        return ds4
    # set some attributes for this level
    pfp_utils.UpdateGlobalAttributes(cf, ds4, "L4")
    # check to see if we have any imports
    pfp_gf.ImportSeries(cf, ds4)
    # re-apply the quality control checks (range, diurnal and rules)
    pfp_ck.do_qcchecks(cf, ds4)
    # now do the meteorological driver gap filling
    # parse the control file for information on how the user wants to do the gap filling
    l4_info = pfp_gf.ParseL4ControlFile(cf, ds4)
    if ds4.returncodes["value"] != 0:
        return ds4
    # *** start of the section that does the gap filling of the drivers ***
    # read the alternate data files
    ds_alt = pfp_gf.ReadAlternateFiles(ds4, l4_info)
    # fill short gaps using interpolation
    pfp_gf.GapFillUsingInterpolation(cf, ds4)
    # gap fill using climatology
    if "GapFillFromClimatology" in l4_info:
        pfp_gf.GapFillFromClimatology(ds4, l4_info, "GapFillFromClimatology")
    # do the gap filling using the ACCESS output
    if "GapFillFromAlternate" in l4_info:
        pfp_gfALT.GapFillFromAlternate(main_gui, ds4, ds_alt, l4_info, "GapFillFromAlternate")
        # bail out if the user quit the interactive gap filling
        if ds4.returncodes["value"] != 0:
            return ds4
    # merge the first group of gap filled drivers into a single series
    pfp_ts.MergeSeriesUsingDict(ds4, l4_info, merge_order="prerequisite")
    # re-calculate the ground heat flux but only if requested in control file
    opt = pfp_utils.get_keyvaluefromcf(cf, ["Options"], "CorrectFgForStorage",
                                       default="No", mode="quiet")
    if opt.lower() != "no":
        pfp_ts.CorrectFgForStorage(cf, ds4, Fg_out='Fg', Fg_in='Fg_Av', Ts_in='Ts', Sws_in='Sws')
    # re-calculate the net radiation
    pfp_ts.CalculateNetRadiation(cf, ds4, Fn_out='Fn', Fsd_in='Fsd', Fsu_in='Fsu',
                                 Fld_in='Fld', Flu_in='Flu')
    # re-calculate the available energy
    pfp_ts.CalculateAvailableEnergy(ds4, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # merge the second group of gap filled drivers into a single series
    pfp_ts.MergeSeriesUsingDict(ds4, l4_info, merge_order="standard")
    # re-calculate the water vapour concentrations
    pfp_ts.CalculateHumiditiesAfterGapFill(ds4, l4_info)
    # re-calculate the meteorological variables
    pfp_ts.CalculateMeteorologicalVariables(ds4, l4_info)
    # check for any missing data
    pfp_utils.get_missingingapfilledseries(ds4, l4_info)
    # write the percentage of good data as a variable attribute
    pfp_utils.get_coverage_individual(ds4)
    # write the percentage of good data for groups
    pfp_utils.get_coverage_groups(ds4)
    # remove intermediate series from the data structure
    pfp_ts.RemoveIntermediateSeries(ds4, l4_info)
    return ds4
def l4qc(cf, ds3):
    """
    Perform L4 processing (older implementation): gap fill the
    meteorological drivers in an L3 data structure.

    NOTE(review): this file contains more than one definition of l4qc;
    in Python the definition appearing last in the module shadows the
    earlier ones.  Confirm which version is intended to be active.

    TODO (original author's plan): use an existing L4 file if present -
    - if the L4 file doesn't exist, create ds4 with copy.deepcopy(ds3)
    - if it exists and "UseExistingL4File" is False, also deepcopy ds3
    - if it exists and "UseExistingL4File" is True, read the L4 netCDF
      file, compare L3/L4 start and end dates, tell the user if there is
      nothing to do, otherwise merge the L3 data into the L4 structure.

    Args:
        cf: control file object with [Options], [Drivers] etc. sections.
        ds3: L3 data structure.

    Returns:
        ds4: L4 data structure; may be returned early if copying fails or
             the user quits the alternate/SOLO gap filling.
    """
    ds4 = pfp_io.copy_datastructure(cf, ds3)
    # ds4 will be empty (logical false) if an error occurs in copy_datastructure
    # return from this routine if this is the case
    if not ds4:
        return ds4
    # set some attributes for this level
    pfp_utils.UpdateGlobalAttributes(cf, ds4, "L4")
    # keep a reference to the control file on the data structure
    ds4.cf = cf
    # create a dictionary to hold the gap filling data
    ds_alt = {}
    # check to see if we have any imports
    pfp_gf.ImportSeries(cf, ds4)
    # re-apply the quality control checks (range, diurnal and rules)
    pfp_ck.do_qcchecks(cf, ds4)
    # now do the meteorological driver gap filling
    for ThisOne in cf["Drivers"].keys():
        if ThisOne not in ds4.series.keys():
            logger.warning("Series " + ThisOne + " not in data structure")
            continue
        # parse the control file for information on how the user wants to do the gap filling
        pfp_gf.GapFillParseControlFile(cf, ds4, ThisOne, ds_alt)
    # *** start of the section that does the gap filling of the drivers ***
    # fill short gaps using interpolation
    pfp_gf.GapFillUsingInterpolation(cf, ds4)
    # gap fill using climatology
    pfp_gf.GapFillFromClimatology(ds4)
    # do the gap filling using the ACCESS output
    pfp_gfALT.GapFillFromAlternate(cf, ds4, ds_alt)
    if ds4.returncodes["alternate"] == "quit":
        return ds4
    # gap fill using SOLO
    pfp_gfSOLO.GapFillUsingSOLO(cf, ds3, ds4)
    if ds4.returncodes["solo"] == "quit":
        return ds4
    # merge the first group of gap filled drivers into a single series
    pfp_ts.MergeSeriesUsingDict(ds4, merge_order="prerequisite")
    # re-calculate the ground heat flux but only if requested in control file
    opt = pfp_utils.get_keyvaluefromcf(cf, ["Options"], "CorrectFgForStorage",
                                       default="No", mode="quiet")
    if opt.lower() != "no":
        pfp_ts.CorrectFgForStorage(cf, ds4, Fg_out='Fg', Fg_in='Fg_Av', Ts_in='Ts', Sws_in='Sws')
    # re-calculate the net radiation
    pfp_ts.CalculateNetRadiation(cf, ds4, Fn_out='Fn', Fsd_in='Fsd', Fsu_in='Fsu',
                                 Fld_in='Fld', Flu_in='Flu')
    # re-calculate the available energy
    pfp_ts.CalculateAvailableEnergy(ds4, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # merge the second group of gap filled drivers into a single series
    pfp_ts.MergeSeriesUsingDict(ds4, merge_order="standard")
    # re-calculate the water vapour concentrations
    pfp_ts.CalculateHumiditiesAfterGapFill(ds4)
    # re-calculate the meteorological variables
    pfp_ts.CalculateMeteorologicalVariables(ds4)
    # the Tumba rhumba: derive wind components from speed and direction
    pfp_ts.CalculateComponentsFromWsWd(ds4)
    # check for any missing data
    pfp_utils.get_missingingapfilledseries(ds4)
    # write the percentage of good data as a variable attribute
    pfp_utils.get_coverage_individual(ds4)
    # write the percentage of good data for groups
    pfp_utils.get_coverage_groups(ds4)
    return ds4
def l3qc(cf, ds2):
    """
    Perform L3 processing (older implementation): compute corrected fluxes
    and derived meteorological quantities from the L2 data.

    The processing order is significant: humidities and temperatures are
    merged first, then CO2, then (unless "UseL2Fluxes" is set) the fluxes
    are recalculated from covariances (rotation, Massman, WPL), followed by
    storage corrections, radiation/wind/soil merges and final QC.

    Args:
        cf: control file object with [Options] and [Variables] sections.
        ds2: L2 data structure.

    Returns:
        ds3: L3 data structure.
        NOTE(review): returns None (bare return) when no CO2 label is found
        in the control file - inconsistent with the ds3 return elsewhere;
        confirm callers handle a None result.
    """
    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    # set some attributes for this level
    pfp_utils.UpdateGlobalAttributes(cf, ds3, "L3")
    # put the control file name into the global attributes
    ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
    # check to see if we have any imports
    pfp_gf.ImportSeries(cf, ds3)
    # apply linear corrections to the data
    pfp_ck.do_linear(cf, ds3)
    # ************************
    # *** Merge humidities ***
    # ************************
    # merge whatever humidities are available
    pfp_ts.MergeHumidities(cf, ds3, convert_units=True)
    # **************************
    # *** Merge temperatures ***
    # **************************
    # get the air temperature from the CSAT virtual temperature
    pfp_ts.TaFromTv(cf, ds3)
    # merge the HMP and corrected CSAT data
    pfp_ts.MergeSeries(cf, ds3, "Ta", convert_units=True)
    pfp_utils.CheckUnits(ds3, "Ta", "C", convert_units=True)
    # ***************************
    # *** Calcuate humidities ***
    # ***************************
    # calculate humidities (absolute, specific and relative) from whatever is available
    pfp_ts.CalculateHumidities(ds3)
    # ********************************
    # *** Merge CO2 concentrations ***
    # ********************************
    # merge the 7500 CO2 concentration
    # PRI 09/08/2017 possibly the ugliest thing I have done yet
    # This needs to be abstracted to a general alias checking routine at the
    # start of the L3 processing so that possible aliases are mapped to a single
    # set of variable names.
    if "CO2" in cf["Variables"]:
        CO2 = "CO2"
    elif "Cc" in cf["Variables"]:
        CO2 = "Cc"
    else:
        msg = "Label for CO2 ('CO2','Cc') not found in control file"
        logger.error(msg)
        return
    pfp_ts.MergeSeries(cf, ds3, CO2, convert_units=True)
    # ******************************************
    # *** Calculate meteorological variables ***
    # ******************************************
    # Update meteorological variables
    pfp_ts.CalculateMeteorologicalVariables(ds3)
    # *************************************************
    # *** Calculate fluxes from covariances section ***
    # *************************************************
    # check to see if the user wants to use the fluxes in the L2 file
    if not pfp_utils.cfoptionskeylogical(cf, Key="UseL2Fluxes", default=False):
        # check the covariance units and change if necessary
        pfp_ts.CheckCovarianceUnits(ds3)
        # do the 2D coordinate rotation
        pfp_ts.CoordRotation2D(cf, ds3)
        # do the Massman frequency attenuation correction
        pfp_ts.MassmanStandard(cf, ds3)
        # calculate the fluxes
        pfp_ts.CalculateFluxes(cf, ds3)
        # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        pfp_ts.FhvtoFh(cf, ds3)
        # correct the H2O & CO2 flux due to effects of flux on density measurements
        pfp_ts.Fe_WPL(cf, ds3)
        pfp_ts.Fc_WPL(cf, ds3)
    # **************************************
    # *** Calculate Monin-Obukhov length ***
    # **************************************
    pfp_ts.CalculateMoninObukhovLength(ds3)
    # **************************
    # *** CO2 and Fc section ***
    # **************************
    # convert CO2 units if required
    pfp_utils.ConvertCO2Units(cf, ds3, CO2=CO2)
    # calculate Fc storage term - single height only at present
    pfp_ts.CalculateFcStorageSinglePoint(cf, ds3, Fc_out='Fc_single', CO2_in=CO2)
    # convert Fc and Fc_storage units if required
    pfp_utils.ConvertFcUnits(cf, ds3)
    # merge Fc and Fc_storage series if required
    merge_list = [label for label in cf["Variables"].keys()
                  if label[0:2] == "Fc" and "MergeSeries" in cf["Variables"][label].keys()]
    for label in merge_list:
        pfp_ts.MergeSeries(cf, ds3, label, save_originals=True)
    # correct Fc for storage term - only recommended if storage calculated from profile available
    pfp_ts.CorrectFcForStorage(cf, ds3)
    # *************************
    # *** Radiation section ***
    # *************************
    # merge the incoming shortwave radiation
    pfp_ts.MergeSeries(cf, ds3, 'Fsd')
    # calculate the net radiation from the Kipp and Zonen CNR1
    pfp_ts.CalculateNetRadiation(cf, ds3, Fn_out='Fn_KZ', Fsd_in='Fsd', Fsu_in='Fsu',
                                 Fld_in='Fld', Flu_in='Flu')
    pfp_ts.MergeSeries(cf, ds3, 'Fn')
    # ****************************************
    # *** Wind speed and direction section ***
    # ****************************************
    # combine wind speed from the Wind Sentry and the SONIC
    pfp_ts.MergeSeries(cf, ds3, 'Ws')
    # combine wind direction from the Wind Sentry and the SONIC
    pfp_ts.MergeSeries(cf, ds3, 'Wd')
    # ********************
    # *** Soil section ***
    # ********************
    # correct soil heat flux for storage
    # ... either average the raw ground heat flux, soil temperature and moisture
    #     and then do the correction (OzFlux "standard")
    pfp_ts.AverageSeriesByElements(cf, ds3, 'Ts')
    pfp_ts.AverageSeriesByElements(cf, ds3, 'Sws')
    if pfp_utils.cfoptionskeylogical(cf, Key='CorrectIndividualFg'):
        # ... or correct the individual ground heat flux measurements (James' method)
        pfp_ts.CorrectIndividualFgForStorage(cf, ds3)
        pfp_ts.AverageSeriesByElements(cf, ds3, 'Fg')
    else:
        pfp_ts.AverageSeriesByElements(cf, ds3, 'Fg')
        pfp_ts.CorrectFgForStorage(cf, ds3, Fg_out='Fg', Fg_in='Fg', Ts_in='Ts', Sws_in='Sws')
    # calculate the available energy
    pfp_ts.CalculateAvailableEnergy(ds3, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
    # create new series using MergeSeries or AverageSeries
    pfp_ck.CreateNewSeries(cf, ds3)
    # re-apply the quality control checks (range, diurnal and rules)
    pfp_ck.do_qcchecks(cf, ds3)
    # coordinate gaps in the three main fluxes
    pfp_ck.CoordinateFluxGaps(cf, ds3)
    # coordinate gaps in Ah_7500_Av with Fc
    pfp_ck.CoordinateAh7500AndFcGaps(cf, ds3)
    # check missing data and QC flags are consistent
    pfp_utils.CheckQCFlags(ds3)
    # get the statistics for the QC flags and write these to an Excel spreadsheet
    pfp_io.get_seriesstats(cf, ds3)
    # write the percentage of good data as a variable attribute
    pfp_utils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    pfp_utils.get_coverage_groups(ds3)
    return ds3
def climatology(cf):
    """
    Write a climatology spreadsheet (older implementation) for the
    variables listed in the control file.

    Reads the netCDF file named in the control file, computes monthly
    diurnal statistics and per-day tables (raw and 2D-interpolated) for
    each requested variable, and saves them to an "_Climatology.xls"
    workbook next to the input file.

    Args:
        cf: control file object with a [Variables] section; each variable
            may carry an "AltVarName" key naming the series to use instead.

    Returns:
        None.  Returns early without writing anything if the input netCDF
        file does not exist.
    """
    nc_filename = pfp_io.get_infilenamefromcf(cf)
    if not pfp_utils.file_exists(nc_filename):
        return
    xl_filename = nc_filename.replace(".nc", "_Climatology.xls")
    xlFile = xlwt.Workbook()
    ds = pfp_io.nc_read_series(nc_filename)
    # calculate Fa if it is not in the data structure
    if "Fa" not in ds.series.keys():
        if "Fn" in ds.series.keys() and "Fg" in ds.series.keys():
            pfp_ts.CalculateAvailableEnergy(ds, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
        else:
            # FIX: corrected misspelled log message ("struicture")
            logger.warning(" Fn or Fg not in data structure")
    # get the time step (minutes)
    ts = int(ds.globalattributes['time_step'])
    # get the datetime series
    dt = ds.series['DateTime']['Data']
    # decimal hour of day and month number for each record
    Hdh = numpy.array([(d.hour + d.minute / float(60)) for d in dt])
    Month = numpy.array([d.month for d in dt])
    # get the initial start and end dates
    StartDate = str(dt[0])
    EndDate = str(dt[-1])
    # find the start index of the first whole day (time=00:30)
    si = pfp_utils.GetDateIndex(dt, StartDate, ts=ts, default=0, match='startnextday')
    # find the end index of the last whole day (time=00:00)
    ei = pfp_utils.GetDateIndex(dt, EndDate, ts=ts, default=-1, match='endpreviousday')
    # get local views of the datetime series
    ldt = dt[si:ei + 1]
    Hdh = Hdh[si:ei + 1]
    Month = Month[si:ei + 1]
    # get the number of time steps in a day and the number of days in the data
    ntsInDay = int(24.0 * 60.0 / float(ts))
    # FIX: use floor division so nDays is an int under Python 3; the old
    # "int(len(ldt))/ntsInDay" yields a float which later makes
    # data.reshape(nDays_daily, ntsInDay) raise a TypeError
    nDays = len(ldt) // ntsInDay
    for ThisOne in cf['Variables'].keys():
        # allow the control file to alias a variable to another series name
        if "AltVarName" in cf['Variables'][ThisOne].keys():
            ThisOne = cf['Variables'][ThisOne]["AltVarName"]
        if ThisOne in ds.series.keys():
            logger.info(" Doing climatology for " + ThisOne)
            data, f, a = pfp_utils.GetSeriesasMA(ds, ThisOne, si=si, ei=ei)
            if numpy.ma.count(data) == 0:
                logger.warning(" No data for " + ThisOne + ", skipping ...")
                continue
            fmt_str = get_formatstring(cf, ThisOne, fmt_def='')
            xlSheet = xlFile.add_sheet(ThisOne)
            Av_all = do_diurnalstats(Month, Hdh, data, xlSheet, format_string=fmt_str, ts=ts)
            # now do it for each day
            # we want to preserve any data that has been truncated by the use of the
            # "startnextday" and "endpreviousday" match options used above.  Here we
            # revisit the start and end indices and adjust these backwards and forwards
            # respectively if data has been truncated.
            nDays_daily = nDays
            ei_daily = ei
            si_daily = si
            sdate = ldt[0]
            edate = ldt[-1]
            # is there data after the current end date?
            if dt[-1] > ldt[-1]:
                # if so, push the end index forward by 1 day so it is included
                ei_daily = ei + ntsInDay
                nDays_daily = nDays_daily + 1
                edate = ldt[-1] + datetime.timedelta(days=1)
            # is there data before the current start date?
            if dt[0] < ldt[0]:
                # if so, push the start index back by 1 day so it is included
                si_daily = si - ntsInDay
                nDays_daily = nDays_daily + 1
                sdate = ldt[0] - datetime.timedelta(days=1)
            # get the data and use the "pad" option to add missing data if required to
            # complete the extra days
            data, f, a = pfp_utils.GetSeriesasMA(ds, ThisOne, si=si_daily, ei=ei_daily,
                                                 mode="pad")
            data_daily = data.reshape(nDays_daily, ntsInDay)
            xlSheet = xlFile.add_sheet(ThisOne + '(day)')
            write_data_1columnpertimestep(xlSheet, data_daily, ts, startdate=sdate,
                                          format_string=fmt_str)
            data_daily_i = do_2dinterpolation(data_daily)
            xlSheet = xlFile.add_sheet(ThisOne + 'i(day)')
            write_data_1columnpertimestep(xlSheet, data_daily_i, ts, startdate=sdate,
                                          format_string=fmt_str)
        else:
            logger.warning(" Requested variable " + ThisOne + " not in data structure")
    logger.info(" Saving Excel file " + os.path.split(xl_filename)[1])
    xlFile.save(xl_filename)
def climatology(cf):
    """
    Write a climatology spreadsheet for the variables listed in the
    control file.

    Reads the netCDF file named in the control file and, for each
    requested variable, writes monthly diurnal statistics and per-day
    tables (raw and 2D-interpolated) to an "_Climatology.xls" workbook.
    The derived ratios "EF" (evaporative fraction, Fe/Fa), "BR" (Bowen
    ratio, Fh/Fe) and "WUE" (water use efficiency, Fc/Fe) are handled as
    special cases, range-checked against the control file limits.

    Args:
        cf: control file object with a [Variables] section; each variable
            may carry an "AltVarName" key naming the series to use instead.

    Returns:
        None.  Returns early without writing anything if the input netCDF
        file does not exist.
    """
    nc_filename = pfp_io.get_infilenamefromcf(cf)
    if not pfp_utils.file_exists(nc_filename):
        return
    xl_filename = nc_filename.replace(".nc", "_Climatology.xls")
    xlFile = xlwt.Workbook()
    ds = pfp_io.nc_read_series(nc_filename)
    # calculate Fa if it is not in the data structure; got_Fa gates the
    # evaporative fraction ("EF") branch below
    got_Fa = True
    if "Fa" not in ds.series.keys():
        if "Fn" in ds.series.keys() and "Fg" in ds.series.keys():
            pfp_ts.CalculateAvailableEnergy(ds, Fa_out='Fa', Fn_in='Fn', Fg_in='Fg')
        else:
            got_Fa = False
            # FIX: corrected misspelled log message ("struicture")
            logger.warning(" Fn or Fg not in data structure")
    # get the time step (minutes)
    ts = int(ds.globalattributes['time_step'])
    # get the datetime series
    dt = ds.series['DateTime']['Data']
    # decimal hour of day and month number for each record
    Hdh = numpy.array([(d.hour + d.minute / float(60)) for d in dt])
    Month = numpy.array([d.month for d in dt])
    # get the initial start and end dates
    StartDate = str(dt[0])
    EndDate = str(dt[-1])
    # find the start index of the first whole day (time=00:30)
    si = pfp_utils.GetDateIndex(dt, StartDate, ts=ts, default=0, match='startnextday')
    # find the end index of the last whole day (time=00:00)
    ei = pfp_utils.GetDateIndex(dt, EndDate, ts=ts, default=-1, match='endpreviousday')
    # get local views of the datetime series
    ldt = dt[si:ei + 1]
    Hdh = Hdh[si:ei + 1]
    Month = Month[si:ei + 1]
    # get the number of time steps in a day and the number of days in the data
    ntsInDay = int(24.0 * 60.0 / float(ts))
    # FIX: use floor division so nDays is an int under Python 3; the old
    # "int(len(ldt))/ntsInDay" yields a float which later makes
    # reshape(nDays, ntsInDay) raise a TypeError
    nDays = len(ldt) // ntsInDay
    for ThisOne in cf['Variables'].keys():
        # allow the control file to alias a variable to another series name
        if "AltVarName" in cf['Variables'][ThisOne].keys():
            ThisOne = cf['Variables'][ThisOne]["AltVarName"]
        if ThisOne in ds.series.keys():
            logger.info(" Doing climatology for " + ThisOne)
            data, f, a = pfp_utils.GetSeriesasMA(ds, ThisOne, si=si, ei=ei)
            if numpy.ma.count(data) == 0:
                logger.warning(" No data for " + ThisOne + ", skipping ...")
                continue
            fmt_str = get_formatstring(cf, ThisOne, fmt_def='')
            xlSheet = xlFile.add_sheet(ThisOne)
            Av_all = do_diurnalstats(Month, Hdh, data, xlSheet, format_string=fmt_str, ts=ts)
            # now do it for each day
            # we want to preserve any data that has been truncated by the use of the
            # "startnextday" and "endpreviousday" match options used above.  Here we
            # revisit the start and end indices and adjust these backwards and forwards
            # respectively if data has been truncated.
            nDays_daily = nDays
            ei_daily = ei
            si_daily = si
            sdate = ldt[0]
            edate = ldt[-1]
            # is there data after the current end date?
            if dt[-1] > ldt[-1]:
                # if so, push the end index forward by 1 day so it is included
                ei_daily = ei + ntsInDay
                nDays_daily = nDays_daily + 1
                edate = ldt[-1] + datetime.timedelta(days=1)
            # is there data before the current start date?
            if dt[0] < ldt[0]:
                # if so, push the start index back by 1 day so it is included
                si_daily = si - ntsInDay
                nDays_daily = nDays_daily + 1
                sdate = ldt[0] - datetime.timedelta(days=1)
            # get the data and use the "pad" option to add missing data if required to
            # complete the extra days
            data, f, a = pfp_utils.GetSeriesasMA(ds, ThisOne, si=si_daily, ei=ei_daily,
                                                 mode="pad")
            data_daily = data.reshape(nDays_daily, ntsInDay)
            xlSheet = xlFile.add_sheet(ThisOne + '(day)')
            write_data_1columnpertimestep(xlSheet, data_daily, ts, startdate=sdate,
                                          format_string=fmt_str)
            data_daily_i = do_2dinterpolation(data_daily)
            xlSheet = xlFile.add_sheet(ThisOne + 'i(day)')
            write_data_1columnpertimestep(xlSheet, data_daily_i, ts, startdate=sdate,
                                          format_string=fmt_str)
        elif ThisOne == "EF" and got_Fa:
            logger.info(" Doing evaporative fraction")
            # FIX: size the diurnal array by ntsInDay instead of the hard-coded 48,
            # which assumed 30 minute data; identical for ts=30
            EF = numpy.ma.zeros([ntsInDay, 12]) + float(c.missing_value)
            # NOTE(review): this rebinds the local Hdh to the 'Hdh' series from the
            # file, which is then used by the BR/WUE branches - presumably the same
            # values as the Hdh computed above; confirm
            Hdh, f, a = pfp_utils.GetSeriesasMA(ds, 'Hdh', si=si, ei=ei)
            Fa, f, a = pfp_utils.GetSeriesasMA(ds, 'Fa', si=si, ei=ei)
            Fe, f, a = pfp_utils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fa_Num, Hr, Fa_Av, Sd, Mx, Mn = get_diurnalstats(Hdh[mi], Fa[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(Hdh[mi], Fe[mi], ts)
                # only use time-of-day bins with more than 4 points in both series
                index = numpy.ma.where((Fa_Num > 4) & (Fe_Num > 4))
                EF[:, m - 1][index] = Fe_Av[index] / Fa_Av[index]
            # reject EF values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'EF')
            EF = numpy.ma.filled(
                numpy.ma.masked_where((EF > upr) | (EF < lwr), EF),
                float(c.missing_value))
            # write the EF to the Excel file
            xlSheet = xlFile.add_sheet('EF')
            write_data_1columnpermonth(xlSheet, EF, ts, format_string='0.00')
            # do the 2D interpolation to fill missing EF values
            EFi = do_2dinterpolation(EF)
            xlSheet = xlFile.add_sheet('EFi')
            write_data_1columnpermonth(xlSheet, EFi, ts, format_string='0.00')
            # now do EF for each day
            Fa, f, a = pfp_utils.GetSeriesasMA(ds, 'Fa', si=si, ei=ei)
            Fe, f, a = pfp_utils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            EF = Fe / Fa
            EF = numpy.ma.filled(
                numpy.ma.masked_where((EF > upr) | (EF < lwr), EF),
                float(c.missing_value))
            EF_daily = EF.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('EF(day)')
            write_data_1columnpertimestep(xlSheet, EF_daily, ts, startdate=ldt[0],
                                          format_string='0.00')
            EFi = do_2dinterpolation(EF_daily)
            xlSheet = xlFile.add_sheet('EFi(day)')
            write_data_1columnpertimestep(xlSheet, EFi, ts, startdate=ldt[0],
                                          format_string='0.00')
        elif ThisOne == "BR":
            logger.info(" Doing Bowen ratio")
            # FIX: size by ntsInDay instead of the hard-coded 48 (see EF above)
            BR = numpy.ma.zeros([ntsInDay, 12]) + float(c.missing_value)
            Fe, f, a = pfp_utils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fh, f, a = pfp_utils.GetSeriesasMA(ds, 'Fh', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fh_Num, Hr, Fh_Av, Sd, Mx, Mn = get_diurnalstats(Hdh[mi], Fh[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fh_Num > 4) & (Fe_Num > 4))
                BR[:, m - 1][index] = Fh_Av[index] / Fe_Av[index]
            # reject BR values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'BR')
            BR = numpy.ma.filled(
                numpy.ma.masked_where((BR > upr) | (BR < lwr), BR),
                float(c.missing_value))
            # write the BR to the Excel file
            xlSheet = xlFile.add_sheet('BR')
            write_data_1columnpermonth(xlSheet, BR, ts, format_string='0.00')
            # do the 2D interpolation to fill missing BR values
            BRi = do_2dinterpolation(BR)
            xlSheet = xlFile.add_sheet('BRi')
            write_data_1columnpermonth(xlSheet, BRi, ts, format_string='0.00')
            # now do BR for each day ...
            Fe, f, a = pfp_utils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fh, f, a = pfp_utils.GetSeriesasMA(ds, 'Fh', si=si, ei=ei)
            BR = Fh / Fe
            BR = numpy.ma.filled(
                numpy.ma.masked_where((BR > upr) | (BR < lwr), BR),
                float(c.missing_value))
            BR_daily = BR.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('BR(day)')
            write_data_1columnpertimestep(xlSheet, BR_daily, ts, startdate=ldt[0],
                                          format_string='0.00')
            BRi = do_2dinterpolation(BR_daily)
            xlSheet = xlFile.add_sheet('BRi(day)')
            write_data_1columnpertimestep(xlSheet, BRi, ts, startdate=ldt[0],
                                          format_string='0.00')
        elif ThisOne == "WUE":
            logger.info(" Doing ecosystem WUE")
            # FIX: size by ntsInDay instead of the hard-coded 48 (see EF above)
            WUE = numpy.ma.zeros([ntsInDay, 12]) + float(c.missing_value)
            Fe, f, a = pfp_utils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fc, f, a = pfp_utils.GetSeriesasMA(ds, 'Fc', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fc_Num, Hr, Fc_Av, Sd, Mx, Mn = get_diurnalstats(Hdh[mi], Fc[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fc_Num > 4) & (Fe_Num > 4))
                WUE[:, m - 1][index] = Fc_Av[index] / Fe_Av[index]
            # reject WUE values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'WUE')
            WUE = numpy.ma.filled(
                numpy.ma.masked_where((WUE > upr) | (WUE < lwr), WUE),
                float(c.missing_value))
            # write the WUE to the Excel file
            xlSheet = xlFile.add_sheet('WUE')
            write_data_1columnpermonth(xlSheet, WUE, ts, format_string='0.00000')
            # do the 2D interpolation to fill missing WUE values
            WUEi = do_2dinterpolation(WUE)
            xlSheet = xlFile.add_sheet('WUEi')
            write_data_1columnpermonth(xlSheet, WUEi, ts, format_string='0.00000')
            # now do WUE for each day ...
            Fe, f, a = pfp_utils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fc, f, a = pfp_utils.GetSeriesasMA(ds, 'Fc', si=si, ei=ei)
            WUE = Fc / Fe
            WUE = numpy.ma.filled(
                numpy.ma.masked_where((WUE > upr) | (WUE < lwr), WUE),
                float(c.missing_value))
            WUE_daily = WUE.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('WUE(day)')
            write_data_1columnpertimestep(xlSheet, WUE_daily, ts, startdate=ldt[0],
                                          format_string='0.00000')
            WUEi = do_2dinterpolation(WUE_daily)
            xlSheet = xlFile.add_sheet('WUEi(day)')
            write_data_1columnpertimestep(xlSheet, WUEi, ts, startdate=ldt[0],
                                          format_string='0.00000')
        else:
            logger.warning(" Requested variable " + ThisOne + " not in data structure")
    logger.info(" Saving Excel file " + os.path.split(xl_filename)[1])
    xlFile.save(xl_filename)
def l3qc(cf, ds2):
    """
    Perform L3 processing (newer implementation): compute corrected fluxes
    and derived meteorological quantities from the L2 data.

    The processing order is significant: the control file is parsed for L3
    options first, then humidities/temperatures/CO2 are combined, fluxes
    are recalculated from covariances unless "UseL2Fluxes" is set
    (rotation, Massman, WPL), followed by storage corrections, the
    radiation/wind/soil sections and final QC and coverage statistics.

    Args:
        cf: control file object with [Options] and [Variables] sections.
        ds2: L2 data structure.

    Returns:
        ds3: L3 data structure; returned early if control file parsing or
             a WPL correction reports failure.
    """
    # make a copy of the L2 data
    ds3 = copy.deepcopy(ds2)
    # set some attributes for this level
    pfp_utils.UpdateGlobalAttributes(cf, ds3, "L3")
    # check to see if we have any imports
    pfp_gf.ImportSeries(cf, ds3)
    # apply linear corrections to the data
    pfp_ck.do_linear(cf, ds3)
    # parse the control file for information on how the user wants to do the processing
    l3_info = pfp_compliance.ParseL3ControlFile(cf, ds3)
    if l3_info["status"]["value"] != 0:
        logger.error(l3_info["status"]["message"])
        return ds3
    # ************************
    # *** Merge humidities ***
    # ************************
    # merge whatever humidities are available
    pfp_ts.MergeHumidities(cf, ds3, convert_units=True)
    # **************************
    # *** Merge temperatures ***
    # **************************
    # get the air temperature from the CSAT virtual temperature
    pfp_ts.TaFromTv(cf, ds3)
    # merge the HMP and corrected CSAT data
    pfp_ts.CombineSeries(cf, ds3, "Ta", convert_units=True)
    pfp_utils.CheckUnits(ds3, "Ta", "degC", convert_units=True)
    # ***************************
    # *** Calcuate humidities ***
    # ***************************
    # calculate humidities (absolute, specific and relative) from whatever is available
    pfp_ts.CalculateHumidities(ds3)
    # ********************************
    # *** Merge CO2 concentrations ***
    # ********************************
    # merge the CO2 concentration (label resolved by ParseL3ControlFile)
    pfp_ts.CombineSeries(cf, ds3, l3_info["CO2"]["label"], convert_units=True)
    # ******************************************
    # *** Calculate meteorological variables ***
    # ******************************************
    # Update meteorological variables
    pfp_ts.CalculateMeteorologicalVariables(ds3, l3_info)
    # *************************************************
    # *** Calculate fluxes from covariances section ***
    # *************************************************
    # check to see if the user wants to use the fluxes in the L2 file
    if not pfp_utils.get_optionskeyaslogical(cf, "UseL2Fluxes", default=False):
        # check the covariance units and change if necessary
        pfp_ts.CheckCovarianceUnits(ds3)
        # do the 2D coordinate rotation
        pfp_ts.CoordRotation2D(cf, ds3)
        # do the Massman frequency attenuation correction
        pfp_ts.MassmanStandard(cf, ds3)
        # calculate the fluxes
        pfp_ts.CalculateFluxes(cf, ds3)
        # approximate wT from virtual wT using wA (ref: Campbell OPECSystem manual)
        pfp_ts.FhvtoFh(cf, ds3)
        # correct the H2O & CO2 flux due to effects of flux on density measurements;
        # a truthy return signals failure, so bail out
        if pfp_ts.Fe_WPL(cf, ds3):
            return ds3
        if pfp_ts.Fco2_WPL(cf, ds3):
            return ds3
    # **************************
    # *** CO2 and Fc section ***
    # **************************
    # convert CO2 units if required
    pfp_utils.ConvertCO2Units(cf, ds3)
    # calculate Fco2 storage term - single height only at present
    pfp_ts.CalculateFco2StorageSinglePoint(cf, ds3, l3_info["CO2"]["label"])
    # convert Fco2 units if required
    pfp_utils.ConvertFco2Units(cf, ds3)
    # merge Fco2 and Fco2_storage series if required
    pfp_ts.CombineSeries(cf, ds3, l3_info["Fco2"]["combine_list"], save_originals=True)
    # correct Fco2 for storage term - only recommended if storage calculated from profile available
    pfp_ts.CorrectFco2ForStorage(cf, ds3)
    # *************************
    # *** Radiation section ***
    # *************************
    # merge the incoming shortwave radiation
    pfp_ts.CombineSeries(cf, ds3, "Fsd")
    # calculate the net radiation from the Kipp and Zonen CNR1
    pfp_ts.CalculateNetRadiation(cf, ds3)
    pfp_ts.CombineSeries(cf, ds3, "Fn")
    # ****************************************
    # *** Wind speed and direction section ***
    # ****************************************
    # combine wind speed from the Wind Sentry and the SONIC
    pfp_ts.CombineSeries(cf, ds3, "Ws")
    # combine wind direction from the Wind Sentry and the SONIC
    pfp_ts.CombineSeries(cf, ds3, "Wd")
    # ********************
    # *** Soil section ***
    # ********************
    # correct soil heat flux for storage
    # ... either average the raw ground heat flux, soil temperature and moisture
    #     and then do the correction (OzFlux "standard")
    pfp_ts.CombineSeries(cf, ds3, "Ts")
    pfp_ts.CombineSeries(cf, ds3, "Sws")
    if pfp_utils.get_optionskeyaslogical(cf, "CorrectIndividualFg"):
        # ... or correct the individual ground heat flux measurements (James' method)
        pfp_ts.CorrectIndividualFgForStorage(cf, ds3)
        pfp_ts.CombineSeries(cf, ds3, "Fg")
    else:
        pfp_ts.CombineSeries(cf, ds3, "Fg")
        pfp_ts.CorrectFgForStorage(cf, ds3)
    # calculate the available energy
    pfp_ts.CalculateAvailableEnergy(ds3)
    # create new series using MergeSeries or AverageSeries
    pfp_ck.CreateNewSeries(cf, ds3)
    # Calculate Monin-Obukhov length
    pfp_ts.CalculateMoninObukhovLength(ds3)
    # re-apply the quality control checks (range, diurnal and rules)
    pfp_ck.do_qcchecks(cf, ds3)
    # check missing data and QC flags are consistent
    pfp_utils.CheckQCFlags(ds3)
    # get the statistics for the QC flags and write these to an Excel spreadsheet
    pfp_io.get_seriesstats(cf, ds3)
    # write the percentage of good data as a variable attribute
    pfp_utils.get_coverage_individual(ds3)
    # write the percentage of good data for groups
    pfp_utils.get_coverage_groups(ds3)
    # remove intermediate series from the data structure
    pfp_ts.RemoveIntermediateSeries(ds3, l3_info)
    return ds3