Example #1
0
 def do_plotL3L4(self):
     """
         Plot L3 (QA/QC and Corrected) and L4 (Gap Filled) data in blue and
             red, respectively
         
         Control File for do_l4qc function used.
         If L4 Control File not loaded, requires control file selection.
         """
     # if the L3/L4 data structures are not already cached on this instance,
     # ask the user for a control file and read both netCDF files named in it
     if 'ds3' not in dir(self) or 'ds4' not in dir(self):
         self.cf = qcio.load_controlfile(path='controlfiles')
         if len(self.cf)==0:
             self.do_progress(text='Waiting for input ...')
             return
         l3filename = qcio.get_infilenamefromcf(self.cf)
         if not qcutils.file_exists(l3filename): self.do_progress(text='An error occurred, check the console ...'); return
         self.ds3 = qcio.nc_read_series(l3filename)
         # an empty series dictionary means the read failed; delete the
         # partially-built attribute so the cache test above stays valid
         if len(self.ds3.series.keys())==0: self.do_progress(text='An error occurred, check the console ...'); del self.ds3; return
         l4filename = qcio.get_outfilenamefromcf(self.cf)
         self.ds4 = qcio.nc_read_series(l4filename)
         if len(self.ds4.series.keys())==0: self.do_progress(text='An error occurred, check the console ...'); del self.ds4; return
         self.update_startenddate(str(self.ds3.series['DateTime']['Data'][0]),
                                  str(self.ds3.series['DateTime']['Data'][-1]))
     self.do_progress(text='Plotting L3 and L4 QC ...')
     # re-load the control file recorded in the L4 file's global attributes
     # so the [Plots] section matches the data actually being plotted
     cfname = self.ds4.globalattributes['controlfile_name']
     self.cf = qcio.get_controlfilecontents(cfname)
     for nFig in self.cf['Plots'].keys():
         # the plot window is taken from the GUI start/end date entry fields
         si = qcutils.GetDateIndex(self.ds3.series['DateTime']['Data'],self.plotstartEntry.get(),
                                   ts=self.ds3.globalattributes['time_step'],default=0,match='exact')
         ei = qcutils.GetDateIndex(self.ds3.series['DateTime']['Data'],self.plotendEntry.get(),
                                   ts=self.ds3.globalattributes['time_step'],default=-1,match='exact')
         qcplot.plottimeseries(self.cf,nFig,self.ds3,self.ds4,si,ei)
     self.do_progress(text='Finished plotting L4')
     logging.info(' Finished plotting L4, check the GUI')
Example #2
0
 def do_l6qc(self):
     """
         Run the L6 processing that partitions NEE into GPP and ER.

         Loads a control file, reads the input netCDF file named in it,
         calls qcls.l6qc in interactive mode and writes the partitioned
         data to the output netCDF file.  Progress is reported through
         the GUI; any error causes an early return.
     """
     logging.info(" Starting L6 processing ...")
     control = qcio.load_controlfile(path='controlfiles')
     if len(control) == 0:
         self.do_progress(text='Waiting for input ...')
         return
     in_name = qcio.get_infilenamefromcf(control)
     if len(in_name) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     if not qcutils.file_exists(in_name):
         self.do_progress(text='An error occurred, check the console ...')
         return
     data_l5 = qcio.nc_read_series(in_name)
     if len(data_l5.series.keys()) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         del data_l5
         return
     data_l5.globalattributes['controlfile_name'] = control['controlfile_name']
     self.update_startenddate(str(data_l5.series['DateTime']['Data'][0]),
                              str(data_l5.series['DateTime']['Data'][-1]))
     site = data_l5.globalattributes['site_name']
     self.do_progress(text='Doing L6 partitioning: '+site+' ...')
     if "Options" not in control:
         control["Options"] = {}
     control["Options"]["call_mode"] = "interactive"
     data_l6 = qcls.l6qc(control, data_l5)
     self.do_progress(text='Finished L6: '+site)
     logging.info(' Finished L6: '+site)
     # save the partitioned data to the output netCDF file
     self.do_progress(text='Saving L6 partitioned data ...')
     out_name = qcio.get_outfilenamefromcf(control)
     if len(out_name) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     nc_file = qcio.nc_open_write(out_name)
     out_list = qcio.get_outputlistfromcf(control, 'nc')
     qcio.nc_write_series(nc_file, data_l6, outputlist=out_list)
     self.do_progress(text='Finished saving L6 partitioned data')
     logging.info(' Finished saving L6 partitioned data')
     logging.info("")
Example #3
0
 def do_l4qc(self):
     """
         Call qcls.l4qc_gapfill function
         Performs L4 gap filling on L3 met data
         or
         Ingests L4 gap filled fluxes performed in external SOLO-ANN and
             computes daily sums
         Outputs L4 netCDF file to ncData folder
         Outputs L4 netCDF file to OzFlux folder
         
         ControlFiles:
             L4_year.txt
             or
             L4b.txt
         
         ControlFile contents (see ControlFile/Templates/L4.txt and
         ControlFile/Templates/L4b.txt for examples):
             [General]:
                 Python control parameters (SOLO)
                 Site characteristics parameters (Gap filling)
             [Files]:
                 L3 input file name and path (Gap filling)
                 L4 input file name and path (SOLO)
                 L4 output file name and ncData folder path (both)
                 L4 OzFlux output file name and OzFlux folder path
             [Variables]:
                 Variable subset list for OzFlux output file (where
                     available)
         """
     logging.info(" Starting L4 processing ...")
     # choose the control file; an empty return means the user cancelled
     cf = qcio.load_controlfile(path='controlfiles')
     if len(cf)==0: self.do_progress(text='Waiting for input ...'); return
     infilename = qcio.get_infilenamefromcf(cf)
     if len(infilename)==0: self.do_progress(text='An error occurred, check the console ...'); return
     if not qcutils.file_exists(infilename): self.do_progress(text='An error occurred, check the console ...'); return
     # read the L3 data; an empty series dictionary means the read failed
     ds3 = qcio.nc_read_series(infilename)
     if len(ds3.series.keys())==0: self.do_progress(text='An error occurred, check the console ...'); del ds3; return
     # record the control file name so later plotting steps can re-load it
     ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
     self.update_startenddate(str(ds3.series['DateTime']['Data'][0]),
                              str(ds3.series['DateTime']['Data'][-1]))
     sitename = ds3.globalattributes['site_name']
     self.do_progress(text='Doing L4 gap filling drivers: '+sitename+' ...')
     # force interactive mode so the gap-filling GUIs are shown
     if "Options" not in cf: cf["Options"]={}
     cf["Options"]["call_mode"] = "interactive"
     ds4 = qcls.l4qc(cf,ds3)
     # the gap-filling GUIs can be quit by the user; in that case nothing
     # is written to disk
     if ds4.returncodes["alternate"]=="quit" or ds4.returncodes["solo"]=="quit":
         self.do_progress(text='Quitting L4: '+sitename)
         logging.info(' Quitting L4: '+sitename)
     else:
         self.do_progress(text='Finished L4: '+sitename)
         logging.info(' Finished L4: '+sitename)
         self.do_progress(text='Saving L4 gap filled data ...')         # put up the progress message
         outfilename = qcio.get_outfilenamefromcf(cf)
         if len(outfilename)==0: self.do_progress(text='An error occurred, check the console ...'); return
         ncFile = qcio.nc_open_write(outfilename)
         outputlist = qcio.get_outputlistfromcf(cf,'nc')
         qcio.nc_write_series(ncFile,ds4,outputlist=outputlist)         # save the L4 data
         self.do_progress(text='Finished saving L4 gap filled data')    # tell the user we are done
         logging.info(' Finished saving L4 gap filled data')
     logging.info("")        
Example #4
0
def do_audit_analysis(base_path):
    sites = sorted(os.listdir(base_path))
    for item in sites:
        if not os.path.isdir(os.path.join(base_path, item)):
            sites.remove(item)
    
    site_info = OrderedDict()
    all_sites = {"start_date":datetime.datetime(3000,1,1,0,0),
                 "end_date": datetime.datetime(2000,1,1,0,0)}
    n = 0
    for site in sites:
        portal_dir = os.path.join(base_path, site, "Data", "Portal")
        l3_name = os.path.join(portal_dir, site + "_L3.nc")
        if os.path.isfile(l3_name):
            print "Processing ", site
            site_info[site] = {"file_name":l3_name}
            ds = qcio.nc_read_series(l3_name)
            site_info[site]["site_name"] = ds.globalattributes["site_name"]
            start_date = dateutil.parser.parse(ds.globalattributes["start_date"])
            site_info[site]["start_date"] = start_date
            end_date = dateutil.parser.parse(ds.globalattributes["end_date"])
            site_info[site]["end_date"] = end_date
            site_info[site]["X"] = numpy.array([start_date, end_date])
            site_info[site]["Y"] = numpy.array([n+1, n+1])
            n = n + 1
            all_sites["start_date"] = min([all_sites["start_date"], site_info[site]["start_date"]])
            all_sites["end_date"] = max([all_sites["end_date"], site_info[site]["end_date"]])
    
    with open('audit_analysis.pickle', 'wb') as handle:
        pickle.dump([all_sites, site_info], handle, protocol=pickle.HIGHEST_PROTOCOL)

    return all_sites, site_info
Example #5
0
def mpt_main(cf):
    """Run the MPT u* threshold analysis described by control file cf."""
    # build the full path of the input netCDF file from the control file
    nc_file_name = cf["Files"]["in_filename"]
    nc_file_path = os.path.join(cf["Files"]["file_path"], nc_file_name)
    # read the data, run the external MPT code and harvest its output
    ds = qcio.nc_read_series(nc_file_path)
    ustar_results = read_mpt_output(run_mpt_code(ds, nc_file_name))
    # write the results next to the input file as an Excel workbook
    xl_write_mpt(nc_file_path.replace(".nc", "_MPT.xls"), ustar_results)
    return
Example #6
0
 def do_plotL1L2(self):
     """
         Plot L1 (raw) and L2 (QA/QC) data in blue and red, respectively
         
         Control File for do_l2qc function used.
         If L2 Control File not loaded, requires control file selection.
         """
     # if the L1/L2 data structures are not cached on this instance, ask the
     # user for a control file and read both netCDF files named in it
     if 'ds1' not in dir(self) or 'ds2' not in dir(self):
         self.cf = qcio.load_controlfile(path='controlfiles')
         if len(self.cf)==0: self.do_progress(text='Waiting for input ...'); return
         l1filename = qcio.get_infilenamefromcf(self.cf)
         if not qcutils.file_exists(l1filename): self.do_progress(text='An error occurred, check the console ...'); return
         self.ds1 = qcio.nc_read_series(l1filename)
         # an empty series dictionary means the read failed; delete the
         # partially-built attribute so the cache test above stays valid
         if len(self.ds1.series.keys())==0: self.do_progress(text='An error occurred, check the console ...'); del self.ds1; return
         l2filename = qcio.get_outfilenamefromcf(self.cf)
         self.ds2 = qcio.nc_read_series(l2filename)
         if len(self.ds2.series.keys())==0: self.do_progress(text='An error occurred, check the console ...'); del self.ds2; return
         self.update_startenddate(str(self.ds1.series['DateTime']['Data'][0]),
                                  str(self.ds1.series['DateTime']['Data'][-1]))
     self.do_progress(text='Plotting L1 & L2 QC ...')
     # use the control file recorded in the L2 file's global attributes so
     # the [Plots] section matches the data actually being plotted
     cfname = self.ds2.globalattributes['controlfile_name']
     self.cf = qcio.get_controlfilecontents(cfname)
     for nFig in self.cf['Plots'].keys():
         # the plot window is taken from the GUI start/end date entry fields
         si = qcutils.GetDateIndex(self.ds1.series['DateTime']['Data'],self.plotstartEntry.get(),
                                   ts=self.ds1.globalattributes['time_step'],default=0,match='exact')
         ei = qcutils.GetDateIndex(self.ds1.series['DateTime']['Data'],self.plotendEntry.get(),
                                   ts=self.ds1.globalattributes['time_step'],default=-1,match='exact')
         plt_cf = self.cf['Plots'][str(nFig)]
         # a plot entry may request an XY plot via its 'Type' key; the
         # default is a time series plot
         if 'Type' in plt_cf.keys():
             if str(plt_cf['Type']).lower() =='xy':
                 self.do_progress(text='Plotting L1 and L2 XY ...')
                 qcplot.plotxy(self.cf,nFig,plt_cf,self.ds1,self.ds2,si,ei)
             else:
                 self.do_progress(text='Plotting L1 and L2 QC ...')
                 qcplot.plottimeseries(self.cf,nFig,self.ds1,self.ds2,si,ei)
         else:
             self.do_progress(text='Plotting L1 and L2 QC ...')
             qcplot.plottimeseries(self.cf,nFig,self.ds1,self.ds2,si,ei)
     self.do_progress(text='Finished plotting L1 and L2')
     logging.info(' Finished plotting L1 and L2, check the GUI')
Example #7
0
 def do_l2qc(self):
     """
         Run the L2 QA/QC processing.

         Loads an L2 control file, reads the L1 netCDF input named in it,
         applies the L2 quality checks via qcls.l2qc and writes the result
         to the L2 output netCDF file.  Progress is reported through the
         GUI progress line; on any error the method returns early.  See
         ControlFile/Templates/L2.txt for the control file layout.
     """
     logging.info(" Starting L2 processing ...")
     self.do_progress(text='Load L2 Control File ...')
     self.cf = qcio.load_controlfile(path='controlfiles')
     if len(self.cf) == 0:
         logging.info(" L2: no control file chosen")
         self.do_progress(text='Waiting for input ...')
         return
     in_name = qcio.get_infilenamefromcf(self.cf)
     if not qcutils.file_exists(in_name):
         self.do_progress(text='An error occurred, check the console ...')
         return
     self.do_progress(text='Doing L2 QC ...')
     self.ds1 = qcio.nc_read_series(in_name)
     if len(self.ds1.series.keys()) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         del self.ds1
         return
     self.update_startenddate(str(self.ds1.series['DateTime']['Data'][0]),
                              str(self.ds1.series['DateTime']['Data'][-1]))
     self.ds2 = qcls.l2qc(self.cf, self.ds1)
     logging.info(' Finished L2 QC process')
     self.do_progress(text='Finished L2 QC process')
     # save the L2 data to the output netCDF file
     self.do_progress(text='Saving L2 QC ...')
     out_name = qcio.get_outfilenamefromcf(self.cf)
     if len(out_name) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     nc_file = qcio.nc_open_write(out_name)
     qcio.nc_write_series(nc_file, self.ds2)
     self.do_progress(text='Finished saving L2 QC data')
     logging.info(' Finished saving L2 QC data')
     logging.info("") 
Example #8
0
def ImportSeries(cf, ds):
    """
    Import the series listed in the [Imports] section of control file cf
    into the data structure ds.

    Each [Imports] entry must supply a file_name and a var_name; entries
    missing either are logged and skipped.  Imported data is aligned to
    the datetimes already present in ds; timestamps in ds that have no
    match in the import file are left at c.missing_value with flag 1.
    """
    # check to see if there is an Imports section
    if "Imports" not in cf.keys(): return
    # number of records
    nRecs = int(ds.globalattributes["nc_nrecs"])
    # get the start and end datetime
    ldt = ds.series["DateTime"]["Data"]
    start_date = ldt[0]
    end_date = ldt[-1]
    # loop over the series in the Imports section
    for label in cf["Imports"].keys():
        import_filename = qcutils.get_keyvaluefromcf(cf, ["Imports", label],
                                                     "file_name",
                                                     default="")
        if import_filename == "":
            msg = " ImportSeries: import filename not found in control file, skipping ..."
            logger.warning(msg)
            continue
        var_name = qcutils.get_keyvaluefromcf(cf, ["Imports", label],
                                              "var_name",
                                              default="")
        if var_name == "":
            msg = " ImportSeries: variable name not found in control file, skipping ..."
            logger.warning(msg)
            continue
        ds_import = qcio.nc_read_series(import_filename)
        ts_import = ds_import.globalattributes["time_step"]
        ldt_import = ds_import.series["DateTime"]["Data"]
        # clip the import series to the datetime range of ds
        si = qcutils.GetDateIndex(ldt_import,
                                  str(start_date),
                                  ts=ts_import,
                                  default=0,
                                  match="exact")
        ei = qcutils.GetDateIndex(ldt_import,
                                  str(end_date),
                                  ts=ts_import,
                                  default=len(ldt_import) - 1,
                                  match="exact")
        # start from all-missing data (flag 1) and fill in the matches
        data = numpy.ma.ones(nRecs) * float(c.missing_value)
        flag = numpy.ma.ones(nRecs)
        data_import, flag_import, attr_import = qcutils.GetSeriesasMA(
            ds_import, var_name, si=si, ei=ei)
        ldt_import = ldt_import[si:ei + 1]
        # map each imported timestamp onto its position in ds's datetime axis
        index = qcutils.FindIndicesOfBInA(ldt_import, ldt)
        data[index] = data_import
        flag[index] = flag_import
        qcutils.CreateSeries(ds, label, data, flag, attr_import)
Example #9
0
def compare_eddypro():
    """
    Compare u*, Fh, Fe and Fc from an EddyPro full output file against the
    same series from an OzFluxQC L3 netCDF file.

    The user chooses both files via dialogs.  The series are clipped to
    their common date range, the OzFluxQC range-check limits are copied
    onto and applied to the EddyPro data, and the four pairs are shown as
    2x2 XY plots with regression lines.
    """
    epname = qcio.get_filename_dialog(title='Choose an EddyPro full output file')
    ofname = qcio.get_filename_dialog(title='Choose an L3 output file')
    
    ds_ep = qcio.read_eddypro_full(epname)
    ds_of = qcio.nc_read_series(ofname)
    
    dt_ep = ds_ep.series['DateTime']['Data']
    dt_of = ds_of.series['DateTime']['Data']
    
    # overlap window: latest start to earliest end of the two series
    start_datetime = max([dt_ep[0],dt_of[0]])
    end_datetime = min([dt_ep[-1],dt_of[-1]])
    
    # NOTE(review): ts=30 assumes a 30 minute time step in both files — confirm
    si_of = qcutils.GetDateIndex(dt_of, str(start_datetime), ts=30, default=0, match='exact')
    ei_of = qcutils.GetDateIndex(dt_of, str(end_datetime), ts=30, default=len(dt_of), match='exact')
    si_ep = qcutils.GetDateIndex(dt_ep, str(start_datetime), ts=30, default=0, match='exact')
    ei_ep = qcutils.GetDateIndex(dt_ep, str(end_datetime), ts=30, default=len(dt_ep), match='exact')
    
    us_of = qcutils.GetVariableAsDictionary(ds_of,'ustar',si=si_of,ei=ei_of)
    us_ep = qcutils.GetVariableAsDictionary(ds_ep,'ustar',si=si_ep,ei=ei_ep)
    Fh_of = qcutils.GetVariableAsDictionary(ds_of,'Fh',si=si_of,ei=ei_of)
    Fh_ep = qcutils.GetVariableAsDictionary(ds_ep,'Fh',si=si_ep,ei=ei_ep)
    Fe_of = qcutils.GetVariableAsDictionary(ds_of,'Fe',si=si_of,ei=ei_of)
    Fe_ep = qcutils.GetVariableAsDictionary(ds_ep,'Fe',si=si_ep,ei=ei_ep)
    Fc_of = qcutils.GetVariableAsDictionary(ds_of,'Fc',si=si_of,ei=ei_of)
    Fc_ep = qcutils.GetVariableAsDictionary(ds_ep,'Fc',si=si_ep,ei=ei_ep)
    # copy the range check values from the OFQC attributes to the EP attributes
    for of, ep in zip([us_of, Fh_of, Fe_of, Fc_of], [us_ep, Fh_ep, Fe_ep, Fc_ep]):
        for item in ["rangecheck_upper", "rangecheck_lower"]:
            if item in of["Attr"]:
                ep["Attr"][item] = of["Attr"][item]
    # apply QC to the EddyPro data
    qcck.ApplyRangeCheckToVariable(us_ep)
    qcck.ApplyRangeCheckToVariable(Fc_ep)
    qcck.ApplyRangeCheckToVariable(Fe_ep)
    qcck.ApplyRangeCheckToVariable(Fh_ep)
    # plot the comparison
    plt.ion()
    fig = plt.figure(1,figsize=(8,8))
    qcplot.xyplot(us_ep["Data"],us_of["Data"],sub=[2,2,1],regr=2,xlabel='u*_EP (m/s)',ylabel='u*_OF (m/s)')
    qcplot.xyplot(Fh_ep["Data"],Fh_of["Data"],sub=[2,2,2],regr=2,xlabel='Fh_EP (W/m2)',ylabel='Fh_OF (W/m2)')
    qcplot.xyplot(Fe_ep["Data"],Fe_of["Data"],sub=[2,2,3],regr=2,xlabel='Fe_EP (W/m2)',ylabel='Fe_OF (W/m2)')
    qcplot.xyplot(Fc_ep["Data"],Fc_of["Data"],sub=[2,2,4],regr=2,xlabel='Fc_EP (umol/m2/s)',ylabel='Fc_OF (umol/m2/s)')
    plt.tight_layout()
    plt.draw()
    plt.ioff()
Example #10
0
 def do_plotL6_summary(self):
     """
         Produce the L6 summary plots for a user-chosen control file.
     """
     control = qcio.load_controlfile(path='controlfiles')
     if len(control) == 0:
         self.do_progress(text='Waiting for input ...')
         return
     # force interactive mode so the plots appear in the GUI
     if "Options" not in control:
         control["Options"] = {}
     control["Options"]["call_mode"] = "interactive"
     l6_name = qcio.get_outfilenamefromcf(control)
     if not qcutils.file_exists(l6_name):
         self.do_progress(text='An error occurred, check the console ...')
         return
     data_l6 = qcio.nc_read_series(l6_name)
     if len(data_l6.series.keys()) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         del data_l6
         return
     self.update_startenddate(str(data_l6.series['DateTime']['Data'][0]),
                              str(data_l6.series['DateTime']['Data'][-1]))
     self.do_progress(text='Plotting L6 summary ...')
     qcgf.ImportSeries(control, data_l6)
     qcrp.L6_summary(control, data_l6)
     self.do_progress(text='Finished plotting L6 summary')
     logging.info(' Finished plotting L6 summary, check the GUI')
Example #11
0
def compare_eddypro():
    """
    Compare u*, Fh, Fe and Fc from an EddyPro full output file against the
    same series from an OzFluxQC L3 netCDF file.

    The OzFluxQC series are clipped to the EddyPro date range, the masks
    of each pair are merged so only timestamps valid in both are plotted,
    and the four pairs are shown as 2x2 XY plots with regression lines.
    """
    epname = qcio.get_filename_dialog(title='Choose an EddyPro full output file')
    ofname = qcio.get_filename_dialog(title='Choose an L3 output file')
    
    ds_ep = qcio.read_eddypro_full(epname)
    ds_of = qcio.nc_read_series(ofname)
    
    dt_ep = ds_ep.series['DateTime']['Data']
    dt_of = ds_of.series['DateTime']['Data']
    
    # NOTE(review): list.index raises ValueError if the EddyPro start or end
    # datetime is not present in the OzFluxQC series — assumes full overlap
    si = dt_of.index(dt_ep[0])
    ei = dt_of.index(dt_ep[-1])
    
    us_of,f,a = qcutils.GetSeriesasMA(ds_of,'ustar',si=si,ei=ei)
    us_ep,f,a = qcutils.GetSeriesasMA(ds_ep,'ustar')
    Fh_of,f,a = qcutils.GetSeriesasMA(ds_of,'Fh',si=si,ei=ei)
    Fh_ep,f,a = qcutils.GetSeriesasMA(ds_ep,'Fh')
    Fe_of,f,a = qcutils.GetSeriesasMA(ds_of,'Fe',si=si,ei=ei)
    Fe_ep,f,a = qcutils.GetSeriesasMA(ds_ep,'Fe')
    Fc_of,f,a = qcutils.GetSeriesasMA(ds_of,'Fc',si=si,ei=ei)
    Fc_ep,f,a = qcutils.GetSeriesasMA(ds_ep,'Fc')
    
    # merge the masks of each pair so both members mask the same timestamps
    # (the second assignment of each pair reuses the already-merged mask)
    us_of.mask = numpy.ma.mask_or(us_of.mask,us_ep.mask)
    us_ep.mask = numpy.ma.mask_or(us_of.mask,us_ep.mask)
    Fh_of.mask = numpy.ma.mask_or(Fh_of.mask,Fh_ep.mask)
    Fh_ep.mask = numpy.ma.mask_or(Fh_of.mask,Fh_ep.mask)
    Fe_of.mask = numpy.ma.mask_or(Fe_of.mask,Fe_ep.mask)
    Fe_ep.mask = numpy.ma.mask_or(Fe_of.mask,Fe_ep.mask)
    Fc_of.mask = numpy.ma.mask_or(Fc_of.mask,Fc_ep.mask)
    Fc_ep.mask = numpy.ma.mask_or(Fc_of.mask,Fc_ep.mask)
    
    plt.ion()
    fig = plt.figure(1,figsize=(8,8))
    qcplot.xyplot(us_ep,us_of,sub=[2,2,1],regr=1,xlabel='u*_EP (m/s)',ylabel='u*_OF (m/s)')
    qcplot.xyplot(Fh_ep,Fh_of,sub=[2,2,2],regr=1,xlabel='Fh_EP (W/m2)',ylabel='Fh_OF (W/m2)')
    qcplot.xyplot(Fe_ep,Fe_of,sub=[2,2,3],regr=1,xlabel='Fe_EP (W/m2)',ylabel='Fe_OF (W/m2)')
    qcplot.xyplot(Fc_ep,Fc_of,sub=[2,2,4],regr=1,xlabel='Fc_EP (umol/m2/s)',ylabel='Fc_OF (umol/m2/s)')
    plt.tight_layout()
    plt.draw()
    plt.ioff()
Example #12
0
 def do_plotL3L3(self):
     """
         Plot L3 (QA/QC and Corrected) data
         
         Control File for do_l3qc function used.
         If L3 Control File not loaded, requires control file selection.
         """
     # read the L3 data if it is not already cached on this instance
     if 'ds3' not in dir(self):
         self.cf = qcio.load_controlfile(path='controlfiles')
         if len(self.cf)==0: self.do_progress(text='Waiting for input ...'); return
         l3filename = qcio.get_outfilenamefromcf(self.cf)
         self.ds3 = qcio.nc_read_series(l3filename)
         if len(self.ds3.series.keys())==0: self.do_progress(text='An error occurred, check the console ...'); del self.ds3; return
         self.update_startenddate(str(self.ds3.series['DateTime']['Data'][0]),
                                  str(self.ds3.series['DateTime']['Data'][-1]))
     self.do_progress(text='Plotting L3 QC ...')
     # use the control file recorded in the L3 file's global attributes so
     # the [Plots] section matches the data actually being plotted
     cfname = self.ds3.globalattributes['controlfile_name']
     self.cf = qcio.get_controlfilecontents(cfname)
     for nFig in self.cf['Plots'].keys():
         # the plot window is taken from the GUI start/end date entry fields
         si = qcutils.GetDateIndex(self.ds3.series['DateTime']['Data'],self.plotstartEntry.get(),
                                   ts=self.ds3.globalattributes['time_step'],default=0,match='exact')
         ei = qcutils.GetDateIndex(self.ds3.series['DateTime']['Data'],self.plotendEntry.get(),
                                   ts=self.ds3.globalattributes['time_step'],default=-1,match='exact')
         plt_cf = self.cf['Plots'][str(nFig)]
         if 'Type' in plt_cf.keys():
             if str(plt_cf['Type']).lower() =='xy':
                 self.do_progress(text='Plotting L3 XY ...')
                 qcplot.plotxy(self.cf,nFig,plt_cf,self.ds3,self.ds3,si,ei)
             else:
                 self.do_progress(text='Plotting L3 QC ...')
                 # FIX: removed the dead "SeriesList = ast.literal_eval(plt_cf['Variables'])"
                 # assignment; the result was never used and the call raised
                 # KeyError when a plot entry had no 'Variables' key
                 qcplot.plottimeseries(self.cf,nFig,self.ds3,self.ds3,si,ei)
         else:
             self.do_progress(text='Plotting L3 QC ...')
             qcplot.plottimeseries(self.cf,nFig,self.ds3,self.ds3,si,ei)
     self.do_progress(text='Finished plotting L3')
     logging.info(' Finished plotting L3, check the GUI')
Example #13
0
# compare_ah set-up: load the control file, read the input netCDF file and
# clip the datetime series to whole days (Python 2 script)
nfig = 0
plotwidth = 10.9
plotheight = 7.5
# load the control file
cf = qcio.load_controlfile(path='../controlfiles')
if len(cf)==0: sys.exit()
# minimum number of points and minimum correlation accepted by the analysis
min_n = int(cf["General"]["minimum_number"])
min_r = float(cf["General"]["minimum_correlation"])
# get the input file name
fname = qcio.get_infilenamefromcf(cf)
if not os.path.exists(fname):
    print " compare_ah: Input netCDF file "+fname+" doesn't exist"
    sys.exit()
# read the input file and return the data structure
ds = qcio.nc_read_series(fname)
if len(ds.series.keys())==0: print time.strftime('%X')+' netCDF file '+fname+' not found'; sys.exit()
# get the site name
SiteName = ds.globalattributes['site_name']
# get the time step
ts = int(ds.globalattributes['time_step'])
# get the datetime series
DateTime = ds.series['DateTime']['Data']
# get the initial start and end dates
# find the start index of the first whole day (time=00:30)
si = qcutils.GetDateIndex(DateTime,str(DateTime[0]),ts=ts,default=0,match='startnextday')
# find the end index of the last whole day (time=00:00)
ei = qcutils.GetDateIndex(DateTime,str(DateTime[-1]),ts=ts,default=-1,match='endpreviousday')
# clip the datetime series to a whole number of days
DateTime = DateTime[si:ei+1]
StartDate = DateTime[0]
# AWS time-step check: read an AWS netCDF file, repair any gaps in its time
# step and report the minimum/maximum interval between records
import numpy
import os
import sys
# check the scripts directory is present
if not os.path.exists("../scripts/"):
    print("erai2nc: the scripts directory is missing")
    sys.exit()
# since the scripts directory is there, try importing the modules
sys.path.append('../scripts')
import qcio
import qcutils

# ask the user for the AWS netCDF file to check
aws_name = qcio.get_filename_dialog(path="/mnt/OzFlux/Sites")

ds_aws_30minute = qcio.nc_read_series(aws_name)
has_gaps = qcutils.CheckTimeStep(ds_aws_30minute)
if has_gaps:
    # pad the series to a regular time step and rebuild the Y/M/D/H/M/S series
    print("Problems found with time step")
    qcutils.FixTimeStep(ds_aws_30minute)
    qcutils.get_ymdhmsfromdatetime(ds_aws_30minute)
dt_aws_30minute = ds_aws_30minute.series["DateTime"]["Data"]
# first differences of the datetime series; all should equal the time step
ddt = [
    dt_aws_30minute[i + 1] - dt_aws_30minute[i]
    for i in range(0,
                   len(dt_aws_30minute) - 1)
]
print("Minimum time step is", min(ddt), " Maximum time step is", max(ddt))

dt_aws_30minute = ds_aws_30minute.series["DateTime"]["Data"]
start_date = dt_aws_30minute[0]
end_date = dt_aws_30minute[-1]
Example #15
0
    Fscore['Fmax'] = float(numpy.ma.maximum(Fscore['values']))
    Fscore['iatFmax'] = int(numpy.ma.where(Fscore['values']==Fscore['Fmax'])[0])
    Fscore['usatFmax'] = float(us[Fscore['iatFmax']])
    return Fscore

# initialise some constants
nFig = 0
nTemp = 4
nustarbins = 50
nustarpointsperbin = 5
npointsperseason = nustarpointsperbin*nustarbins*nTemp
# NOTE(review): Python 2 integer division intended here (floor); under
# Python 3 this would become a float — confirm before porting
npointsperjump = npointsperseason/2

ncname = '../../Sites/HowardSprings/Data/Processed/2011/HowardSprings_2011_L3.nc'
# read the netCDF file
ds = qcio.nc_read_series(ncname)
nRecs = int(ds.globalattributes['nc_nrecs'])
# get the data from the data structure
Fsd,f = qcutils.GetSeriesasMA(ds,'Fsd')
Ta,f = qcutils.GetSeriesasMA(ds,'Ta')
ustar,f = qcutils.GetSeriesasMA(ds,'ustar')
# uncomment following line to use real data
Fc,f = qcutils.GetSeriesasMA(ds,'Fc')
# uncomment following 3 lines to use synthetic data
#Fc = numpy.ma.ones(nRecs)*float(5)
#index = numpy.ma.where(ustar<0.25)[0]
#Fc[index] = float(20)*ustar[index]
dt = ds.series['DateTime']['Data']
# get the night time values where Fc is not masked
# (night time is taken as Fsd < 10 W/m2; Ta and ustar must also be valid)
index = numpy.ma.where((Fsd<10)&(Fc.mask==False)&(ustar.mask==False)&(Ta.mask==False))[0]
Fc_n = Fc[index]
Example #16
0
# compare_access set-up: read the tower and ACCESS netCDF files and clip
# the ACCESS datetime series to a whole number of days
import matplotlib.pyplot as plt
import meteorologicalfunctions as mf
import statsmodels.api as sm
import qcio
import qcutils

# open the logging file
log = qcutils.startlog('compare_access','../logfiles/compare_access.log')

# load the control file
cf = qcio.load_controlfile(path='../controlfiles')
if len(cf)==0: sys.exit()
tow_name = cf["Files"]["tower_filename"]
acc_name = cf["Files"]["access_filename"]
# read the data series
ds_tow = qcio.nc_read_series(tow_name)
ds_acc = qcio.nc_read_series(acc_name)
# get the time step and the site name
ts = int(ds_tow.globalattributes["time_step"])
site_name = str(ds_tow.globalattributes["site_name"])
# get the start and end indices for the first and last whole days
dt_acc = ds_acc.series["DateTime"]["Data"]
si_acc = qcutils.GetDateIndex(dt_acc,str(dt_acc[0]),ts=ts,match="startnextday")
# BUG FIX: the end index must be anchored on the LAST datetime (dt_acc[-1]),
# not the first; the original used dt_acc[0], which cannot yield the end of
# the last whole day (cf. the same idiom elsewhere in this code base)
ei_acc = qcutils.GetDateIndex(dt_acc,str(dt_acc[-1]),ts=ts,match="endpreviousday")
dt_acc = dt_acc[si_acc:ei_acc+1]

# number of records, records per hour and per day in the clipped series
nrecs = len(dt_acc)
nperhr = int(float(60)/ts+0.5)
nperday = int(float(24)*nperhr+0.5)
# whole number of days (Python 2 integer division intended)
ndays = nrecs/nperday
nrecs=ndays*nperday
Example #17
0
def CPD_run(cf):
    """
    Prepare the data and configuration for the change point detection
    (CPD) u* threshold analysis.

    cf is the control file object.  Returns a tuple (df, d) where df is
    a pandas DataFrame of the input series indexed by datetime and d is
    a dictionary of run options (paths, bootstrap count, flux period,
    call mode etc.).
    """
    # Set input file and output path and create directories for plots and results
    path_out = cf['Files']['file_path']
    file_in = os.path.join(cf['Files']['file_path'],
                           cf['Files']['in_filename'])
    # default the output file name to <input>_CPD.xls when not given
    if "out_filename" in cf['Files']:
        file_out = os.path.join(cf['Files']['file_path'],
                                cf['Files']['out_filename'])
    else:
        file_out = os.path.join(
            cf['Files']['file_path'],
            cf['Files']['in_filename'].replace(".nc", "_CPD.xls"))
    plot_path = "plots/"
    if "plot_path" in cf["Files"]:
        plot_path = os.path.join(cf["Files"]["plot_path"], "CPD/")
    if not os.path.isdir(plot_path): os.makedirs(plot_path)
    results_path = path_out
    if not os.path.isdir(results_path): os.makedirs(results_path)
    # get a dictionary of the variable names, honouring any AltVarName
    var_list = cf["Variables"].keys()
    names = {}
    for item in var_list:
        if "AltVarName" in cf["Variables"][item].keys():
            names[item] = cf["Variables"][item]["AltVarName"]
        else:
            names[item] = item
    # add the xlDateTime
    names["xlDateTime"] = "xlDateTime"
    names["Year"] = "Year"
    # read the netcdf file
    logger.info(' Reading netCDF file ' + file_in)
    ds = qcio.nc_read_series(file_in)
    dates_list = ds.series["DateTime"]["Data"]
    nrecs = int(ds.globalattributes["nc_nrecs"])
    # now get the data
    d = {}
    f = {}
    for item in names.keys():
        data, flag, attr = qcutils.GetSeries(ds, names[item])
        d[item] = np.where(data == c.missing_value, np.nan, data)
        f[item] = flag
    # set all data to NaNs if any flag not 0 or 10
    # BUG FIX: the original looped "for f_OK in [0, 10]" but tested
    # "f[item] != 0" on every pass, so records flagged 10 (acceptable)
    # were wrongly set to NaN; test both acceptable flag values once
    for item in f.keys():
        idx = np.where((f[item] != 0) & (f[item] != 10))[0]
        if len(idx) != 0:
            for itemd in d.keys():
                d[itemd][idx] = np.nan
    df = pd.DataFrame(d, index=dates_list)
    # replace missing values with NaN
    # BUG FIX: DataFrame.replace returns a new frame; the original call
    # discarded the result, so the replacement never took effect
    df = df.replace(c.missing_value, np.nan)
    # Build dictionary of additional configs
    d = {}
    d['radiation_threshold'] = int(cf['Options']['Fsd_threshold'])
    d['num_bootstraps'] = int(cf['Options']['Num_bootstraps'])
    d['flux_period'] = int(ds.globalattributes["time_step"])
    d['site_name'] = ds.globalattributes["site_name"]
    d["call_mode"] = qcutils.get_keyvaluefromcf(cf, ["Options"],
                                                "call_mode",
                                                default="interactive",
                                                mode="quiet")
    d["show_plots"] = qcutils.get_keyvaluefromcf(cf, ["Options"],
                                                 "show_plots",
                                                 default=True,
                                                 mode="quiet")
    d['plot_tclass'] = False
    if cf['Options']['Plot_TClass'] == 'True': d['plot_tclass'] = True
    if cf['Options']['Output_plots'] == 'True':
        d['plot_path'] = plot_path
    if cf['Options']['Output_results'] == 'True':
        d['results_path'] = results_path
        d["file_out"] = file_out

    return df, d
Example #18
0
# coding: utf-8
# Pasted interactive session (Python 2): compare the Fsd series of two
# netCDF files chosen via dialogs
import qcio
import qcutils
import statsmodels.api as sm
duname=qcio.get_filename_dialog()
drname=qcio.get_filename_dialog()
ds_du=qcio.nc_read_series(duname)
ds_dr=qcio.nc_read_series(drname)
# NOTE(review): the next line references 'ds', which is not defined in this
# session and raises a NameError; the corrected statement follows it
dt_du=ds.series["DateTime"]["Data"]
dt_du=ds_du.series["DateTime"]["Data"]
dt_dr=ds_dr.series["DateTime"]["Data"]
print dt_du[0],dt_dr[0]
print dt_du[-1],dt_dr[-1]
ts=ds_du.globalattributes["time_step"]
# indices of the first/last whole day of the overlapping period
si=qcutils.GetDateIndex(dt_du,str(dt_du[0]),ts=ts,match="startnextday")
ei=qcutils.GetDateIndex(dt_du,str(dt_dr[-1]),ts=ts,match="endpreviousday")
print si,ei
Fsd_du,f=qcutils.GetSeriesasMA(ds_du,'Fsd',si=si,ei=ei)
Fsd_dr,f=qcutils.GetSeriesasMA(ds_dr,'Fsd',si=si,ei=ei)
# NOTE(review): plot/figure below are bare pylab-style calls; they assume an
# interactive pylab session — confirm before reusing as a script
plot(Fsd_dr,Fsd_du)
# NOTE(review): the first call below passes the data structure ds_du where a
# datetime list is expected; the corrected call follows it
ei=qcutils.GetDateIndex(ds_du,"2009-01-01 00:00",ts=ts,match="endpreviousday")
ei=qcutils.GetDateIndex(dt_du,"2009-01-01 00:00",ts=ts,match="endpreviousday")
fig=figure(1)
Fsd_du_2008,f=qcutils.GetSeriesasMA(ds_du,'Fsd',si=si,ei=ei)
Fsd_dr_2008,f=qcutils.GetSeriesasMA(ds_dr,'Fsd',si=si,ei=ei)
plot(Fsd_du_2008,Fsd_dr_2008,'b.')
# merge the masks so both series reject the same timestamps, then compress
Fsd_du_2008.mask=(Fsd_du_2008.mask==True)|(Fsd_dr_2008.mask==True)
Fsd_dr_2008.mask=(Fsd_du_2008.mask==True)|(Fsd_dr_2008.mask==True)
Fsd_du_2008=numpy.ma.compressed(Fsd_du_2008)
Fsd_dr_2008=numpy.ma.compressed(Fsd_dr_2008)
x=Fsd_du_2008
Example #19
0
# Setup section of a comparison script ("compare_ah" per the error message
# below): load a control file, open the input netCDF file and locate the
# first whole day in the datetime series.
# NOTE(review): Python 2 (print statements); sys, os, time, qcio and qcutils
# must be imported earlier in the original file.
nfig = 0
plotwidth = 10.9
plotheight = 7.5
# load the control file
cf = qcio.load_controlfile(path='../controlfiles')
if len(cf) == 0: sys.exit()
min_n = int(cf["General"]["minimum_number"])
min_r = float(cf["General"]["minimum_correlation"])
# get the input file name
fname = qcio.get_infilenamefromcf(cf)
if not os.path.exists(fname):
    print " compare_ah: Input netCDF file " + fname + " doesn't exist"
    sys.exit()
# read the input file and return the data structure
ds = qcio.nc_read_series(fname)
if len(ds.series.keys()) == 0:
    print time.strftime('%X') + ' netCDF file ' + fname + ' not found'
    sys.exit()
# get the site name
SiteName = ds.globalattributes['site_name']
# get the time step
ts = int(ds.globalattributes['time_step'])
# get the datetime series
DateTime = ds.series['DateTime']['Data']
# get the initial start and end dates
# find the start index of the first whole day (time=00:30)
# NOTE(review): this call is truncated in this excerpt -- the closing
# arguments (match=...) continue beyond the end of the visible text
si = qcutils.GetDateIndex(DateTime,
                          str(DateTime[0]),
                          ts=ts,
                          default=0,
Example #20
0
def climatology(cf):
    """
    Purpose:
     Calculate climatologies for the variables listed in the [Variables]
     section of the control file and write the results to an Excel workbook
     named "<input>_Climatology.xls".  For each variable both diurnal-by-month
     statistics and day-by-day values (raw and 2D-interpolated) are written.
     The special names "EF" (Fe/Fa), "BR" (Fh/Fe) and "WUE" (Fc/Fe) are
     derived from the corresponding flux series instead of being read
     directly from the file.
    Usage:
     climatology(cf)
     where cf is a control file with [Files] and [Variables] sections.
    Side effects:
     Writes an Excel workbook next to the input netCDF file.
    """
    nc_filename = qcio.get_infilenamefromcf(cf)
    if not qcutils.file_exists(nc_filename): return
    xl_filename = nc_filename.replace(".nc","_Climatology.xls")
    xlFile = xlwt.Workbook()
    ds = qcio.nc_read_series(nc_filename)
    # calculate Fa if it is not in the data structure
    if "Fa" not in ds.series.keys():
        if "Fn" in ds.series.keys() and "Fg" in ds.series.keys():
            qcts.CalculateAvailableEnergy(ds,Fa_out='Fa',Fn_in='Fn',Fg_in='Fg')
        else:
            log.warning(" Climatology: Fn or Fg not in data structure")
    # get the time step
    ts = int(ds.globalattributes['time_step'])
    # get the site name
    SiteName = ds.globalattributes['site_name']
    # get the datetime series
    dt = ds.series['DateTime']['Data']
    Hdh = ds.series['Hdh']['Data']
    Month = ds.series['Month']['Data']
    # get the initial start and end dates
    StartDate = str(dt[0])
    EndDate = str(dt[-1])
    # find the start index of the first whole day (time=00:30)
    si = qcutils.GetDateIndex(dt,StartDate,ts=ts,default=0,match='startnextday')
    # find the end index of the last whole day (time=00:00)
    ei = qcutils.GetDateIndex(dt,EndDate,ts=ts,default=-1,match='endpreviousday')
    # get local views of the datetime series
    ldt = dt[si:ei+1]
    Hdh = Hdh[si:ei+1]
    Month = Month[si:ei+1]
    # get the number of time steps in a day and the number of days in the data
    ntsInDay = int(24.0*60.0/float(ts))
    # explicit floor division so nDays is always an integer (the reshape
    # calls below require an int; "/" would give a float under Python 3)
    nDays = int(len(ldt))//ntsInDay

    for ThisOne in cf['Variables'].keys():
        # the control file entry may point at an alternative series name
        if "AltVarName" in cf['Variables'][ThisOne].keys(): ThisOne = cf['Variables'][ThisOne]["AltVarName"]
        if ThisOne in ds.series.keys():
            log.info(" Doing climatology for "+ThisOne)
            data,f,a = qcutils.GetSeriesasMA(ds,ThisOne,si=si,ei=ei)
            if numpy.ma.count(data)==0:
                log.warning(" No data for "+ThisOne+", skipping ...")
                continue
            fmt_str = get_formatstring(cf,ThisOne,fmt_def='')
            xlSheet = xlFile.add_sheet(ThisOne)
            Av_all = do_diurnalstats(Month,Hdh,data,xlSheet,format_string=fmt_str,ts=ts)
            # now do it for each day
            # we want to preserve any data that has been truncated by the use of the "startnextday"
            # and "endpreviousday" match options used above.  Here we revisit the start and end indices
            # and adjust these backwards and forwards respectively if data has been truncated.
            nDays_daily = nDays
            ei_daily = ei
            si_daily = si
            sdate = ldt[0]
            edate = ldt[-1]
            # is there data after the current end date?
            if dt[-1]>ldt[-1]:
                # if so, push the end index back by 1 day so it is included
                ei_daily = ei + ntsInDay
                nDays_daily = nDays_daily + 1
                edate = ldt[-1]+datetime.timedelta(days=1)
            # is there data before the current start date?
            if dt[0]<ldt[0]:
                # if so, push the start index back by 1 day so it is included
                si_daily = si - ntsInDay
                nDays_daily = nDays_daily + 1
                sdate = ldt[0]-datetime.timedelta(days=1)
            # get the data and use the "pad" option to add missing data if required to
            # complete the extra days
            data,f,a = qcutils.GetSeriesasMA(ds,ThisOne,si=si_daily,ei=ei_daily,mode="pad")
            data_daily = data.reshape(nDays_daily,ntsInDay)
            xlSheet = xlFile.add_sheet(ThisOne+'(day)')
            write_data_1columnpertimestep(xlSheet, data_daily, ts, startdate=sdate, format_string=fmt_str)
            data_daily_i = do_2dinterpolation(data_daily)
            xlSheet = xlFile.add_sheet(ThisOne+'i(day)')
            write_data_1columnpertimestep(xlSheet, data_daily_i, ts, startdate=sdate, format_string=fmt_str)
        elif ThisOne=="EF":
            log.info(" Doing evaporative fraction")
            EF = numpy.ma.zeros([48,12]) + float(c.missing_value)
            Hdh,f,a = qcutils.GetSeriesasMA(ds,'Hdh',si=si,ei=ei)
            Fa,f,a = qcutils.GetSeriesasMA(ds,'Fa',si=si,ei=ei)
            Fe,f,a = qcutils.GetSeriesasMA(ds,'Fe',si=si,ei=ei)
            for m in range(1,13):
                mi = numpy.where(Month==m)[0]
                Fa_Num,Hr,Fa_Av,Sd,Mx,Mn = get_diurnalstats(Hdh[mi],Fa[mi],ts)
                Fe_Num,Hr,Fe_Av,Sd,Mx,Mn = get_diurnalstats(Hdh[mi],Fe[mi],ts)
                # only compute the ratio when both averages are based on
                # more than 4 points
                index = numpy.ma.where((Fa_Num>4)&(Fe_Num>4))
                EF[:,m-1][index] = Fe_Av[index]/Fa_Av[index]
            # reject EF values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf,'EF')
            EF = numpy.ma.filled(numpy.ma.masked_where((EF>upr)|(EF<lwr),EF),float(c.missing_value))
            # write the EF to the Excel file
            xlSheet = xlFile.add_sheet('EF')
            write_data_1columnpermonth(xlSheet, EF, ts, format_string='0.00')
            # do the 2D interpolation to fill missing EF values
            EFi = do_2dinterpolation(EF)
            xlSheet = xlFile.add_sheet('EFi')
            write_data_1columnpermonth(xlSheet, EFi, ts, format_string='0.00')
            # now do EF for each day
            Fa,f,a = qcutils.GetSeriesasMA(ds,'Fa',si=si,ei=ei)
            Fe,f,a = qcutils.GetSeriesasMA(ds,'Fe',si=si,ei=ei)
            EF = Fe/Fa
            EF = numpy.ma.filled(numpy.ma.masked_where((EF>upr)|(EF<lwr),EF),float(c.missing_value))
            EF_daily = EF.reshape(nDays,ntsInDay)
            xlSheet = xlFile.add_sheet('EF(day)')
            write_data_1columnpertimestep(xlSheet, EF_daily, ts, startdate=ldt[0], format_string='0.00')
            EFi = do_2dinterpolation(EF_daily)
            xlSheet = xlFile.add_sheet('EFi(day)')
            write_data_1columnpertimestep(xlSheet, EFi, ts, startdate=ldt[0], format_string='0.00')
        elif ThisOne=="BR":
            log.info(" Doing Bowen ratio")
            BR = numpy.ma.zeros([48,12]) + float(c.missing_value)
            Fe,f,a = qcutils.GetSeriesasMA(ds,'Fe',si=si,ei=ei)
            Fh,f,a = qcutils.GetSeriesasMA(ds,'Fh',si=si,ei=ei)
            for m in range(1,13):
                mi = numpy.where(Month==m)[0]
                Fh_Num,Hr,Fh_Av,Sd,Mx,Mn = get_diurnalstats(Hdh[mi],Fh[mi],ts)
                Fe_Num,Hr,Fe_Av,Sd,Mx,Mn = get_diurnalstats(Hdh[mi],Fe[mi],ts)
                index = numpy.ma.where((Fh_Num>4)&(Fe_Num>4))
                BR[:,m-1][index] = Fh_Av[index]/Fe_Av[index]
            # reject BR values greater than upper limit or less than lower limit
            upr,lwr = get_rangecheck_limit(cf,'BR')
            BR = numpy.ma.filled(numpy.ma.masked_where((BR>upr)|(BR<lwr),BR),float(c.missing_value))
            # write the BR to the Excel file
            xlSheet = xlFile.add_sheet('BR')
            write_data_1columnpermonth(xlSheet, BR, ts, format_string='0.00')
            # do the 2D interpolation to fill missing BR values
            BRi = do_2dinterpolation(BR)
            xlSheet = xlFile.add_sheet('BRi')
            write_data_1columnpermonth(xlSheet, BRi, ts, format_string='0.00')
            # now do BR for each day ...
            Fe,f,a = qcutils.GetSeriesasMA(ds,'Fe',si=si,ei=ei)
            Fh,f,a = qcutils.GetSeriesasMA(ds,'Fh',si=si,ei=ei)
            BR = Fh/Fe
            BR = numpy.ma.filled(numpy.ma.masked_where((BR>upr)|(BR<lwr),BR),float(c.missing_value))
            BR_daily = BR.reshape(nDays,ntsInDay)
            xlSheet = xlFile.add_sheet('BR(day)')
            write_data_1columnpertimestep(xlSheet, BR_daily, ts, startdate=ldt[0], format_string='0.00')
            BRi = do_2dinterpolation(BR_daily)
            xlSheet = xlFile.add_sheet('BRi(day)')
            write_data_1columnpertimestep(xlSheet, BRi, ts, startdate=ldt[0], format_string='0.00')
        elif ThisOne=="WUE":
            log.info(" Doing ecosystem WUE")
            WUE = numpy.ma.zeros([48,12]) + float(c.missing_value)
            Fe,f,a = qcutils.GetSeriesasMA(ds,'Fe',si=si,ei=ei)
            Fc,f,a = qcutils.GetSeriesasMA(ds,'Fc',si=si,ei=ei)
            for m in range(1,13):
                mi = numpy.where(Month==m)[0]
                Fc_Num,Hr,Fc_Av,Sd,Mx,Mn = get_diurnalstats(Hdh[mi],Fc[mi],ts)
                Fe_Num,Hr,Fe_Av,Sd,Mx,Mn = get_diurnalstats(Hdh[mi],Fe[mi],ts)
                index = numpy.ma.where((Fc_Num>4)&(Fe_Num>4))
                WUE[:,m-1][index] = Fc_Av[index]/Fe_Av[index]
            # reject WUE values greater than upper limit or less than lower limit
            upr,lwr = get_rangecheck_limit(cf,'WUE')
            WUE = numpy.ma.filled(numpy.ma.masked_where((WUE>upr)|(WUE<lwr),WUE),float(c.missing_value))
            # write the WUE to the Excel file
            xlSheet = xlFile.add_sheet('WUE')
            write_data_1columnpermonth(xlSheet, WUE, ts, format_string='0.00000')
            # do the 2D interpolation to fill missing WUE values
            WUEi = do_2dinterpolation(WUE)
            xlSheet = xlFile.add_sheet('WUEi')
            write_data_1columnpermonth(xlSheet, WUEi, ts, format_string='0.00000')
            # now do WUE for each day ...
            Fe,f,a = qcutils.GetSeriesasMA(ds,'Fe',si=si,ei=ei)
            Fc,f,a = qcutils.GetSeriesasMA(ds,'Fc',si=si,ei=ei)
            WUE = Fc/Fe
            WUE = numpy.ma.filled(numpy.ma.masked_where((WUE>upr)|(WUE<lwr),WUE),float(c.missing_value))
            WUE_daily = WUE.reshape(nDays,ntsInDay)
            xlSheet = xlFile.add_sheet('WUE(day)')
            write_data_1columnpertimestep(xlSheet, WUE_daily, ts, startdate=ldt[0], format_string='0.00000')
            WUEi = do_2dinterpolation(WUE_daily)
            xlSheet = xlFile.add_sheet('WUEi(day)')
            write_data_1columnpertimestep(xlSheet, WUEi, ts, startdate=ldt[0], format_string='0.00000')
        else:
            log.warning(" qcclim.climatology: requested variable "+ThisOne+" not in data structure")
            continue
    log.info(" Saving Excel file "+xl_filename)
    xlFile.save(xl_filename)
Example #21
0
def gfalternate_createdict(cf, ds, series, ds_alt):
    """
    Purpose:
     Creates a dictionary in ds to hold information about the alternate data used to gap fill the tower data.
    Usage:
     gfalternate_createdict(cf, ds, series, ds_alt)
     where cf is the control file, ds is the tower data structure, series is
     the label of the tower series to be gap filled and ds_alt is a dictionary
     of alternate data structures keyed by file name.
    Side effects:
     Populates ds.alternate[output] for each output listed under the series'
     GapFillFromAlternate section, reads any alternate netCDF file not already
     present in ds_alt, and creates empty output and "<series>_composite"
     series in ds if the output series does not exist yet.
    Author: PRI
    Date: August 2014
    """
    # get the section of the control file containing the series
    section = qcutils.get_cfsection(cf, series=series, mode="quiet")
    # return without doing anything if the series isn't in a control file section
    if len(section) == 0:
        logger.error(
            "GapFillFromAlternate: Series %s not found in control file, skipping ...",
            series)
        return
    # create the alternate directory in the data structure
    if "alternate" not in dir(ds):
        ds.alternate = {}
    # name of alternate output series in ds
    output_list = cf[section][series]["GapFillFromAlternate"].keys()
    # loop over the outputs listed in the control file
    for output in output_list:
        # create the dictionary keys for this output
        ds.alternate[output] = {}
        ds.alternate[output]["label_tower"] = series
        # source name
        ds.alternate[output]["source"] = cf[section][series][
            "GapFillFromAlternate"][output]["source"]
        # site name
        ds.alternate[output]["site_name"] = ds.globalattributes["site_name"]
        # alternate data file name
        # first, look in the [Files] section for a generic file name
        # (matched case-insensitively against the source name)
        file_list = cf["Files"].keys()
        lower_file_list = [item.lower() for item in file_list]
        if ds.alternate[output]["source"].lower() in lower_file_list:
            # found a generic file name
            i = lower_file_list.index(ds.alternate[output]["source"].lower())
            ds.alternate[output]["file_name"] = cf["Files"][file_list[i]]
        else:
            # no generic file name found, look for a file name in the variable section
            ds.alternate[output]["file_name"] = cf[section][series][
                "GapFillFromAlternate"][output]["file_name"]
        # if the file has not already been read, do it now
        # (ds_alt acts as a cache so each alternate file is read only once)
        if ds.alternate[output]["file_name"] not in ds_alt:
            ds_alternate = qcio.nc_read_series(
                ds.alternate[output]["file_name"], fixtimestepmethod="round")
            gfalternate_matchstartendtimes(ds, ds_alternate)
            ds_alt[ds.alternate[output]["file_name"]] = ds_alternate
        # get the type of fit
        # default to ordinary least squares unless a recognised option is given
        ds.alternate[output]["fit_type"] = "OLS"
        if "fit" in cf[section][series]["GapFillFromAlternate"][output]:
            if cf[section][series]["GapFillFromAlternate"][output][
                    "fit"].lower() in [
                        "ols", "ols_thru0", "mrev", "replace", "rma", "odr"
                    ]:
                ds.alternate[output]["fit_type"] = cf[section][series][
                    "GapFillFromAlternate"][output]["fit"]
            else:
                logger.info(
                    "gfAlternate: unrecognised fit option for series %s, used OLS",
                    output)
        # correct for lag?
        # defaults to "yes" when the option is absent or unrecognised
        if "lag" in cf[section][series]["GapFillFromAlternate"][output]:
            if cf[section][series]["GapFillFromAlternate"][output][
                    "lag"].lower() in ["no", "false"]:
                ds.alternate[output]["lag"] = "no"
            elif cf[section][series]["GapFillFromAlternate"][output][
                    "lag"].lower() in ["yes", "true"]:
                ds.alternate[output]["lag"] = "yes"
            else:
                logger.info(
                    "gfAlternate: unrecognised lag option for series %s",
                    output)
        else:
            ds.alternate[output]["lag"] = "yes"
        # choose specific alternate variable?
        # "usevars" is a string representation of a Python literal (e.g. a list)
        if "usevars" in cf[section][series]["GapFillFromAlternate"][output]:
            ds.alternate[output]["usevars"] = ast.literal_eval(
                cf[section][series]["GapFillFromAlternate"][output]["usevars"])
        # alternate data variable name if different from name used in control file
        if "alternate_name" in cf[section][series]["GapFillFromAlternate"][
                output]:
            ds.alternate[output]["alternate_name"] = cf[section][series][
                "GapFillFromAlternate"][output]["alternate_name"]
        else:
            ds.alternate[output]["alternate_name"] = series
        # results of best fit for plotting later on
        ds.alternate[output]["results"] = {
            "startdate": [],
            "enddate": [],
            "No. points": [],
            "No. filled": [],
            "r": [],
            "Bias": [],
            "RMSE": [],
            "Frac Bias": [],
            "NMSE": [],
            "Avg (Tower)": [],
            "Avg (Alt)": [],
            "Var (Tower)": [],
            "Var (Alt)": [],
            "Var ratio": []
        }
        # create an empty series in ds if the alternate output series doesn't exist yet
        if output not in ds.series.keys():
            data, flag, attr = qcutils.MakeEmptySeries(ds, output)
            qcutils.CreateSeries(ds, output, data, flag, attr)
            qcutils.CreateSeries(ds, series + "_composite", data, flag, attr)
Example #22
0
 def do_l3qc(self):
     """
         Call qcls.l3qc_sitename function
         Performs L3 Corrections and QA/QC processing on L2 data
         Outputs L3 netCDF file to ncData folder
         Outputs L3 netCDF file to OzFlux folder
         
         Available corrections:
         * corrections requiring ancillary measurements or samples
           marked with an asterisk
             Linear correction
                 fixed slope
                 linearly shifting slope
             Conversion of virtual temperature to actual temperature
             2D Coordinate rotation
             Massman correction for frequency attenuation*
             Webb, Pearman and Leuning correction for flux effects on density
                 measurements
             Conversion of virtual heat flux to actual heat flux
             Correction of soil moisture content to empirical calibration
                 curve*
             Addition of soil heat storage to ground ground heat flux*
         
         ControlFiles:
             L3_year.txt
             or
             L3a.txt
         
         ControlFile contents (see ControlFile/Templates/L3.txt for example):
             [General]:
                 Python control parameters
             [Files]:
                 L2 input file name and path
                 L3 output file name and ncData folder path
                 L3 OzFlux output file name and OzFlux folder path
             [Massman] (where available):
                 Constants used in frequency attenuation correction
                     zmd: instrument height (z) less zero-plane displacement
                         height (d), m
                     z0: aerodynamic roughness length, m
                     angle: angle from CSAT mounting point between CSAT and
                         IRGA mid-path, degrees
                     CSATarm: distance from CSAT mounting point to CSAT
                         mid-path, m
                     IRGAarm: distance from CSAT mounting point to IRGA
                         mid-path, m
             [Soil]:
                 Constants used in correcting Fg for storage and in empirical
                 corrections of soil water content 
                     FgDepth: Heat flux plate depth, m
                     BulkDensity: Soil bulk density, kg/m3
                     OrganicContent: Soil organic content, fraction
                     SwsDefault
                     Constants for empirical corrections using log(sensor)
                         and exp(sensor) functions (SWC_a0, SWC_a1, SWC_b0,
                         SWC_b1, SWC_t, TDR_a0, TDR_a1, TDR_b0, TDR_b1,
                         TDR_t)
                     Variable and attributes lists (empSWCin, empSWCout,
                         empTDRin, empTDRout, linTDRin, SWCattr, TDRattr)
             [Output]:
                 Variable subset list for OzFlux output file
             [Variables]:
                 Variable names and parameters for:
                     Range check to set upper and lower rejection limits
                     Diurnal check to reject observations by time of day that
                         are outside specified standard deviation limits
                     Timestamps, slope, and offset for Linear correction
             [Plots]:
                 Variable lists for plot generation
         """
     logging.info(" Starting L3 processing ...")
     # ask the user for a control file; an empty result means the dialog was
     # cancelled or the file could not be loaded
     self.cf = qcio.load_controlfile(path='controlfiles')
     if len(self.cf)==0:
         logging.info( " L3: no control file chosen")            
         self.do_progress(text='Waiting for input ...')
         return
     # read the L2 input file named in the control file
     infilename = qcio.get_infilenamefromcf(self.cf)
     if not qcutils.file_exists(infilename): self.do_progress(text='An error occurred, check the console ...'); return
     self.ds2 = qcio.nc_read_series(infilename)
     # an empty series dictionary means the read failed; discard the partial
     # data structure so a later run starts clean
     if len(self.ds2.series.keys())==0: self.do_progress(text='An error occurred, check the console ...'); del self.ds2; return
     # show the data period in the GUI
     self.update_startenddate(str(self.ds2.series['DateTime']['Data'][0]),
                              str(self.ds2.series['DateTime']['Data'][-1]))
     self.do_progress(text='Doing L3 QC & Corrections ...')
     # do the actual L3 processing
     self.ds3 = qcls.l3qc(self.cf,self.ds2)
     self.do_progress(text='Finished L3')
     txtstr = ' Finished L3: Standard processing for site: '
     txtstr = txtstr+self.ds3.globalattributes['site_name'].replace(' ','')
     logging.info(txtstr)
     self.do_progress(text='Saving L3 QC & Corrected NetCDF data ...')       # put up the progress message
     # write the L3 data to the output netCDF file named in the control file
     outfilename = qcio.get_outfilenamefromcf(self.cf)
     if len(outfilename)==0: self.do_progress(text='An error occurred, check the console ...'); return
     ncFile = qcio.nc_open_write(outfilename)
     outputlist = qcio.get_outputlistfromcf(self.cf,'nc')
     qcio.nc_write_series(ncFile,self.ds3,outputlist=outputlist)             # save the L3 data
     self.do_progress(text='Finished saving L3 QC & Corrected NetCDF data')  # tell the user we are done
     logging.info(' Finished saving L3 QC & Corrected NetCDF data')
     logging.info("") 
Example #23
0
# Python 2 script: read an AWS netCDF file, check/fix its time step and find
# the indices of the first and last whole hours in the datetime series.
import numpy
import os
import sys
# check the scripts directory is present
if not os.path.exists("../scripts/"):
    print "erai2nc: the scripts directory is missing"
    sys.exit()
# since the scripts directory is there, try importing the modules
sys.path.append('../scripts')
import qcio
import qcutils

# choose the AWS netCDF file via the file dialog
aws_name=qcio.get_filename_dialog(path="/mnt/OzFlux/Sites")

ds_aws_30minute = qcio.nc_read_series(aws_name)
# check for gaps in the time series and repair them if found
has_gaps = qcutils.CheckTimeStep(ds_aws_30minute)
if has_gaps:
    print "Problems found with time step"
    qcutils.FixTimeStep(ds_aws_30minute)
    qcutils.get_ymdhmsfromdatetime(ds_aws_30minute)
dt_aws_30minute = ds_aws_30minute.series["DateTime"]["Data"]
# report the range of time deltas as a sanity check after the fix
ddt=[dt_aws_30minute[i+1]-dt_aws_30minute[i] for i in range(0,len(dt_aws_30minute)-1)]
print "Minimum time step is",min(ddt)," Maximum time step is",max(ddt)

dt_aws_30minute = ds_aws_30minute.series["DateTime"]["Data"]
start_date = dt_aws_30minute[0]
end_date = dt_aws_30minute[-1]
# trim the series to whole hours (ts=30 => 30 minute data)
si_wholehour = qcutils.GetDateIndex(dt_aws_30minute,str(start_date),ts=30,match="startnexthour")
ei_wholehour = qcutils.GetDateIndex(dt_aws_30minute,str(end_date),ts=30,match="endprevioushour")
start_date = dt_aws_30minute[si_wholehour]
end_date = dt_aws_30minute[ei_wholehour]
Example #24
0
def climatology(cf):
    nc_filename = qcio.get_infilenamefromcf(cf)
    if not qcutils.file_exists(nc_filename): return
    xl_filename = nc_filename.replace(".nc", "_Climatology.xls")
    xlFile = xlwt.Workbook()
    ds = qcio.nc_read_series(nc_filename)
    # calculate Fa if it is not in the data structure
    if "Fa" not in ds.series.keys():
        if "Fn" in ds.series.keys() and "Fg" in ds.series.keys():
            qcts.CalculateAvailableEnergy(ds,
                                          Fa_out='Fa',
                                          Fn_in='Fn',
                                          Fg_in='Fg')
        else:
            log.warning(" Climatology: Fn or Fg not in data struicture")
    # get the time step
    ts = int(ds.globalattributes['time_step'])
    # get the site name
    SiteName = ds.globalattributes['site_name']
    # get the datetime series
    dt = ds.series['DateTime']['Data']
    Hdh = ds.series['Hdh']['Data']
    Month = ds.series['Month']['Data']
    # get the initial start and end dates
    StartDate = str(dt[0])
    EndDate = str(dt[-1])
    # find the start index of the first whole day (time=00:30)
    si = qcutils.GetDateIndex(dt,
                              StartDate,
                              ts=ts,
                              default=0,
                              match='startnextday')
    # find the end index of the last whole day (time=00:00)
    ei = qcutils.GetDateIndex(dt,
                              EndDate,
                              ts=ts,
                              default=-1,
                              match='endpreviousday')
    # get local views of the datetime series
    ldt = dt[si:ei + 1]
    Hdh = Hdh[si:ei + 1]
    Month = Month[si:ei + 1]
    # get the number of time steps in a day and the number of days in the data
    ntsInDay = int(24.0 * 60.0 / float(ts))
    nDays = int(len(ldt)) / ntsInDay

    for ThisOne in cf['Variables'].keys():
        if "AltVarName" in cf['Variables'][ThisOne].keys():
            ThisOne = cf['Variables'][ThisOne]["AltVarName"]
        if ThisOne in ds.series.keys():
            log.info(" Doing climatology for " + ThisOne)
            data, f, a = qcutils.GetSeriesasMA(ds, ThisOne, si=si, ei=ei)
            if numpy.ma.count(data) == 0:
                log.warning(" No data for " + ThisOne + ", skipping ...")
                continue
            fmt_str = get_formatstring(cf, ThisOne, fmt_def='')
            xlSheet = xlFile.add_sheet(ThisOne)
            Av_all = do_diurnalstats(Month,
                                     Hdh,
                                     data,
                                     xlSheet,
                                     format_string=fmt_str,
                                     ts=ts)
            # now do it for each day
            # we want to preserve any data that has been truncated by the use of the "startnextday"
            # and "endpreviousday" match options used above.  Here we revisit the start and end indices
            # and adjust these backwards and forwards respectively if data has been truncated.
            nDays_daily = nDays
            ei_daily = ei
            si_daily = si
            sdate = ldt[0]
            edate = ldt[-1]
            # is there data after the current end date?
            if dt[-1] > ldt[-1]:
                # if so, push the end index back by 1 day so it is included
                ei_daily = ei + ntsInDay
                nDays_daily = nDays_daily + 1
                edate = ldt[-1] + datetime.timedelta(days=1)
            # is there data before the current start date?
            if dt[0] < ldt[0]:
                # if so, push the start index back by 1 day so it is included
                si_daily = si - ntsInDay
                nDays_daily = nDays_daily + 1
                sdate = ldt[0] - datetime.timedelta(days=1)
            # get the data and use the "pad" option to add missing data if required to
            # complete the extra days
            data, f, a = qcutils.GetSeriesasMA(ds,
                                               ThisOne,
                                               si=si_daily,
                                               ei=ei_daily,
                                               mode="pad")
            data_daily = data.reshape(nDays_daily, ntsInDay)
            xlSheet = xlFile.add_sheet(ThisOne + '(day)')
            write_data_1columnpertimestep(xlSheet,
                                          data_daily,
                                          ts,
                                          startdate=sdate,
                                          format_string=fmt_str)
            data_daily_i = do_2dinterpolation(data_daily)
            xlSheet = xlFile.add_sheet(ThisOne + 'i(day)')
            write_data_1columnpertimestep(xlSheet,
                                          data_daily_i,
                                          ts,
                                          startdate=sdate,
                                          format_string=fmt_str)
        elif ThisOne == "EF":
            log.info(" Doing evaporative fraction")
            EF = numpy.ma.zeros([48, 12]) + float(c.missing_value)
            Hdh, f, a = qcutils.GetSeriesasMA(ds, 'Hdh', si=si, ei=ei)
            Fa, f, a = qcutils.GetSeriesasMA(ds, 'Fa', si=si, ei=ei)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fa_Num, Hr, Fa_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fa[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fa_Num > 4) & (Fe_Num > 4))
                EF[:, m - 1][index] = Fe_Av[index] / Fa_Av[index]
            # reject EF values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'EF')
            EF = numpy.ma.filled(
                numpy.ma.masked_where((EF > upr) | (EF < lwr), EF),
                float(c.missing_value))
            # write the EF to the Excel file
            xlSheet = xlFile.add_sheet('EF')
            write_data_1columnpermonth(xlSheet, EF, ts, format_string='0.00')
            # do the 2D interpolation to fill missing EF values
            EFi = do_2dinterpolation(EF)
            xlSheet = xlFile.add_sheet('EFi')
            write_data_1columnpermonth(xlSheet, EFi, ts, format_string='0.00')
            # now do EF for each day
            Fa, f, a = qcutils.GetSeriesasMA(ds, 'Fa', si=si, ei=ei)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            EF = Fe / Fa
            EF = numpy.ma.filled(
                numpy.ma.masked_where((EF > upr) | (EF < lwr), EF),
                float(c.missing_value))
            EF_daily = EF.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('EF(day)')
            write_data_1columnpertimestep(xlSheet,
                                          EF_daily,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
            EFi = do_2dinterpolation(EF_daily)
            xlSheet = xlFile.add_sheet('EFi(day)')
            write_data_1columnpertimestep(xlSheet,
                                          EFi,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
        elif ThisOne == "BR":
            log.info(" Doing Bowen ratio")
            BR = numpy.ma.zeros([48, 12]) + float(c.missing_value)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fh, f, a = qcutils.GetSeriesasMA(ds, 'Fh', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fh_Num, Hr, Fh_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fh[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fh_Num > 4) & (Fe_Num > 4))
                BR[:, m - 1][index] = Fh_Av[index] / Fe_Av[index]
            # reject BR values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'BR')
            BR = numpy.ma.filled(
                numpy.ma.masked_where((BR > upr) | (BR < lwr), BR),
                float(c.missing_value))
            # write the BR to the Excel file
            xlSheet = xlFile.add_sheet('BR')
            write_data_1columnpermonth(xlSheet, BR, ts, format_string='0.00')
            # do the 2D interpolation to fill missing EF values
            BRi = do_2dinterpolation(BR)
            xlSheet = xlFile.add_sheet('BRi')
            write_data_1columnpermonth(xlSheet, BRi, ts, format_string='0.00')
            # now do BR for each day ...
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fh, f, a = qcutils.GetSeriesasMA(ds, 'Fh', si=si, ei=ei)
            BR = Fh / Fe
            BR = numpy.ma.filled(
                numpy.ma.masked_where((BR > upr) | (BR < lwr), BR),
                float(c.missing_value))
            BR_daily = BR.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('BR(day)')
            write_data_1columnpertimestep(xlSheet,
                                          BR_daily,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
            BRi = do_2dinterpolation(BR_daily)
            xlSheet = xlFile.add_sheet('BRi(day)')
            write_data_1columnpertimestep(xlSheet,
                                          BRi,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00')
        elif ThisOne == "WUE":
            log.info(" Doing ecosystem WUE")
            WUE = numpy.ma.zeros([48, 12]) + float(c.missing_value)
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fc, f, a = qcutils.GetSeriesasMA(ds, 'Fc', si=si, ei=ei)
            for m in range(1, 13):
                mi = numpy.where(Month == m)[0]
                Fc_Num, Hr, Fc_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fc[mi], ts)
                Fe_Num, Hr, Fe_Av, Sd, Mx, Mn = get_diurnalstats(
                    Hdh[mi], Fe[mi], ts)
                index = numpy.ma.where((Fc_Num > 4) & (Fe_Num > 4))
                WUE[:, m - 1][index] = Fc_Av[index] / Fe_Av[index]
            # reject WUE values greater than upper limit or less than lower limit
            upr, lwr = get_rangecheck_limit(cf, 'WUE')
            WUE = numpy.ma.filled(
                numpy.ma.masked_where((WUE > upr) | (WUE < lwr), WUE),
                float(c.missing_value))
            # write the WUE to the Excel file
            xlSheet = xlFile.add_sheet('WUE')
            write_data_1columnpermonth(xlSheet,
                                       WUE,
                                       ts,
                                       format_string='0.00000')
            # do the 2D interpolation to fill missing EF values
            WUEi = do_2dinterpolation(WUE)
            xlSheet = xlFile.add_sheet('WUEi')
            write_data_1columnpermonth(xlSheet,
                                       WUEi,
                                       ts,
                                       format_string='0.00000')
            # now do WUE for each day ...
            Fe, f, a = qcutils.GetSeriesasMA(ds, 'Fe', si=si, ei=ei)
            Fc, f, a = qcutils.GetSeriesasMA(ds, 'Fc', si=si, ei=ei)
            WUE = Fc / Fe
            WUE = numpy.ma.filled(
                numpy.ma.masked_where((WUE > upr) | (WUE < lwr), WUE),
                float(c.missing_value))
            WUE_daily = WUE.reshape(nDays, ntsInDay)
            xlSheet = xlFile.add_sheet('WUE(day)')
            write_data_1columnpertimestep(xlSheet,
                                          WUE_daily,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00000')
            WUEi = do_2dinterpolation(WUE_daily)
            xlSheet = xlFile.add_sheet('WUEi(day)')
            write_data_1columnpertimestep(xlSheet,
                                          WUEi,
                                          ts,
                                          startdate=ldt[0],
                                          format_string='0.00000')
        else:
            log.warning(" qcclim.climatology: requested variable " + ThisOne +
                        " not in data structure")
            continue
    log.info(" Saving Excel file " + xl_filename)
    xlFile.save(xl_filename)
Example #25
0
def compare_eddypro():
    """
    Compare turbulent fluxes from an EddyPro full output file with those
    from an OzFluxQC L3 netCDF file over their common period.

    Prompts the user for both files, trims each data set to the overlapping
    date range, copies the OzFluxQC range-check limits onto the EddyPro
    series, applies the range check to the EddyPro data and draws four
    scatter plots (ustar, Fh, Fe, Fc) with regression lines.
    """
    # ask the user for the two input files
    ep_filename = qcio.get_filename_dialog(
        title='Choose an EddyPro full output file')
    of_filename = qcio.get_filename_dialog(title='Choose an L3 output file')
    # read both data sets
    ds_ep = qcio.read_eddypro_full(ep_filename)
    ds_of = qcio.nc_read_series(of_filename)
    # datetime series of each data set
    dt_ep = ds_ep.series['DateTime']['Data']
    dt_of = ds_of.series['DateTime']['Data']
    # the comparison only covers the period common to both files
    start_datetime = max([dt_ep[0], dt_of[0]])
    end_datetime = min([dt_ep[-1], dt_of[-1]])
    # indices of the common period in each data set
    si_of = qcutils.GetDateIndex(dt_of, str(start_datetime), ts=30,
                                 default=0, match='exact')
    ei_of = qcutils.GetDateIndex(dt_of, str(end_datetime), ts=30,
                                 default=len(dt_of), match='exact')
    si_ep = qcutils.GetDateIndex(dt_ep, str(start_datetime), ts=30,
                                 default=0, match='exact')
    ei_ep = qcutils.GetDateIndex(dt_ep, str(end_datetime), ts=30,
                                 default=len(dt_ep), match='exact')
    # get the series to be compared from both data sets
    of_vars = {}
    ep_vars = {}
    for label in ['ustar', 'Fh', 'Fe', 'Fc']:
        of_vars[label] = qcutils.GetVariableAsDictionary(ds_of, label,
                                                         si=si_of, ei=ei_of)
        ep_vars[label] = qcutils.GetVariableAsDictionary(ds_ep, label,
                                                         si=si_ep, ei=ei_ep)
    # copy the range check values from the OFQC attributes to the EP attributes
    for label in ['ustar', 'Fh', 'Fe', 'Fc']:
        for item in ["rangecheck_upper", "rangecheck_lower"]:
            if item in of_vars[label]["Attr"]:
                ep_vars[label]["Attr"][item] = of_vars[label]["Attr"][item]
    # apply QC to the EddyPro data
    for label in ['ustar', 'Fc', 'Fe', 'Fh']:
        qcck.ApplyRangeCheckToVariable(ep_vars[label])
    # plot the comparison, one scatter plot per variable
    plt.ion()
    plt.figure(1, figsize=(8, 8))
    panels = [('ustar', 'u*', 'm/s'),
              ('Fh', 'Fh', 'W/m2'),
              ('Fe', 'Fe', 'W/m2'),
              ('Fc', 'Fc', 'umol/m2/s')]
    for n, (label, symbol, units) in enumerate(panels):
        qcplot.xyplot(ep_vars[label]["Data"],
                      of_vars[label]["Data"],
                      sub=[2, 2, n + 1],
                      regr=2,
                      xlabel=symbol + '_EP (' + units + ')',
                      ylabel=symbol + '_OF (' + units + ')')
    plt.tight_layout()
    plt.draw()
    plt.ioff()
Example #26
0
def _append_function_tag(ds, level, tag):
    """
    Append *tag* to the 'Functions' global attribute of *ds* and to the
    level-specific '<level>Functions' attribute (e.g. 'L4Functions'),
    creating the level-specific attribute if it does not exist yet.
    """
    ds.globalattributes['Functions'] = ds.globalattributes['Functions'] + ', ' + tag
    key = level + 'Functions'
    try:
        ds.globalattributes[key] = ds.globalattributes[key] + ', ' + tag
    except KeyError:
        # first tag recorded at this level
        ds.globalattributes[key] = tag


def _stash_xl_metadata(ds3x, ds3, out_level):
    """
    Save the Excel metadata of the imported (external) data structure under
    'xl<out_level>_*' global attributes and restore the L3 Excel metadata
    as the primary 'xl_*' attributes.
    """
    for item in ['datemode', 'filename', 'moddatetime']:
        ds3x.globalattributes['xl' + out_level + '_' + item] = \
            ds3x.globalattributes['xl_' + item]
        ds3x.globalattributes['xl_' + item] = ds3.globalattributes['xl_' + item]


def l4to6qc(cf, ds3, AttrLevel, InLevel, OutLevel):
    """
        Fill gaps in met data from other sources
        Integrate SOLO-ANN gap filled fluxes performed externally
        Generates L4 from L3 data
        Generates daily sums excel workbook

        Args:
            cf: control file; must contain a 'Functions' section.
            ds3: L3 data structure (the observed data).
            AttrLevel: level recorded in the output attributes, or the
                string 'False' to reset 'Functions' and use InLevel.
            InLevel: level of the input data ('L3' to start from ds3,
                otherwise an external file of that level is read).
            OutLevel: highest level to produce ('L4', 'L5' or 'L6').

        Returns:
            ds3x copy when no 'Functions' section is found;
            ds4 when OutLevel is 'L4'; (ds4, ds5) when 'L5';
            (ds4, ds5, ds6) when 'L6'.

        Variable Series:
            Meteorological (MList): Ah_EC, Cc_7500_Av, ps, Ta_EC, Ws_CSAT, Wd_CSAT
            Radiation (RList): Fld, Flu, Fn, Fsd, Fsu
            Soil water content (SwsList): all variables containing Sws in variable name
            Soil (SList): Fg, Ts, SwsList
            Turbulent fluxes (FList): Fc_wpl, Fe_wpl, Fh, ustar
            Output (OList): MList, RList, SList, FList

        Parameters loaded from control file:
            zmd: z-d
            z0: roughness height
        """
    if AttrLevel == 'False':
        ds3.globalattributes['Functions'] = ''
        AttrLevel = InLevel
    # check to ensure L4 functions are defined in controlfile
    if qcutils.cfkeycheck(cf, Base='Functions'):
        # x, y and z record whether any L4, L5 or L6 functions respectively
        # were applied; l4qc/l5qc/l6qc update and return them
        x = 0
        y = 0
        z = 0
    else:
        log.error('FunctionList not found in control file')
        ds3x = copy.deepcopy(ds3)
        ds3x.globalattributes['nc_level'] = 'L3'
        ds3x.globalattributes['L4Functions'] = 'No L4-L6 functions applied'
        return ds3x

    # handle meta-data and import L4-L6 from external process
    if InLevel == 'L3':
        ds3x = copy.deepcopy(ds3)
    else:
        infilename = qcio.get_infilename_from_cf(cf, InLevel)
        ds3x = qcio.nc_read_series(infilename)

        # copy any global attributes missing from the imported file
        for ThisOne in ds3.globalattributes.keys():
            if ThisOne not in ds3x.globalattributes.keys():
                ds3x.globalattributes[ThisOne] = ds3.globalattributes[ThisOne]

        # refresh the series attributes from L3, keeping the descriptive
        # attributes (names, units) of the imported series
        for ThisOne in ds3.series.keys():
            if ThisOne in ds3x.series.keys():
                for attr in ds3.series[ThisOne]['Attr'].keys():
                    if attr not in [
                            'ancillary_variables', 'long_name',
                            'standard_name', 'units'
                    ]:
                        ds3x.series[ThisOne]['Attr'][attr] = ds3.series[
                            ThisOne]['Attr'][attr]

        ds3x.globalattributes['nc_level'] = AttrLevel
        ds3x.globalattributes['EPDversion'] = sys.version
        ds3x.globalattributes['QC_version_history'] = cfg.__doc__
        # put the control file name into the global attributes
        ds3x.globalattributes['controlfile_name'] = cf['controlfile_name']
        # save the imported file's Excel metadata under a level-specific
        # prefix and restore the L3 Excel metadata as primary
        if OutLevel in ['L4', 'L5', 'L6']:
            _stash_xl_metadata(ds3x, ds3, OutLevel)

        qcutils.prepOzFluxVars(cf, ds3x)
        # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1], NEE [umol m-2 s-1] and NEP = - NEE
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='convertFc'
                              ) and cf['Functions']['convertFc'] == 'True':
            try:
                ds3x.globalattributes['L4Functions'] = ds3x.globalattributes[
                    'L4Functions'] + ', convertFc'
            except KeyError:
                # 'L4Functions' does not exist yet
                ds3x.globalattributes['L4Functions'] = 'convertFc'
            if 'Fc_co2' in ds3x.series.keys():
                qcts.ConvertFc(cf, ds3x, Fco2_in='Fc_co2')
            else:
                qcts.ConvertFc(cf, ds3x)

    ds4x = copy.deepcopy(ds3x)
    # restore the observed fluxes from L3 so gap filling starts from the
    # measured data rather than the imported values
    for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_co2', 'Fc_c', 'Fe', 'Fh']:
        if ThisOne in ds4x.series.keys() and ThisOne in ds3.series.keys():
            ds4x.series[ThisOne] = ds3.series[ThisOne].copy()
    # blank the partitioned series; these are regenerated at L6
    for ThisOne in [
            'GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day', 'CE_NEEmax',
            'ER_bio', 'PD', 'ER_n', 'ER_LRF'
    ]:
        if ThisOne in ds4x.series.keys():
            ds4x.series[ThisOne]['Data'] = numpy.ones(
                len(ds4x.series[ThisOne]['Data']),
                dtype=numpy.float64) * numpy.float64(c.missing_value)
            ds4x.series[ThisOne]['Flag'] = numpy.ones(len(
                ds4x.series[ThisOne]['Data']),
                                                      dtype=numpy.int32)
    if InLevel == 'L4' or AttrLevel == 'L3':
        ds4, x = l4qc(cf, ds4x, InLevel, x)
        qcutils.get_coverage_individual(ds4)
        qcutils.get_coverage_groups(ds4)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats'
                              ) and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds4)
    if OutLevel == 'L5' or OutLevel == 'L6':
        try:
            ds4y = copy.deepcopy(ds4)
        except NameError:
            # L4 processing was skipped; carry the unfilled copy forward
            ds4y = copy.deepcopy(ds4x)
        # BUG FIX: the original list contained 'Fc_c' twice
        for ThisOne in ['NEE', 'NEP', 'Fc', 'Fc_c', 'Fc_co2', 'Fe', 'Fh']:
            var, var_flag, var_attr = qcutils.GetSeriesasMA(ds3x, ThisOne)
            qcutils.CreateSeries(ds4y,
                                 ThisOne,
                                 var,
                                 Flag=var_flag,
                                 Attr=var_attr)
            ds4y.series[ThisOne]['Attr']['long_name'] = var_attr['long_name']
        ds5, y = l5qc(cf, ds4y, y)
        qcutils.get_coverage_individual(ds5)
        qcutils.get_coverage_groups(ds5)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats'
                              ) and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds5)
    if OutLevel == 'L6':
        ds5z = copy.deepcopy(ds5)
        # restore the partitioned series from the imported data
        for ThisOne in [
                'GPP', 'CE', 'ER_night', 'ER_dark', 'CE_day', 'CE_NEEmax',
                'ER_bio', 'PD', 'ER_n', 'ER_LRF'
        ]:
            if ThisOne in ds3x.series.keys():
                ds5z.series[ThisOne] = ds3x.series[ThisOne].copy()
        ds6, z = l6qc(cf, ds5z, z)
        qcutils.get_coverage_individual(ds6)
        qcutils.get_coverage_groups(ds6)
        if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='FlagStats'
                              ) and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf, ds6)

    # calculate daily statistics
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='Sums'):
        if cf['Functions']['Sums'] == 'L6':
            _append_function_tag(ds6, 'L6', 'Sums')
            qcts.do_sums(cf, ds6)
        elif cf['Functions']['Sums'] == 'L5':
            _append_function_tag(ds5, 'L5', 'Sums')
            qcts.do_sums(cf, ds5)
        elif cf['Functions']['Sums'] == 'L4':
            # BUG FIX: the original read 'L5Functions' when appending to
            # 'L4Functions', so the bare except always reset the attribute
            _append_function_tag(ds4, 'L4', 'Sums')
            qcts.do_sums(cf, ds4)

    # compute climatology
    if qcutils.cfkeycheck(cf, Base='Functions', ThisOne='climatology'):
        if cf['Functions']['climatology'] == 'L6':
            _append_function_tag(ds6, 'L6', 'climatology')
            qcts.do_climatology(cf, ds6)
        elif cf['Functions']['climatology'] == 'L5':
            _append_function_tag(ds5, 'L5', 'climatology')
            qcts.do_climatology(cf, ds5)
        elif cf['Functions']['climatology'] == 'L4':
            _append_function_tag(ds4, 'L4', 'climatology')
            qcts.do_climatology(cf, ds4)

    # record "no further gapfilling/partitioning" in the attributes of the
    # output data structures and return them
    if OutLevel == 'L4' and (InLevel == 'L3' or InLevel == 'L4'):
        if x == 0:
            _append_function_tag(ds4, 'L4', 'No further L4 gapfilling')
            log.warning('  L4:  no record of gapfilling functions')
        return ds4
    elif OutLevel == 'L5':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                _append_function_tag(ds4, 'L4', 'No further L4 gapfilling')
                log.warning('  L4:  no record of gapfilling functions')
            _append_function_tag(ds5, 'L4', 'No further L4 gapfilling')
        if y == 0:
            _append_function_tag(ds5, 'L5', 'No further L5 gapfilling')
            log.warning('  L5:  no record of gapfilling functions')
        return ds4, ds5
    elif OutLevel == 'L6':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                _append_function_tag(ds4, 'L4', 'No further L4 gapfilling')
                log.warning('  L4:  no record of gapfilling functions')
            if InLevel in ['L3', 'L4', 'L5']:
                _append_function_tag(ds5, 'L4', 'No further L4 gapfilling')
                log.warning('  L4:  no record of gapfilling functions')
            _append_function_tag(ds6, 'L4', 'No further L4 gapfilling')
        if y == 0:
            if InLevel in ['L3', 'L4', 'L5']:
                _append_function_tag(ds5, 'L5', 'No further L5 gapfilling')
                log.warning('  L5:  no record of gapfilling functions')
            _append_function_tag(ds6, 'L5', 'No further L5 gapfilling')
        if z == 0:
            # BUG FIX: the original read ds5's 'L6Functions' when tagging ds6
            _append_function_tag(ds6, 'L6', 'No further L6 partitioning')
            log.warning('  L6:  no record of gapfilling functions')
        return ds4, ds5, ds6
Example #27
0
 # Dispatch on the requested processing level and run that level for every
 # control file listed under [Levels] in the batch control file.
 # NOTE(review): 'level', 'cf_batch', 'qcio' and 'qcls' are defined earlier
 # in the enclosing script - confirm against the full file.
 if level.lower()=="l1":
     # L1 processing
     for i in cf_batch["Levels"][level].keys():
         # each entry under [Levels][<level>] is the path of a control file
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L1 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         # xl2nc: presumably converts the Excel data named in the control
         # file to a netCDF file - confirm in qcio
         qcio.xl2nc(cf,'L1')
         logging.info('Finished L1 processing with '+cfname)
 elif level.lower()=="l2":
     # L2 processing
     for i in cf_batch["Levels"][level].keys():
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L2 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         # read the input netCDF named in the control file, apply the L2 QC
         # and write the result to the output netCDF
         infilename = qcio.get_infilenamefromcf(cf)
         ds1 = qcio.nc_read_series(infilename)
         ds2 = qcls.l2qc(cf,ds1)
         outfilename = qcio.get_outfilenamefromcf(cf)
         ncFile = qcio.nc_open_write(outfilename)
         qcio.nc_write_series(ncFile,ds2)
         logging.info('Finished L2 processing with '+cfname)
 elif level.lower()=="l3":
     # L3 processing
     for i in cf_batch["Levels"][level].keys():
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L3 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         infilename = qcio.get_infilenamefromcf(cf)
         ds2 = qcio.nc_read_series(infilename)
         ds3 = qcls.l3qc(cf,ds2)
         outfilename = qcio.get_outfilenamefromcf(cf)
         # NOTE(review): this fragment ends here - the L3 output file is
         # opened and written in the part of the script not shown
Example #28
0
         # (fragment starts mid-loop: finish L1 processing for the current
         # control file name held in 'cfname')
         logging.info('Starting L1 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         # L1 has no input netCDF; l1qc builds the data structure from the
         # control file alone
         ds1 = qcls.l1qc(cf)
         outfilename = qcio.get_outfilenamefromcf(cf)
         ncFile = qcio.nc_open_write(outfilename)
         qcio.nc_write_series(ncFile,ds1)
         logging.info('Finished L1 processing with '+cfname)
         logging.info('')
 elif level.lower()=="l2":
     # L2 processing
     for i in cf_batch["Levels"][level].keys():
         # each entry under [Levels][<level>] is the path of a control file
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L2 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         # read the input netCDF, apply the L2 QC, write the output netCDF
         infilename = qcio.get_infilenamefromcf(cf)
         ds1 = qcio.nc_read_series(infilename)
         ds2 = qcls.l2qc(cf,ds1)
         outfilename = qcio.get_outfilenamefromcf(cf)
         ncFile = qcio.nc_open_write(outfilename)
         qcio.nc_write_series(ncFile,ds2)
         logging.info('Finished L2 processing with '+cfname)
         logging.info('')
 elif level.lower()=="l3":
     # L3 processing
     for i in cf_batch["Levels"][level].keys():
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L3 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         infilename = qcio.get_infilenamefromcf(cf)
         ds2 = qcio.nc_read_series(infilename)
         ds3 = qcls.l3qc(cf,ds2)
         # NOTE(review): fragment ends here - the output write happens in
         # the part of the script not shown
Example #29
0
        a, _, _, _ = numpy.linalg.lstsq(x, y)
        eqnstr = 'y = %.3fx'%(a)
        plt.text(0.5,0.875,eqnstr,fontsize=8,horizontalalignment='center',transform=ax.transAxes)
    plt.subplots_adjust(wspace=wspace,hspace=hspace)

# Script fragment: read an L3 netCDF file and set up the first figure of Fc
# as a function of ustar. (Python 2 code - note the print statement below.)
# get the control file
cf = qcio.load_controlfile(path='../controlfiles')
# exit quietly if no control file was returned (e.g. dialog cancelled)
if len(cf)==0: sys.exit()
# get the netCDF filename
ncfilename = qcio.get_infilenamefromcf(cf)
# get the Fsd and ustar thresholds
Fsd_lower = float(cf['Params']['Fsd_lower'])
Fsd_upper = float(cf['Params']['Fsd_upper'])
ustar_threshold = float(cf['Params']['ustar_threshold'])
# read the netCDF file
ds3 = qcio.nc_read_series(ncfilename)
# an empty series dictionary means the file could not be read
if len(ds3.series.keys())==0: print time.strftime('%X')+' netCDF file '+ncfilename+' not found'; sys.exit()
SiteName = ds3.globalattributes['site_name']
DateTime = ds3.series['DateTime']['Data']
# plot title is "<site name>: <first timestamp> to <last timestamp>"
PlotTitle = SiteName + ': ' + str(DateTime[0]) + ' to ' + str(DateTime[-1])

# first figure is general plots of Fc as a function of ustar, Ts and Sws
# get the data as masked arrays
Fc,f,a=qcutils.GetSeriesasMA(ds3,'Fc')
Fc_units = ds3.series['Fc']['Attr']['units']
us,f,a=qcutils.GetSeriesasMA(ds3,'ustar')
Fsd,f,a=qcutils.GetSeriesasMA(ds3,'Fsd')
nFig = 1
fig = plt.figure(nFig,figsize=(8,8))
# centred title across the whole figure
plt.figtext(0.5,0.95,PlotTitle,horizontalalignment='center',size=16)
# scatter plot of Fc versus ustar, night time
Example #30
0
def l4to6qc(cf,ds3,AttrLevel,InLevel,OutLevel):
    """
        Fill gaps in met data from other sources
        Integrate SOLO-ANN gap filled fluxes performed externally
        Generates L4 from L3 data
        Generates daily sums excel workbook
        
        Parameters:
            cf:        control file contents (nested dict-like)
            ds3:       L3 data structure
            AttrLevel: 'False' to clear the 'Functions' global attribute and
                       inherit InLevel, otherwise the level written to 'nc_level'
            InLevel:   level of the input data ('L3', 'L4' or 'L5')
            OutLevel:  level of the output data ('L4', 'L5' or 'L6')
        
        Returns (depends on OutLevel):
            'L4': ds4
            'L5': ds4, ds5
            'L6': ds4, ds5, ds6
            (ds3x is returned unmodified if no [Functions] section exists in cf)
        
        Variable Series:
            Meteorological (MList): Ah_EC, Cc_7500_Av, ps, Ta_EC, Ws_CSAT, Wd_CSAT
            Radiation (RList): Fld, Flu, Fn, Fsd, Fsu
            Soil water content (SwsList): all variables containing Sws in variable name
            Soil (SList): Fg, Ts, SwsList
            Turbulent fluxes (FList): Fc_wpl, Fe_wpl, Fh, ustar
            Output (OList): MList, RList, SList, FList
        
        Parameters loaded from control file:
            zmd: z-d
            z0: roughness height
        
        Functions performed:
            qcts.AddMetVars
            qcts.ComputeDailySums
            qcts.InterpolateOverMissing (OList for gaps shorter than 3 observations, OList gaps shorter than 7 observations)
            qcts.GapFillFromAlternate (MList, RList)
            qcts.GapFillFromClimatology (Ah_EC, Fn, Fg, ps, Ta_EC, Ws_CSAT, OList)
            qcts.GapFillFromRatios (Fe, Fh, Fc)
            qcts.ReplaceOnDiff (Ws_CSAT, ustar)
            qcts.UstarFromFh
            qcts.ReplaceWhereMissing (Ustar)
            qcck.do_qcchecks
        """
    if AttrLevel == 'False':
        ds3.globalattributes['Functions'] = ''
        AttrLevel = InLevel
    # check to ensure L4 functions are defined in controlfile
    if qcutils.cfkeycheck(cf,Base='Functions'):
        # x, y and z record whether any L4, L5 or L6 functions were applied;
        # they are updated by l4qc/l5qc/l6qc and tested before returning
        x=0
        y=0
        z=0
    else:
        log.error('FunctionList not found in control file')
        ds3x = copy.deepcopy(ds3)
        ds3x.globalattributes['nc_level'] = 'L3'
        ds3x.globalattributes['L4Functions'] = 'No L4-L6 functions applied'
        return ds3x
    
    # handle meta-data and import L4-L6 from external process
    if InLevel == 'L3':
        ds3x = copy.deepcopy(ds3)
    else:
        infilename = qcio.get_infilename_from_cf(cf,InLevel)
        ds3x = qcio.nc_read_series(infilename)
        
        # carry over global attributes present in ds3 but missing in ds3x
        for ThisOne in ds3.globalattributes.keys():
            if ThisOne not in ds3x.globalattributes.keys():
                ds3x.globalattributes[ThisOne] = ds3.globalattributes[ThisOne]
        
        # refresh variable attributes from ds3, keeping the descriptive ones
        # (names, units) that the external process may have set
        for ThisOne in ds3.series.keys():
            if ThisOne in ds3x.series.keys():
                for attr in ds3.series[ThisOne]['Attr'].keys():
                    if attr not in ['ancillary_variables','long_name','standard_name','units']:
                        ds3x.series[ThisOne]['Attr'][attr] = ds3.series[ThisOne]['Attr'][attr]
        
        ds3x.globalattributes['nc_level'] = AttrLevel
        ds3x.globalattributes['EPDversion'] = sys.version
        ds3x.globalattributes['QC_version_history'] = cfg.__doc__
        # put the control file name into the global attributes
        ds3x.globalattributes['controlfile_name'] = cf['controlfile_name']
        # preserve the external workbook's xl_* attributes under level-specific
        # names before overwriting them with the values from ds3
        if OutLevel == 'L6':
            ds3x.globalattributes['xlL6_datemode'] = ds3x.globalattributes['xl_datemode']
            ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
            ds3x.globalattributes['xlL6_filename'] = ds3x.globalattributes['xl_filename']
            ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
            ds3x.globalattributes['xlL6_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
            ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
        elif OutLevel == 'L5':
            ds3x.globalattributes['xlL5_datemode'] = ds3x.globalattributes['xl_datemode']
            ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
            ds3x.globalattributes['xlL5_filename'] = ds3x.globalattributes['xl_filename']
            ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
            ds3x.globalattributes['xlL5_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
            ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
        elif OutLevel == 'L4':
            ds3x.globalattributes['xlL4_datemode'] = ds3x.globalattributes['xl_datemode']
            ds3x.globalattributes['xl_datemode'] = ds3.globalattributes['xl_datemode']
            ds3x.globalattributes['xlL4_filename'] = ds3x.globalattributes['xl_filename']
            ds3x.globalattributes['xl_filename'] = ds3.globalattributes['xl_filename']
            ds3x.globalattributes['xlL4_moddatetime'] = ds3x.globalattributes['xl_moddatetime']
            ds3x.globalattributes['xl_moddatetime'] = ds3.globalattributes['xl_moddatetime']
        
        qcutils.prepOzFluxVars(cf,ds3x)
        # convert Fc [mgCO2 m-2 s-1] to Fc_co2 [mgCO2 m-2 s-1], Fc_c [mgC m-2 s-1], NEE [umol m-2 s-1] and NEP = - NEE
        if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='convertFc') and cf['Functions']['convertFc'] == 'True':
            try:
                ds3x.globalattributes['L4Functions'] = ds3x.globalattributes['L4Functions']+', convertFc'
            except KeyError:
                # first L4 function recorded for this data set
                ds3x.globalattributes['L4Functions'] = 'convertFc'
            if 'Fc_co2' in ds3x.series.keys():
                qcts.ConvertFc(cf,ds3x,Fco2_in='Fc_co2')
            else:
                qcts.ConvertFc(cf,ds3x)
    
    ds4x = copy.deepcopy(ds3x)
    # restore the observed fluxes from ds3 so L4 gapfilling starts from L3 data
    for ThisOne in ['NEE','NEP','Fc','Fc_co2','Fc_c','Fe','Fh']:
        if ThisOne in ds4x.series.keys() and ThisOne in ds3.series.keys():
            ds4x.series[ThisOne] = ds3.series[ThisOne].copy()
    # blank any partitioning products; they are recomputed at L6
    for ThisOne in ['GPP','CE','ER_night','ER_dark','CE_day','CE_NEEmax','ER_bio','PD','ER_n','ER_LRF']:
        if ThisOne in ds4x.series.keys():
            ds4x.series[ThisOne]['Data'] = numpy.ones(len(ds4x.series[ThisOne]['Data']),dtype=numpy.float64) * numpy.float64(c.missing_value)
            ds4x.series[ThisOne]['Flag'] = numpy.ones(len(ds4x.series[ThisOne]['Data']), dtype=numpy.int32)
    if InLevel == 'L4' or AttrLevel == 'L3':
        ds4,x = l4qc(cf,ds4x,InLevel,x)
        qcutils.get_coverage_individual(ds4)
        qcutils.get_coverage_groups(ds4)
        if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf,ds4)
    if OutLevel == 'L5' or OutLevel == 'L6':
        try:
            ds4y = copy.deepcopy(ds4)
        except NameError:
            # ds4 was not created above (no L4 pass); start L5 from ds4x
            ds4y = copy.deepcopy(ds4x)
        # restore the flux series from ds3x before the L5 pass
        # (fixed: 'Fc_c' was listed twice in the original)
        for ThisOne in ['NEE','NEP','Fc','Fc_c','Fc_co2','Fe','Fh']:
            var, var_flag, var_attr = qcutils.GetSeriesasMA(ds3x,ThisOne)
            qcutils.CreateSeries(ds4y,ThisOne,var,Flag=var_flag,Attr=var_attr)
            ds4y.series[ThisOne]['Attr']['long_name'] = var_attr['long_name']
        ds5,y = l5qc(cf,ds4y,y)
        qcutils.get_coverage_individual(ds5)
        qcutils.get_coverage_groups(ds5)
        if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf,ds5)
    if OutLevel == 'L6':
        ds5z = copy.deepcopy(ds5)
        # restore the partitioning series from ds3x before the L6 pass
        for ThisOne in ['GPP','CE','ER_night','ER_dark','CE_day','CE_NEEmax','ER_bio','PD','ER_n','ER_LRF']:
            if ThisOne in ds3x.series.keys():
                ds5z.series[ThisOne] = ds3x.series[ThisOne].copy()
        ds6,z = l6qc(cf,ds5z,z)
        qcutils.get_coverage_individual(ds6)
        qcutils.get_coverage_groups(ds6)
        if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='FlagStats') and cf['Functions']['FlagStats'] == 'True':
            qcio.get_seriesstats(cf,ds6)
    
    # calculate daily statistics
    if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='Sums'):
        if cf['Functions']['Sums'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions']+', Sums'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions']+', Sums'
            except KeyError:
                ds6.globalattributes['L6Functions'] = 'Sums'
            
            qcts.do_sums(cf,ds6)
        
        elif cf['Functions']['Sums'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions']+', Sums'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions']+', Sums'
            except KeyError:
                ds5.globalattributes['L5Functions'] = 'Sums'
            
            qcts.do_sums(cf,ds5)
        
        elif cf['Functions']['Sums'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions']+', Sums'
            try:
                # fixed: original read 'L5Functions' here (copy-paste from the
                # L5 branch), which made the except branch fire every time
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions']+', Sums'
            except KeyError:
                ds4.globalattributes['L4Functions'] = 'Sums'
            
            qcts.do_sums(cf,ds4)
        
    
    # compute climatology
    if qcutils.cfkeycheck(cf,Base='Functions',ThisOne='climatology'):
        if cf['Functions']['climatology'] == 'L6':
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions']+', climatology'
            try:
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions']+', climatology'
            except KeyError:
                ds6.globalattributes['L6Functions'] = 'climatology'
            
            qcts.do_climatology(cf,ds6)
        
        elif cf['Functions']['climatology'] == 'L5':
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions']+', climatology'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions']+', climatology'
            except KeyError:
                ds5.globalattributes['L5Functions'] = 'climatology'
            
            qcts.do_climatology(cf,ds5)
        
        elif cf['Functions']['climatology'] == 'L4':
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions']+', climatology'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions']+', climatology'
            except KeyError:
                ds4.globalattributes['L4Functions'] = 'climatology'
            
            qcts.do_climatology(cf,ds4)
        
    # record in the global attributes when no gapfilling/partitioning was
    # actually applied at each level, then return the level-appropriate tuple
    if OutLevel == 'L4' and (InLevel == 'L3' or InLevel == 'L4'):
        if x == 0:
            ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except KeyError:
                ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
            
            log.warn('  L4:  no record of gapfilling functions')
        return ds4
    elif OutLevel == 'L5':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except KeyError:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn('  L4:  no record of gapfilling functions')
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except KeyError:
                ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        if y == 0:
            ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except KeyError:
                ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
            
            log.warn('  L5:  no record of gapfilling functions')
        return ds4,ds5
    elif OutLevel == 'L6':
        if x == 0:
            if InLevel == 'L3' or InLevel == 'L4':
                ds4.globalattributes['Functions'] = ds4.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds4.globalattributes['L4Functions'] = ds4.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except KeyError:
                    ds4.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn('  L4:  no record of gapfilling functions')
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L4 gapfilling'
                try:
                    ds5.globalattributes['L4Functions'] = ds5.globalattributes['L4Functions'] + ', No further L4 gapfilling'
                except KeyError:
                    ds5.globalattributes['L4Functions'] = 'No further L4 gapfilling'
                log.warn('  L4:  no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L4 gapfilling'
            try:
                ds6.globalattributes['L4Functions'] = ds6.globalattributes['L4Functions'] + ', No further L4 gapfilling'
            except KeyError:
                ds6.globalattributes['L4Functions'] = 'No further L4 gapfilling'
        
        if y == 0:
            if InLevel == 'L3' or InLevel == 'L4' or InLevel == 'L5':
                ds5.globalattributes['Functions'] = ds5.globalattributes['Functions'] + ', No further L5 gapfilling'
                try:
                    ds5.globalattributes['L5Functions'] = ds5.globalattributes['L5Functions'] + ', No further L5 gapfilling'
                except KeyError:
                    ds5.globalattributes['L5Functions'] = 'No further L5 gapfilling'
                log.warn('  L5:  no record of gapfilling functions')
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L5 gapfilling'
            try:
                ds6.globalattributes['L5Functions'] = ds6.globalattributes['L5Functions'] + ', No further L5 gapfilling'
            except KeyError:
                ds6.globalattributes['L5Functions'] = 'No further L5 gapfilling'
        if z == 0:
            ds6.globalattributes['Functions'] = ds6.globalattributes['Functions'] + ', No further L6 partitioning'
            try:
                # fixed: original read from ds5 here; every sibling branch
                # reads from the same data set it writes to
                ds6.globalattributes['L6Functions'] = ds6.globalattributes['L6Functions'] + ', No further L6 partitioning'
            except KeyError:
                ds6.globalattributes['L6Functions'] = 'No further L6 partitioning'
            log.warn('  L6:  no record of gapfilling functions')
        return ds4,ds5,ds6