示例#1
0
 def do_l6qc(self):
     """
     Run L6 processing: partition NEE into GPP and ER.

     Loads a control file, reads the input netCDF series, calls
     qcls.l6qc in interactive mode and writes the partitioned data
     to the output netCDF file named in the control file.
     """
     logging.info(" Starting L6 processing ...")
     cf = qcio.load_controlfile(path='controlfiles')
     if len(cf) == 0:
         self.do_progress(text='Waiting for input ...')
         return
     infilename = qcio.get_infilenamefromcf(cf)
     if len(infilename) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     if not qcutils.file_exists(infilename):
         self.do_progress(text='An error occurred, check the console ...')
         return
     ds5 = qcio.nc_read_series(infilename)
     if len(ds5.series.keys()) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         del ds5
         return
     ds5.globalattributes['controlfile_name'] = cf['controlfile_name']
     dt_data = ds5.series['DateTime']['Data']
     self.update_startenddate(str(dt_data[0]), str(dt_data[-1]))
     sitename = ds5.globalattributes['site_name']
     self.do_progress(text='Doing L6 partitioning: '+sitename+' ...')
     # force interactive mode so any GUI dialogs are used
     if "Options" not in cf:
         cf["Options"] = {}
     cf["Options"]["call_mode"] = "interactive"
     ds6 = qcls.l6qc(cf, ds5)
     self.do_progress(text='Finished L6: '+sitename)
     logging.info(' Finished L6: '+sitename)
     # write the L6 data structure to the output netCDF file
     self.do_progress(text='Saving L6 partitioned data ...')
     outfilename = qcio.get_outfilenamefromcf(cf)
     if len(outfilename) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     ncFile = qcio.nc_open_write(outfilename)
     outputlist = qcio.get_outputlistfromcf(cf, 'nc')
     qcio.nc_write_series(ncFile, ds6, outputlist=outputlist)
     self.do_progress(text='Finished saving L6 partitioned data')
     logging.info(' Finished saving L6 partitioned data')
     logging.info("")
示例#2
0
 def do_l4qc(self):
     """
     Run L4 processing: gap fill the L3 meteorological drivers.

     Calls qcls.l4qc to gap fill the L3 met data (or ingest gap
     filled fluxes produced by an external SOLO-ANN run) and writes
     the L4 results to a netCDF file.

     Control files: L4_year.txt or L4b.txt (see
     ControlFile/Templates/L4.txt and ControlFile/Templates/L4b.txt
     for examples).  Relevant sections:
         [General]   - Python control parameters (SOLO) and site
                       characteristics parameters (gap filling)
         [Files]     - L3 input (gap filling), L4 input (SOLO) and
                       L4 output file names and paths
         [Variables] - variable subset list for the OzFlux output
                       file (where available)
     """
     logging.info(" Starting L4 processing ...")
     cf = qcio.load_controlfile(path='controlfiles')
     if len(cf) == 0:
         self.do_progress(text='Waiting for input ...')
         return
     infilename = qcio.get_infilenamefromcf(cf)
     if len(infilename) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     if not qcutils.file_exists(infilename):
         self.do_progress(text='An error occurred, check the console ...')
         return
     ds3 = qcio.nc_read_series(infilename)
     if len(ds3.series.keys()) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         del ds3
         return
     ds3.globalattributes['controlfile_name'] = cf['controlfile_name']
     dt_data = ds3.series['DateTime']['Data']
     self.update_startenddate(str(dt_data[0]), str(dt_data[-1]))
     sitename = ds3.globalattributes['site_name']
     self.do_progress(text='Doing L4 gap filling drivers: '+sitename+' ...')
     # force interactive mode so any GUI dialogs are used
     if "Options" not in cf:
         cf["Options"] = {}
     cf["Options"]["call_mode"] = "interactive"
     ds4 = qcls.l4qc(cf, ds3)
     # the user may quit out of the alternate or SOLO gap filling GUIs
     quit_codes = (ds4.returncodes["alternate"], ds4.returncodes["solo"])
     if "quit" in quit_codes:
         self.do_progress(text='Quitting L4: '+sitename)
         logging.info(' Quitting L4: '+sitename)
     else:
         self.do_progress(text='Finished L4: '+sitename)
         logging.info(' Finished L4: '+sitename)
         # write the L4 data structure to the output netCDF file
         self.do_progress(text='Saving L4 gap filled data ...')
         outfilename = qcio.get_outfilenamefromcf(cf)
         if len(outfilename) == 0:
             self.do_progress(text='An error occurred, check the console ...')
             return
         ncFile = qcio.nc_open_write(outfilename)
         outputlist = qcio.get_outputlistfromcf(cf, 'nc')
         qcio.nc_write_series(ncFile, ds4, outputlist=outputlist)
         self.do_progress(text='Finished saving L4 gap filled data')
         logging.info(' Finished saving L4 gap filled data')
     logging.info("")
示例#3
0
def l1qc_write_netcdf(cf, ds1):
    """
    Write the L1 data structure to the netCDF file named in the control file.

    The file is only written when ds1.returncodes["value"] is 0 (success);
    otherwise an error message is logged.

    cf  - control file object, used to get the output file name
    ds1 - L1 data structure produced by the L1 processing step
    """
    # BUG FIX: this is a module-level function with no "self" in scope, so
    # the original self.do_progress() calls raised NameError; progress and
    # error messages are reported via logging instead
    if ds1.returncodes["value"] == 0:
        outfilename = qcio.get_outfilenamefromcf(cf)
        ncFile = qcio.nc_open_write(outfilename)
        qcio.nc_write_series(ncFile, ds1)
        logging.info(' Finished L1')
        logging.info("")
    else:
        msg = 'An error occurred, check the console ...'
        logging.error(msg)
示例#4
0
 def do_l2qc(self):
     """
     Run L2 processing: QA/QC of the raw (L1) data.

     Calls qcls.l2qc to apply the QC checks specified in the control
     file and writes the L2 results to a netCDF file.

     Control files: L2_year.txt or L2.txt (see
     ControlFile/Templates/L2.txt for an example).  Relevant sections:
         [General]   - list of functions to be performed
         [Files]     - L1 input and L2 output file names and paths
         [Variables] - per-variable range check limits, diurnal check
                       standard deviation limits and excluded
                       date/hour timestamps
         [Plots]     - variable lists for plot generation
     """
     logging.info(" Starting L2 processing ...")
     self.do_progress(text='Load L2 Control File ...')
     self.cf = qcio.load_controlfile(path='controlfiles')
     if len(self.cf) == 0:
         logging.info( " L2: no control file chosen")
         self.do_progress(text='Waiting for input ...')
         return
     infilename = qcio.get_infilenamefromcf(self.cf)
     if not qcutils.file_exists(infilename):
         self.do_progress(text='An error occurred, check the console ...')
         return
     self.do_progress(text='Doing L2 QC ...')
     self.ds1 = qcio.nc_read_series(infilename)
     if len(self.ds1.series.keys()) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         del self.ds1
         return
     dt_data = self.ds1.series['DateTime']['Data']
     self.update_startenddate(str(dt_data[0]), str(dt_data[-1]))
     self.ds2 = qcls.l2qc(self.cf, self.ds1)
     logging.info(' Finished L2 QC process')
     self.do_progress(text='Finished L2 QC process')
     # write the L2 data structure to the output netCDF file
     self.do_progress(text='Saving L2 QC ...')
     outfilename = qcio.get_outfilenamefromcf(self.cf)
     if len(outfilename) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     ncFile = qcio.nc_open_write(outfilename)
     qcio.nc_write_series(ncFile, self.ds2)
     self.do_progress(text='Finished saving L2 QC data')
     logging.info(' Finished saving L2 QC data')
     logging.info("")
示例#5
0
    # QC flag for the EVI series; zero means good data throughout
    flag = numpy.zeros(len(dt_loc), dtype=numpy.int32)
    # attributes for the QC'd, smoothed and interpolated EVI
    attr = qcutils.MakeAttributeDictionary(
        long_name="MODIS EVI, smoothed and interpolated",
        units="none",
        horiz_resolution="250m",
        cutout_size=str(site_cutout),
        evi_quality_threshold=str(evi_quality_threshold),
        evi_sd_threshold=str(evi_sd_threshold),
        evi_interpolate=str(evi_interpolate),
        evi_smooth_filter=str(evi_smooth_filter),
        sg_num_points=str(sg_num_points),
        # BUG FIX: the sg_order attribute was set from sg_num_points
        sg_order=str(sg_order))
    qcutils.CreateSeries(ds, "EVI", evi_interp2_smooth, Flag=flag, Attr=attr)

    # also store the interpolated but not smoothed EVI for comparison
    attr = qcutils.MakeAttributeDictionary(
        long_name="MODIS EVI, interpolated",
        units="none",
        horiz_resolution="250m",
        cutout_size=str(site_cutout),
        evi_quality_threshold=str(evi_quality_threshold),
        evi_sd_threshold=str(evi_sd_threshold),
        evi_interpolate=str(evi_interpolate))
    qcutils.CreateSeries(ds,
                         "EVI_notsmoothed",
                         evi_interp2,
                         Flag=flag,
                         Attr=attr)
    # now write the data structure to a netCDF file
    out_file = qcio.nc_open_write(out_name)
    qcio.nc_write_series(out_file, ds, ndims=1)
示例#6
0
         # name of the control file for this L1 run in the batch
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L1 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         # L1: convert the spreadsheet data to a netCDF file
         qcio.xl2nc(cf,'L1')
         logging.info('Finished L1 processing with '+cfname)
 elif level.lower()=="l2":
     # L2 processing
     # loop over the L2 control files listed in the batch control file
     for i in cf_batch["Levels"][level].keys():
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L2 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         infilename = qcio.get_infilenamefromcf(cf)
         # read the L1 data, do the L2 QC and write the L2 netCDF file
         ds1 = qcio.nc_read_series(infilename)
         ds2 = qcls.l2qc(cf,ds1)
         outfilename = qcio.get_outfilenamefromcf(cf)
         ncFile = qcio.nc_open_write(outfilename)
         qcio.nc_write_series(ncFile,ds2)
         logging.info('Finished L2 processing with '+cfname)
 elif level.lower()=="l3":
     # L3 processing
     # loop over the L3 control files listed in the batch control file
     for i in cf_batch["Levels"][level].keys():
         cfname = cf_batch["Levels"][level][i]
         logging.info('Starting L3 processing with '+cfname)
         cf = qcio.get_controlfilecontents(cfname)
         infilename = qcio.get_infilenamefromcf(cf)
         # read the L2 data, do the L3 processing and write the L3 netCDF file
         ds2 = qcio.nc_read_series(infilename)
         ds3 = qcls.l3qc(cf,ds2)
         outfilename = qcio.get_outfilenamefromcf(cf)
         # only write the variables selected in the control file
         outputlist = qcio.get_outputlistfromcf(cf,'nc')
         ncFile = qcio.nc_open_write(outfilename)
         qcio.nc_write_series(ncFile,ds3,outputlist=outputlist)
示例#7
0
                # add the ISD site ID
                var_all["Attr"]["isd_site_id"] = isd_site_id
                # copy the data and flag onto the matching times
                var_all["Data"][idx] = var_out["Data"]
                var_all["Flag"][idx] = var_out["Flag"]
                # put the data, flag and attributes into the all-in-one data structure
                qcutils.CreateVariable(ds_all, var_all)
        # write the netCDF file with the combined data for this year
        # use the FluxNet ID in the output path when one is available,
        # otherwise fall back to the ISD site name
        if len(fluxnet_id) == 0:
            nc_dir_path = os.path.join(out_base_path,site,"Data","ISD")
            nc_file_name = site+"_ISD_"+str(year)+".nc"
        else:
            nc_dir_path = os.path.join(out_base_path,fluxnet_id,"Data","ISD")
            nc_file_name = fluxnet_id+"_ISD_"+str(year)+".nc"
        # make sure the output directory exists
        if not os.path.exists(nc_dir_path):
            os.makedirs(nc_dir_path)
        nc_file_path = os.path.join(nc_dir_path,nc_file_name)
        nc_file = qcio.nc_open_write(nc_file_path)
        qcio.nc_write_series(nc_file, ds_all, ndims=1)
        # remember this yearly file so it can be concatenated below
        cf_concat["Files"]["In"][str(n)] = nc_file_path
    # concatenate the yearly files for this site
    #cf_concat.filename = "../controlfiles/ISD/concat.txt"
    #cf_concat.write()
    qcio.nc_concatenate(cf_concat)

# write the time steps out to an Excel file
xl_file_path = os.path.join(isd_base_path, "ISD_site_timesteps.xls")
xl_write_ISD_timesteps(xl_file_path, isd_time_steps)

logger.info("All done")
    # relative humidity
    get_relativehumidity(ds_60minutes)
    # absolute humidity from temperature and relative humidity
    get_absolutehumidity(ds_60minutes)
    # soil moisture from kg/m2 to m3/m3
    changeunits_soilmoisture(ds_60minutes)
    # net radiation and upwelling short and long wave radiation
    get_radiation(ds_60minutes)
    # ground heat flux as residual
    get_groundheatflux(ds_60minutes)
    # Available energy
    get_availableenergy(ds_60minutes)
    if info["interpolate"]:
        # interpolate from 60 minute time step to 30 minute time step
        logging.info("Interpolating data to 30 minute time step")
        ds_30minutes = interpolate_to_30minutes(ds_60minutes)
        # get instantaneous precipitation from accumulated precipitation
        get_instantaneous_precip30(ds_30minutes)
        # write to netCDF file
        logging.info("Writing 30 minute data to netCDF file")
        ncfile = qcio.nc_open_write(info["out_filename"])
        qcio.nc_write_series(ncfile, ds_30minutes,ndims=1)
    else:
        # get instantaneous precipitation from accumulated precipitation
        get_instantaneous_precip60(ds_60minutes)
        # write to netCDF file
        logging.info("Writing 60 minute data to netCDF file")
        ncfile = qcio.nc_open_write(info["out_filename"])
        qcio.nc_write_series(ncfile, ds_60minutes,ndims=1)

logging.info('All done!')
示例#9
0
    # relative humidity
    get_relativehumidity(ds_60minutes)
    # absolute humidity from temperature and relative humidity
    get_absolutehumidity(ds_60minutes)
    # soil moisture from kg/m2 to m3/m3
    changeunits_soilmoisture(ds_60minutes)
    # net radiation and upwelling short and long wave radiation
    get_radiation(ds_60minutes)
    # ground heat flux as residual
    get_groundheatflux(ds_60minutes)
    # Available energy
    get_availableenergy(ds_60minutes)
    if info["interpolate"]:
        # interpolate from 60 minute time step to 30 minute time step
        logging.info("Interpolating data to 30 minute time step")
        ds_30minutes = interpolate_to_30minutes(ds_60minutes)
        # get instantaneous precipitation from accumulated precipitation
        get_instantaneous_precip30(ds_30minutes)
        # write to netCDF file
        logging.info("Writing 30 minute data to netCDF file")
        ncfile = qcio.nc_open_write(info["out_filename"])
        qcio.nc_write_series(ncfile, ds_30minutes,ndims=1)
    else:
        # get instantaneous precipitation from accumulated precipitation
        get_instantaneous_precip60(ds_60minutes)
        # write to netCDF file
        logging.info("Writing 60 minute data to netCDF file")
        ncfile = qcio.nc_open_write(info["out_filename"])
        qcio.nc_write_series(ncfile, ds_60minutes,ndims=1)

logging.info('All done!')
示例#10
0
        # wind speed from the U and V components
        Ws_erai_tts = numpy.sqrt(U_erai_tts * U_erai_tts +
                                 V_erai_tts * V_erai_tts)
        flag = numpy.zeros(len(Ws_erai_tts), dtype=numpy.int32)
        attr = qcutils.MakeAttributeDictionary(long_name="Wind speed",
                                               units="m/s")
        qcutils.CreateSeries(ds_erai, "Ws", Ws_erai_tts, flag, attr)
        # wind direction from the U and V components, in degrees
        Wd_erai_tts = float(270) - numpy.arctan2(
            V_erai_tts, U_erai_tts) * float(180) / numpy.pi
        # wrap directions greater than 360 back into the 0-360 range
        idx = numpy.where(Wd_erai_tts > 360)[0]
        if len(idx) > 0: Wd_erai_tts[idx] = Wd_erai_tts[idx] - float(360)
        flag = numpy.zeros(len(Wd_erai_tts), dtype=numpy.int32)
        attr = qcutils.MakeAttributeDictionary(long_name="Wind direction",
                                               units="deg")
        qcutils.CreateSeries(ds_erai, "Wd", Wd_erai_tts, flag, attr)
        # write the yearly file for this site
        ncfile = qcio.nc_open_write(out_file_path)
        qcio.nc_write_series(ncfile, ds_erai, ndims=1)
        # add this yearly file to the control file dictionary for this site
        cf_dict[site_name]["Files"]["In"][str(n)] = out_file_path
        # tell the user we have finished this site
        logger.info("Finished " + site_name)
        logger.info("")
# now we need to loop over the contents of the concatenate control file dictionary
for site_name in site_list:
    cf_concat = cf_dict[site_name]
    #cf_concat.filename = os.path.join("../controlfiles/OzFlux/ERAI/",site_name+"_concatenate.txt")
    #cf_concat.write()
    msg = "Concatenating yearly files for " + site_name
    logger.info(msg)
    qcio.nc_concatenate(cf_concat)
                              flag_60minute, attr)
     elif "Wd" in item:
         # vector average the wind: convert speed and direction to U and
         # V components, combine the pairs, then convert back
         # NOTE(review): Ws_30minute is read from the same "Wd" item as
         # Wd_30minute - presumably it should come from the matching "Ws"
         # series; confirm against the original data set
         Ws_30minute, flag_30minute, attr = qcutils.GetSeriesasMA(
             ds_aws_30minute, item, si=si_wholehour, ei=ei_wholehour)
         Wd_30minute, flag_30minute, attr = qcutils.GetSeriesasMA(
             ds_aws_30minute, item, si=si_wholehour, ei=ei_wholehour)
         U_30minute, V_30minute = qcutils.convert_WsWdtoUV(
             Ws_30minute, Wd_30minute)
         # pair consecutive 30 minute values into rows of 2
         # (assumes Python 2 integer division and an even record count -
         # TODO confirm)
         U_2d = numpy.reshape(U_30minute, (nRecs_30minute / 2, 2))
         V_2d = numpy.reshape(V_30minute, (nRecs_30minute / 2, 2))
         flag_2d = numpy.reshape(flag_30minute, (nRecs_30minute / 2, 2))
         U_60minute = numpy.ma.sum(U_2d, axis=1)
         V_60minute = numpy.ma.sum(V_2d, axis=1)
         Ws_60minute, Wd_60minute = qcutils.convert_UVtoWsWd(
             U_60minute, V_60minute)
         # carry the maximum QC flag of each pair
         flag_60minute = numpy.ma.max(flag_2d, axis=1)
         qcutils.CreateSeries(ds_aws_60minute, item, Wd_60minute,
                              flag_60minute, attr)
     else:
         # all other variables: average each pair of 30 minute values
         data_30minute, flag_30minute, attr = qcutils.GetSeriesasMA(
             ds_aws_30minute, item, si=si_wholehour, ei=ei_wholehour)
         data_2d = numpy.reshape(data_30minute, (nRecs_30minute / 2, 2))
         flag_2d = numpy.reshape(flag_30minute, (nRecs_30minute / 2, 2))
         data_60minute = numpy.ma.average(data_2d, axis=1)
         flag_60minute = numpy.ma.max(flag_2d, axis=1)
         qcutils.CreateSeries(ds_aws_60minute, item, data_60minute,
                              flag_60minute, attr)
 # write out the 60 minute data
 nc_60minute = aws_name.replace('.nc', '_60minute.nc')
 ncfile = qcio.nc_open_write(nc_60minute)
 qcio.nc_write_series(ncfile, ds_aws_60minute, ndims=1)
示例#12
0
        # get the air temperature, relative humidity and pressure series
        Ta,f,a = qcutils.GetSeriesasMA(ds_all,Ta_label)
        RH,f,a = qcutils.GetSeriesasMA(ds_all,RH_label)
        ps,f,a = qcutils.GetSeriesasMA(ds_all,ps_label)
        # absolute humidity from air temperature and relative humidity
        Ah = mf.absolutehumidityfromRH(Ta, RH)
        attr = qcutils.MakeAttributeDictionary(long_name='Absolute humidity',units='g/m3',standard_name='not defined',
                                               bom_id=a["bom_id"],bom_name=a["bom_name"],bom_dist=a["bom_dist"])
        qcutils.CreateSeries(ds_all,RH_label.replace("RH","Ah"),Ah,Flag=f,Attr=attr)
        # specific humidity from relative humidity, air temperature and pressure
        q = mf.specifichumidityfromRH(RH, Ta, ps)
        attr = qcutils.MakeAttributeDictionary(long_name='Specific humidity',units='kg/kg',standard_name='not defined',
                                               bom_id=a["bom_id"],bom_name=a["bom_name"],bom_dist=a["bom_dist"])
        qcutils.CreateSeries(ds_all,RH_label.replace("RH","q"),q,Flag=f,Attr=attr)
    
    # now write the data structure to file
    # OMG, the user may want to overwrite the old data ...
    if os.path.exists(ncname):
        # ... but we will save them from themselves!
        t = time.localtime()
        rundatetime = datetime.datetime(t[0],t[1],t[2],t[3],t[4],t[5]).strftime("%Y%m%d%H%M")
        new_ext = "_"+rundatetime+".nc"
        # add the current local datetime to the old file name
        newFileName = ncname.replace(".nc",new_ext)
        msg = " Renaming "+ncname+" to "+newFileName
        log.info(msg)
        # ... and rename the old file to preserve it
        os.rename(ncname,newFileName)
        # now the old file will not be overwritten
    ncfile = qcio.nc_open_write(ncname)
    qcio.nc_write_series(ncfile,ds_all,ndims=1)
    log.info("Finished site: "+site_name)

print "aws2nc: All done"
示例#13
0
        # and then precipitation
        precip_30,flag_30,attr = qcutils.GetSeriesasMA(ds_30,"Precip")
        precip_30_2d = numpy.reshape(precip_30,(nRecs_60,2))
        precip_60 = numpy.sum(precip_30_2d,axis=1)
        qcutils.CreateSeries(ds_60,"Precip",precip_60,flag_60,attr)
        # get a list of the variables, exclude the QC flags
        series_list = [item for item in ds_30.series.keys() if "_QCFlag" not in item]
        # remove the datetime variables
        for item in ["DateTime","DateTime_UTC","time","Precip","xlDateTime","xlDateTime_UTC"
                     "Year","Month","Day","Hour","Minute","Second"]:
            if item in series_list: series_list.remove(item)
        # loop over variables
        for series in series_list:
            data_30,flag_30,attr = qcutils.GetSeriesasMA(ds_30,series)
            data_30_2d=numpy.reshape(data_30,(nRecs_60,2))
            data_60=numpy.average(data_30_2d,axis=1)
            qcutils.CreateSeries(ds_60,series,data_60,flag_60,attr)
        # get the year, month etc
        qcutils.get_ymdhmsfromdatetime(ds_60)
        # get the Excel datetime values
        xl_date_loc = qcutils.get_xldatefromdatetime(ds_60)
        # write the output file
        ncfile = qcio.nc_open_write(outfilename)
        qcio.nc_write_series(ncfile,ds_60,ndims=1)
    else:
        # write the output file
        ncfile = qcio.nc_open_write(outfilename)
        qcio.nc_write_series(ncfile,ds_30,ndims=1)
    log.info("Finished site: "+site)

print "All done"
示例#14
0
File: evi2nc.py  Project: OzFlux/OzFluxQC
 # global attributes: number of records and the time span of the data
 ds.globalattributes["nc_nrecs"] = len(dt_loc)
 ds.globalattributes["start_datetime"] = str(dt_loc[0])
 ds.globalattributes["end_datetime"] = str(dt_loc[-1])
 # get the Excel datetime
 qcutils.get_xldatefromdatetime(ds)
 # get the year, month, day, hour, minute and second
 qcutils.get_ymdhmsfromdatetime(ds)
 # put the QC'd, smoothed and interpolated EVI into the data structure
 flag = numpy.zeros(len(dt_loc),dtype=numpy.int32)
 attr = qcutils.MakeAttributeDictionary(long_name="MODIS EVI, smoothed and interpolated",units="none",
                                        horiz_resolution="250m",
                                        cutout_size=str(site_cutout),
                                        evi_quality_threshold=str(evi_quality_threshold),
                                        evi_sd_threshold=str(evi_sd_threshold),
                                        evi_interpolate=str(evi_interpolate),
                                        evi_smooth_filter=str(evi_smooth_filter),
                                        sg_num_points=str(sg_num_points),
                                        # BUG FIX: was str(sg_num_points)
                                        sg_order=str(sg_order))
 qcutils.CreateSeries(ds,"EVI",evi_interp2_smooth,Flag=flag,Attr=attr)
 
 # also store the interpolated but not smoothed EVI for comparison
 attr = qcutils.MakeAttributeDictionary(long_name="MODIS EVI, interpolated",units="none",
                                        horiz_resolution="250m",
                                        cutout_size=str(site_cutout),
                                        evi_quality_threshold=str(evi_quality_threshold),
                                        evi_sd_threshold=str(evi_sd_threshold),
                                        evi_interpolate=str(evi_interpolate))
 qcutils.CreateSeries(ds,"EVI_notsmoothed",evi_interp2,Flag=flag,Attr=attr)
 # now write the data structure to a netCDF file
 out_file = qcio.nc_open_write(out_name)
 qcio.nc_write_series(out_file,ds,ndims=1)
 
示例#15
0
 # combine each pair of 30 minute values onto the 60 minute time step
 for item in series_list:
     if "Precip" in item:
         # precipitation is an accumulated quantity, so sum each pair
         data_30minute,flag_30minute,attr = qcutils.GetSeriesasMA(ds_aws_30minute,item,si=si_wholehour,ei=ei_wholehour)
         # pair consecutive 30 minute values (assumes Python 2 integer
         # division and an even record count - TODO confirm)
         data_2d = numpy.reshape(data_30minute,(nRecs_30minute/2,2))
         flag_2d = numpy.reshape(flag_30minute,(nRecs_30minute/2,2))
         data_60minute = numpy.ma.sum(data_2d,axis=1)
         # carry the maximum QC flag of each pair
         flag_60minute = numpy.ma.max(flag_2d,axis=1)
         qcutils.CreateSeries(ds_aws_60minute,item,data_60minute,Flag=flag_60minute,Attr=attr)
     elif "Wd" in item:
         # vector average the wind via U and V components
         # NOTE(review): Ws_30minute is read from the same "Wd" item as
         # Wd_30minute - presumably it should come from the matching "Ws"
         # series; confirm against the original data set
         Ws_30minute,flag_30minute,attr = qcutils.GetSeriesasMA(ds_aws_30minute,item,si=si_wholehour,ei=ei_wholehour)
         Wd_30minute,flag_30minute,attr = qcutils.GetSeriesasMA(ds_aws_30minute,item,si=si_wholehour,ei=ei_wholehour)
         U_30minute,V_30minute = qcutils.convert_WsWdtoUV(Ws_30minute,Wd_30minute)
         U_2d = numpy.reshape(U_30minute,(nRecs_30minute/2,2))
         V_2d = numpy.reshape(V_30minute,(nRecs_30minute/2,2))
         flag_2d = numpy.reshape(flag_30minute,(nRecs_30minute/2,2))
         U_60minute = numpy.ma.sum(U_2d,axis=1)
         V_60minute = numpy.ma.sum(V_2d,axis=1)
         Ws_60minute,Wd_60minute = qcutils.convert_UVtoWsWd(U_60minute,V_60minute)
         flag_60minute = numpy.ma.max(flag_2d,axis=1)
         qcutils.CreateSeries(ds_aws_60minute,item,Wd_60minute,Flag=flag_60minute,Attr=attr)
     else:
         # all other variables: average each pair of 30 minute values
         data_30minute,flag_30minute,attr = qcutils.GetSeriesasMA(ds_aws_30minute,item,si=si_wholehour,ei=ei_wholehour)
         data_2d = numpy.reshape(data_30minute,(nRecs_30minute/2,2))
         flag_2d = numpy.reshape(flag_30minute,(nRecs_30minute/2,2))
         data_60minute = numpy.ma.average(data_2d,axis=1)
         flag_60minute = numpy.ma.max(flag_2d,axis=1)
         qcutils.CreateSeries(ds_aws_60minute,item,data_60minute,Flag=flag_60minute,Attr=attr)
 # write out the 60 minute data
 nc_60minute = aws_name.replace('.nc','_60minute.nc')
 ncfile = qcio.nc_open_write(nc_60minute)
 qcio.nc_write_series(ncfile, ds_aws_60minute, ndims=1)
示例#16
0
 def do_l3qc(self):
     """
     Run L3 processing: corrections and QA/QC of the L2 data.

     Calls qcls.l3qc to apply the corrections specified in the control
     file and writes the L3 results to a netCDF file.

     Available corrections (those marked * require ancillary
     measurements or samples):
         - linear correction (fixed or linearly shifting slope)
         - conversion of virtual temperature to actual temperature
         - 2D coordinate rotation
         - Massman correction for frequency attenuation*
         - Webb, Pearman and Leuning correction for flux effects on
           density measurements
         - conversion of virtual heat flux to actual heat flux
         - correction of soil moisture content to an empirical
           calibration curve*
         - addition of soil heat storage to ground heat flux*

     Control files: L3_year.txt or L3a.txt (see
     ControlFile/Templates/L3.txt for an example).  Relevant sections:
         [General]   - Python control parameters
         [Files]     - L2 input and L3 output file names and paths
         [Massman]   - constants used in the frequency attenuation
                       correction (zmd, z0, angle, CSATarm, IRGAarm)
         [Soil]      - constants used in correcting Fg for storage and
                       in the empirical corrections of soil water
                       content (FgDepth, BulkDensity, OrganicContent,
                       SwsDefault, SWC_*/TDR_* coefficients, variable
                       and attribute lists)
         [Output]    - variable subset list for the OzFlux output file
         [Variables] - per-variable range checks, diurnal checks and
                       linear correction timestamps/slope/offset
         [Plots]     - variable lists for plot generation
     """
     logging.info(" Starting L3 processing ...")
     self.cf = qcio.load_controlfile(path='controlfiles')
     if len(self.cf) == 0:
         logging.info( " L3: no control file chosen")
         self.do_progress(text='Waiting for input ...')
         return
     infilename = qcio.get_infilenamefromcf(self.cf)
     if not qcutils.file_exists(infilename):
         self.do_progress(text='An error occurred, check the console ...')
         return
     self.ds2 = qcio.nc_read_series(infilename)
     if len(self.ds2.series.keys()) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         del self.ds2
         return
     dt_data = self.ds2.series['DateTime']['Data']
     self.update_startenddate(str(dt_data[0]), str(dt_data[-1]))
     self.do_progress(text='Doing L3 QC & Corrections ...')
     self.ds3 = qcls.l3qc(self.cf, self.ds2)
     self.do_progress(text='Finished L3')
     sitename = self.ds3.globalattributes['site_name'].replace(' ','')
     logging.info(' Finished L3: Standard processing for site: '+sitename)
     # write the L3 data structure to the output netCDF file
     self.do_progress(text='Saving L3 QC & Corrected NetCDF data ...')
     outfilename = qcio.get_outfilenamefromcf(self.cf)
     if len(outfilename) == 0:
         self.do_progress(text='An error occurred, check the console ...')
         return
     ncFile = qcio.nc_open_write(outfilename)
     outputlist = qcio.get_outputlistfromcf(self.cf, 'nc')
     qcio.nc_write_series(ncFile, self.ds3, outputlist=outputlist)
     self.do_progress(text='Finished saving L3 QC & Corrected NetCDF data')
     logging.info(' Finished saving L3 QC & Corrected NetCDF data')
     logging.info("")