def do_run_l1(cfg):
    """
    Purpose:
     Top level routine for running the L1 data import.
    Usage:
     pfp_top_level.do_l1()
    Side effects:
     Creates an L1 netCDF file.
    Author: PRI
    Date: Back in the day
    Mods:
     December 2017: rewrite for use with new GUI
    """
    try:
        logger.info("Starting L1 processing")
        # run the L1 quality control on the control file contents
        data = pfp_levels.l1qc(cfg)
        if data.returncodes["value"] == 0:
            # L1 QC succeeded, write the result to the output netCDF file
            out_name = pfp_io.get_outfilenamefromcf(cfg)
            out_nc = pfp_io.nc_open_write(out_name)
            if out_nc is None:
                return
            pfp_io.nc_write_series(out_nc, data)
            logger.info("Finished L1 processing")
        else:
            msg = "An error occurred during L1 processing"
            logger.error(msg)
        logger.info("")
    except Exception:
        # log the full traceback so the user can diagnose the failure
        msg = " Error running L1, see below for details ..."
        logger.error(msg)
        error_message = traceback.format_exc()
        logger.error(error_message)
    return
def do_L3_batch(cf_level):
    """
    Purpose:
     Batch version of the L3 processing: run L3 QC for every control file
     in cf_level and write an L3 netCDF file for each.
    Usage:
     do_L3_batch(cf_level)
     where cf_level is a dictionary of control file paths keyed by index.
    Side effects:
     Creates an L3 netCDF file for each control file processed.
    """
    #logger = pfp_log.change_logger_filename("pfp_log", "L3")
    for i in list(cf_level.keys()):
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L3 processing with " + cf_file_name[1]
        logger.info(msg)
        try:
            cf = pfp_io.get_controlfilecontents(cf_level[i])
            infilename = pfp_io.get_infilenamefromcf(cf)
            ds2 = pfp_io.nc_read_series(infilename)
            if ds2.returncodes["value"] != 0:
                # reading this input file failed; skip this control file so
                # the rest of the batch still runs (was "return", which
                # silently aborted all remaining control files)
                msg = "Error reading " + infilename + ", skipping ..."
                logger.error(msg)
                continue
            ds3 = pfp_levels.l3qc(cf, ds2)
            outfilename = pfp_io.get_outfilenamefromcf(cf)
            nc_file = pfp_io.nc_open_write(outfilename)
            if nc_file is None:
                # could not open the output file; skip this control file
                msg = "Error opening " + outfilename + ", skipping ..."
                logger.error(msg)
                continue
            pfp_io.nc_write_series(nc_file, ds3)
            msg = "Finished L3 processing with " + cf_file_name[1]
            logger.info(msg)
            logger.info("")
        except Exception:
            msg = "Error occurred during L3 processing " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return
def do_L6_batch(cf_level):
    """
    Purpose:
     Batch version of the L6 processing: partition NEE into GPP and ER for
     every control file in cf_level.
    Usage:
     do_L6_batch(cf_level)
     where cf_level is a dictionary of control file paths keyed by index.
    Side effects:
     Creates an L6 netCDF file for each control file processed.
    """
    # switch the log file over to an L6-specific name
    logger = pfp_log.change_logger_filename("pfp_log", "L6")
    for i in cf_level.keys():
        if not os.path.isfile(cf_level[i]):
            msg = " Control file " + cf_level[i] + " not found"
            logger.error(msg)
            continue
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L6 processing with " + cf_file_name[1]
        logger.info(msg)
        try:
            cf = pfp_io.get_controlfilecontents(cf_level[i])
            # force batch mode: no GUI calls, no plot windows
            if "Options" not in cf:
                cf["Options"] = {}
            cf["Options"]["call_mode"] = "batch"
            cf["Options"]["show_plots"] = "No"
            infilename = pfp_io.get_infilenamefromcf(cf)
            ds5 = pfp_io.nc_read_series(infilename)
            ds6 = pfp_levels.l6qc(None, cf, ds5)
            outfilename = pfp_io.get_outfilenamefromcf(cf)
            nc_file = pfp_io.nc_open_write(outfilename)
            if nc_file is None:
                # could not open the output file (was unchecked, which would
                # raise inside nc_write_series); skip this control file
                msg = "Error opening " + outfilename + ", skipping ..."
                logger.error(msg)
                continue
            pfp_io.nc_write_series(nc_file, ds6)
            msg = "Finished L6 processing with " + cf_file_name[1]
            logger.info(msg)
            logger.info("")
        except Exception:
            msg = "Error occurred during L6 with " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return
def do_L5_batch(cf_level):
    """
    Purpose:
     Batch version of the L5 gap filling: run L5 for every control file in
     cf_level, then plot the fingerprints for each L5 output file.
    Usage:
     do_L5_batch(cf_level)
     where cf_level is a dictionary of control file paths keyed by index.
    Side effects:
     Creates an L5 netCDF file and fingerprint plots for each control file.
    """
    #logger = pfp_log.change_logger_filename("pfp_log", "L5")
    for i in list(cf_level.keys()):
        if not os.path.isfile(cf_level[i]):
            msg = " Control file " + cf_level[i] + " not found"
            logger.error(msg)
            continue
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L5 processing with " + cf_file_name[1]
        logger.info(msg)
        try:
            cf_l5 = pfp_io.get_controlfilecontents(cf_level[i])
            # force batch mode: no GUI calls, no plot windows
            if "Options" not in cf_l5:
                cf_l5["Options"] = {}
            cf_l5["Options"]["call_mode"] = "batch"
            cf_l5["Options"]["show_plots"] = "No"
            infilename = pfp_io.get_infilenamefromcf(cf_l5)
            ds4 = pfp_io.nc_read_series(infilename)
            if ds4.returncodes["value"] != 0:
                # reading this input file failed; skip this control file so
                # the rest of the batch still runs (was "return")
                msg = "Error reading " + infilename + ", skipping ..."
                logger.error(msg)
                continue
            ds5 = pfp_levels.l5qc(None, cf_l5, ds4)
            outfilename = pfp_io.get_outfilenamefromcf(cf_l5)
            nc_file = pfp_io.nc_open_write(outfilename)
            if nc_file is None:
                # could not open the output file; skip this control file
                msg = "Error opening " + outfilename + ", skipping ..."
                logger.error(msg)
                continue
            pfp_io.nc_write_series(nc_file, ds5)
            msg = "Finished L5 processing with " + cf_file_name[1]
            logger.info(msg)
            # now plot the fingerprints for the L5 files
            cf_fp = pfp_io.get_controlfilecontents("controlfiles/standard/fingerprint.txt")
            # was '"Files" not in dir(cf_fp)': dir() lists object attributes
            # and methods, not section keys, so that test was always True;
            # use the same membership test as the "Options" check below
            if "Files" not in cf_fp:
                cf_fp["Files"] = {}
            file_name = pfp_io.get_outfilenamefromcf(cf_l5)
            file_path = ntpath.split(file_name)[0] + "/"
            cf_fp["Files"]["file_path"] = file_path
            cf_fp["Files"]["in_filename"] = ntpath.split(file_name)[1]
            if "plot_path" in cf_l5["Files"]:
                cf_fp["Files"]["plot_path"] = cf_l5["Files"]["plot_path"]
            else:
                # derive the plot path from the data path, assumes the data
                # directory name contains "Data"
                cf_fp["Files"]["plot_path"] = file_path[:file_path.index("Data")] + "Plots/"
            if "Options" not in cf_fp:
                cf_fp["Options"] = {}
            cf_fp["Options"]["call_mode"] = "batch"
            cf_fp["Options"]["show_plots"] = "No"
            msg = "Doing fingerprint plots using " + cf_fp["Files"]["in_filename"]
            logger.info(msg)
            pfp_plot.plot_fingerprint(cf_fp)
            msg = "Finished fingerprint plots"
            logger.info(msg)
            logger.info("")
        except Exception:
            msg = "Error occurred during L5 with " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return
def do_run_l3(cfg=None):
    """
    Purpose:
     Top level routine for running the L3 post-processing.
    Usage:
     pfp_top_level.do_l3()
    Side effects:
     Creates an L3 netCDF file.
    Author: PRI
    Date: Back in the day
    Mods:
     December 2017: rewrite for use with new GUI
    """
    try:
        logger.info("Starting L3 processing")
        # prompt for a control file if one was not supplied
        if not cfg:
            cfg = pfp_io.load_controlfile()
            if len(cfg) == 0:
                logger.info("Quiting L3 processing (no control file)")
                return
        in_filepath = pfp_io.get_infilenamefromcf(cfg)
        if not pfp_utils.file_exists(in_filepath):
            in_filename = os.path.split(in_filepath)
            logger.error("File " + in_filename[1] + " not found")
            return
        ds2 = pfp_io.nc_read_series(in_filepath)
        ds3 = pfp_levels.l3qc(cfg, ds2)
        if ds3.returncodes["value"] != 0:
            logger.error("An error occurred during L3 processing")
            logger.error("")
            return
        out_filepath = pfp_io.get_outfilenamefromcf(cfg)
        nc_file = pfp_io.nc_open_write(out_filepath)
        # guard against a failed open (was unchecked here, but checked in
        # do_run_l1/l4/l5); nc_write_series would raise on None
        if nc_file is None:
            return
        pfp_io.nc_write_series(nc_file, ds3)
        logger.info("Finished L3 processing")
        # do any plots requested in the [Plots] section of the control file
        if "Plots" in list(cfg.keys()):
            logger.info("Plotting L3 data")
            for nFig in cfg['Plots'].keys():
                plt_cf = cfg['Plots'][str(nFig)]
                if 'Type' in plt_cf.keys():
                    if str(plt_cf['Type']).lower() == 'xy':
                        pfp_plot.plotxy(cfg, nFig, plt_cf, ds2, ds3)
                    else:
                        pfp_plot.plottimeseries(cfg, nFig, ds2, ds3)
                else:
                    # default plot type is a time series
                    pfp_plot.plottimeseries(cfg, nFig, ds2, ds3)
            logger.info("Finished plotting L3 data")
    except Exception:
        msg = " Error running L3, see below for details ..."
        logger.error(msg)
        error_message = traceback.format_exc()
        logger.error(error_message)
        logger.info("")
    return
def do_run_l5(main_gui, cfg):
    """
    Purpose:
     Top level routine for running the L5 gap filling.
    Usage:
     pfp_top_level.do_run_l5()
    Side effects:
     Creates an L5 netCDF file with gap filled meteorology.
    Author: PRI
    Date: Back in the day
    Mods:
     December 2017: rewrite for use with new GUI
    """
    try:
        logger.info("Starting L5 processing")
        input_path = pfp_io.get_infilenamefromcf(cfg)
        if not pfp_utils.file_exists(input_path):
            in_filename = os.path.split(input_path)
            logger.error("File " + in_filename[1] + " not found")
            return
        data_l4 = pfp_io.nc_read_series(input_path)
        if data_l4.returncodes["value"] != 0:
            return
        site = data_l4.globalattributes['site_name']
        # make sure we run in interactive mode
        if "Options" not in cfg:
            cfg["Options"] = {}
        cfg["Options"]["call_mode"] = "interactive"
        # do the L5 gap filling
        data_l5 = pfp_levels.l5qc(main_gui, cfg, data_l4)
        if data_l5.returncodes["value"] != 0:
            # L5 did not complete; remove any stale output file
            logger.info("Quitting L5: " + site)
            output_path = pfp_io.get_outfilenamefromcf(cfg)
            if os.path.isfile(output_path):
                os.remove(output_path)
        else:
            # L5 completed, save the gap filled data
            logger.info("Finished L5: " + site)
            output_path = pfp_io.get_outfilenamefromcf(cfg)
            nc_out = pfp_io.nc_open_write(output_path)
            if nc_out is None:
                return
            pfp_io.nc_write_series(nc_out, data_l5)
            logger.info("Finished saving L5 gap filled data")
        logger.info("")
    except Exception:
        msg = " Error running L5, see below for details ..."
        logger.error(msg)
        error_message = traceback.format_exc()
        logger.error(error_message)
    return
def do_L4_batch(cf_level):
    """
    Purpose:
     Batch version of the L4 gap filling: run L4 for every control file in
     cf_level, then plot the fingerprints for each L4 output file.
    Usage:
     do_L4_batch(cf_level)
     where cf_level is a dictionary of control file paths keyed by index.
    Side effects:
     Creates an L4 netCDF file and fingerprint plots for each control file.
    """
    for i in cf_level.keys():
        if not os.path.isfile(cf_level[i]):
            msg = " Control file " + cf_level[i] + " not found"
            logger.error(msg)
            continue
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L4 processing with " + cf_file_name[1]
        logger.info(msg)
        try:
            cf_l4 = pfp_io.get_controlfilecontents(cf_level[i])
            # force batch mode: no GUI calls, no plot windows
            if "Options" not in cf_l4:
                cf_l4["Options"] = {}
            cf_l4["Options"]["call_mode"] = "batch"
            cf_l4["Options"]["show_plots"] = "No"
            infilename = pfp_io.get_infilenamefromcf(cf_l4)
            ds3 = pfp_io.nc_read_series(infilename)
            ds4 = pfp_levels.l4qc(None, cf_l4, ds3)
            outfilename = pfp_io.get_outfilenamefromcf(cf_l4)
            outputlist = pfp_io.get_outputlistfromcf(cf_l4, "nc")
            nc_file = pfp_io.nc_open_write(outfilename)
            if nc_file is None:
                # could not open the output file (was unchecked, which would
                # raise inside nc_write_series); skip this control file
                msg = "Error opening " + outfilename + ", skipping ..."
                logger.error(msg)
                continue
            pfp_io.nc_write_series(nc_file, ds4, outputlist=outputlist)
            msg = "Finished L4 processing with " + cf_file_name[1]
            logger.info(msg)
            # now plot the fingerprints for the L4 files
            cf_fp = pfp_io.get_controlfilecontents("controlfiles/standard/fingerprint.txt")
            # was '"Files" not in dir(cf_fp)': dir() lists object attributes
            # and methods, not section keys, so that test was always True;
            # use the same membership test as the "Options" check below
            if "Files" not in cf_fp:
                cf_fp["Files"] = {}
            file_name = pfp_io.get_outfilenamefromcf(cf_l4)
            file_path = ntpath.split(file_name)[0] + "/"
            cf_fp["Files"]["file_path"] = file_path
            cf_fp["Files"]["in_filename"] = ntpath.split(file_name)[1]
            if "plot_path" in cf_l4["Files"]:
                cf_fp["Files"]["plot_path"] = cf_l4["Files"]["plot_path"]
            else:
                # derive the plot path from the data path, assumes the data
                # directory name contains "Data"
                cf_fp["Files"]["plot_path"] = file_path[:file_path.index("Data")] + "Plots/"
            if "Options" not in cf_fp:
                cf_fp["Options"] = {}
            cf_fp["Options"]["call_mode"] = "batch"
            cf_fp["Options"]["show_plots"] = "No"
            msg = "Doing fingerprint plots using " + cf_fp["Files"]["in_filename"]
            logger.info(msg)
            pfp_plot.plot_fingerprint(cf_fp)
            logger.info("Finished fingerprint plots")
            logger.info("")
        except Exception:
            msg = "Error occurred during L4 with " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return
def do_run_l6(main_gui, cfg=None):
    """
    Purpose:
     Top level routine for running the L6 gap filling.
    Usage:
     pfp_top_level.do_run_l6()
    Side effects:
     Creates an L6 netCDF file with NEE partitioned into GPP and ER.
    Author: PRI
    Date: Back in the day
    Mods:
     December 2017: rewrite for use with new GUI
    """
    try:
        logger.info("Starting L6 processing")
        # prompt for a control file if one was not supplied
        if not cfg:
            cfg = pfp_io.load_controlfile(path='controlfiles')
            if len(cfg) == 0:
                logger.info("Quiting L6 processing (no control file)")
                return
        in_filepath = pfp_io.get_infilenamefromcf(cfg)
        if not pfp_utils.file_exists(in_filepath):
            in_filename = os.path.split(in_filepath)
            logger.error("File " + in_filename[1] + " not found")
            return
        ds5 = pfp_io.nc_read_series(in_filepath)
        sitename = ds5.globalattributes['site_name']
        # make sure we run in interactive mode with plots shown
        if "Options" not in cfg:
            cfg["Options"] = {}
        cfg["Options"]["call_mode"] = "interactive"
        cfg["Options"]["show_plots"] = "Yes"
        ds6 = pfp_levels.l6qc(main_gui, cfg, ds5)
        if ds6.returncodes["value"] != 0:
            logger.info("Quitting L6: " + sitename)
        else:
            logger.info("Finished L6: " + sitename)
            out_filepath = pfp_io.get_outfilenamefromcf(cfg)
            nc_file = pfp_io.nc_open_write(out_filepath)
            # guard against a failed open (was unchecked here, but checked
            # in do_run_l1/l4/l5); nc_write_series would raise on None
            if nc_file is None:
                return
            pfp_io.nc_write_series(nc_file, ds6)
            logger.info("Finished saving L6 gap filled data")
        logger.info("")
    except Exception:
        msg = " Error running L6, see below for details ..."
        logger.error(msg)
        error_message = traceback.format_exc()
        logger.error(error_message)
    return
def nc_update(cfg):
    """
    Purpose:
     Update a PFP-style netCDF file by changing variable names and attributes.
    Usage:
    Author: PRI
    Date: October 2018
    """
    # the file is updated in place: read it, edit it, write it back
    file_path = pfp_io.get_infilenamefromcf(cfg)
    ds = pfp_io.nc_read_series(file_path)
    # apply the renames, removals and attribute edits from the control file
    change_variable_names(cfg, ds)
    copy_ws_wd(ds)
    remove_variables(cfg, ds)
    change_global_attributes(cfg, ds)
    # overwrite the original file with the updated data structure
    nc_out = pfp_io.nc_open_write(file_path)
    pfp_io.nc_write_series(nc_out, ds)
    return 0
def do_run_l4(main_gui, cfg):
    """
    Purpose:
     Top level routine for running the L4 gap filling.
    Usage:
     pfp_top_level.do_run_l4()
    Side effects:
     Creates an L4 netCDF file with gap filled meteorology.
    Author: PRI
    Date: Back in the day
    Mods:
     December 2017: rewrite for use with new GUI
    """
    try:
        logger.info("Starting L4 processing")
        input_path = pfp_io.get_infilenamefromcf(cfg)
        if not pfp_utils.file_exists(input_path):
            in_filename = os.path.split(input_path)
            logger.error("File " + in_filename[1] + " not found")
            return
        data_l3 = pfp_io.nc_read_series(input_path)
        if data_l3.returncodes["value"] != 0:
            return
        site = data_l3.globalattributes['site_name']
        # make sure we run in interactive mode
        if "Options" not in cfg:
            cfg["Options"] = {}
        cfg["Options"]["call_mode"] = "interactive"
        # do the L4 gap filling
        data_l4 = pfp_levels.l4qc(main_gui, cfg, data_l3)
        if data_l4.returncodes["value"] != 0:
            logger.info("Quitting L4: " + site)
        else:
            logger.info("Finished L4: " + site)
            output_path = pfp_io.get_outfilenamefromcf(cfg)
            nc_out = pfp_io.nc_open_write(output_path)
            if nc_out is None:
                return
            # save the L4 data
            pfp_io.nc_write_series(nc_out, data_l4)
            logger.info("Finished saving L4 gap filled data")
        logger.info("")
    except Exception:
        msg = " Error running L4, see below for details ..."
        logger.error(msg)
        error_message = traceback.format_exc()
        logger.error(error_message)
    return
def do_L1_batch(cf_level):
    """
    Purpose:
     Batch version of the L1 processing: run the L1 data import for every
     control file in cf_level.
    Usage:
     do_L1_batch(cf_level)
     where cf_level is a dictionary of control file paths keyed by index.
    Side effects:
     Creates an L1 netCDF file for each control file processed.
    """
    for i in cf_level.keys():
        cf_file_name = os.path.split(cf_level[i])
        logger.info("Starting L1 processing with %s", cf_file_name[1])
        try:
            cf = pfp_io.get_controlfilecontents(cf_level[i])
            ds1 = pfp_levels.l1qc(cf)
            if ds1.returncodes["value"] != 0:
                # L1 QC failed for this site (do_run_l1 makes this check but
                # it was missing here); skip it and keep the batch going
                msg = "An error occurred during L1 processing with " + cf_file_name[1]
                logger.error(msg)
                continue
            outfilename = pfp_io.get_outfilenamefromcf(cf)
            nc_file = pfp_io.nc_open_write(outfilename)
            if nc_file is None:
                # could not open the output file; skip this control file
                msg = "Error opening " + outfilename + ", skipping ..."
                logger.error(msg)
                continue
            pfp_io.nc_write_series(nc_file, ds1)
            msg = "Finished L1 processing with " + cf_file_name[1]
            logger.info(msg)
            logger.info("")
        except Exception:
            msg = "Error occurred during L1 processing " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return
def do_L3_batch(cf_level):
    """
    Purpose:
     Batch version of the L3 processing: run L3 QC for every control file
     in cf_level, writing only the outputs requested in the control file.
    Usage:
     do_L3_batch(cf_level)
     where cf_level is a dictionary of control file paths keyed by index.
    Side effects:
     Creates an L3 netCDF file for each control file processed.
    NOTE(review): this duplicates the name of another do_L3_batch definition
    in this source; if both live in the same module the later definition
    shadows the earlier one — confirm which is intended.
    """
    for i in cf_level.keys():
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L3 processing with " + cf_file_name[1]
        logger.info(msg)
        try:
            cf = pfp_io.get_controlfilecontents(cf_level[i])
            infilename = pfp_io.get_infilenamefromcf(cf)
            ds2 = pfp_io.nc_read_series(infilename)
            ds3 = pfp_levels.l3qc(cf, ds2)
            outfilename = pfp_io.get_outfilenamefromcf(cf)
            outputlist = pfp_io.get_outputlistfromcf(cf, "nc")
            nc_file = pfp_io.nc_open_write(outfilename)
            if nc_file is None:
                # could not open the output file (was unchecked, which would
                # raise inside nc_write_series); skip this control file
                msg = "Error opening " + outfilename + ", skipping ..."
                logger.error(msg)
                continue
            pfp_io.nc_write_series(nc_file, ds3, outputlist=outputlist)
            msg = "Finished L3 processing with " + cf_file_name[1]
            logger.info(msg)
            logger.info("")
        except Exception:
            msg = "Error occurred during L3 processing " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return
def do_L2_batch(cf_level):
    """
    Purpose:
     Batch version of the L2 processing: run L2 QC for every control file
     in cf_level.
    Usage:
     do_L2_batch(cf_level)
     where cf_level is a dictionary of control file paths keyed by index.
    Side effects:
     Creates an L2 netCDF file for each control file processed.
    """
    # switch the log file over to an L2-specific name
    logger = pfp_log.change_logger_filename("pfp_log", "L2")
    for i in cf_level.keys():
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L2 processing with " + cf_file_name[1]
        logger.info(msg)
        try:
            cf = pfp_io.get_controlfilecontents(cf_level[i])
            infilename = pfp_io.get_infilenamefromcf(cf)
            ds1 = pfp_io.nc_read_series(infilename)
            ds2 = pfp_levels.l2qc(cf, ds1)
            outfilename = pfp_io.get_outfilenamefromcf(cf)
            nc_file = pfp_io.nc_open_write(outfilename)
            if nc_file is None:
                # could not open the output file (was unchecked, which would
                # raise inside nc_write_series); skip this control file
                msg = "Error opening " + outfilename + ", skipping ..."
                logger.error(msg)
                continue
            pfp_io.nc_write_series(nc_file, ds2)
            msg = "Finished L2 processing with " + cf_file_name[1]
            logger.info(msg)
            logger.info("")
        except Exception:
            msg = "Error occurred during L2 processing " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return
if level.lower() not in [ "l1", "l2", "l3", "ecostress", "fluxnet", "reddyproc", "concatenate", "climatology", "cpd", "l4", "l5", "l6" ]: logger.warning("Unrecognised level " + level) continue if level.lower() == "l1": # L1 processing for i in cf_batch["Levels"][level].keys(): cfname = cf_batch["Levels"][level][i] cf_file_name = os.path.split(cfname) logger.info('Starting L1 processing with ' + cf_file_name[1]) cf = pfp_io.get_controlfilecontents(cfname) ds1 = pfp_ls.l1qc(cf) outfilename = pfp_io.get_outfilenamefromcf(cf) ncFile = pfp_io.nc_open_write(outfilename) pfp_io.nc_write_series(ncFile, ds1) logger.info('Finished L1 processing with ' + cf_file_name[1]) logger.info('') elif level.lower() == "l2": # L2 processing for i in cf_batch["Levels"][level].keys(): cfname = cf_batch["Levels"][level][i] cf_file_name = os.path.split(cfname) logger.info('Starting L2 processing with ' + cf_file_name[1]) cf = pfp_io.get_controlfilecontents(cfname) infilename = pfp_io.get_infilenamefromcf(cf) ds1 = pfp_io.nc_read_series(infilename) ds2 = pfp_ls.l2qc(cf, ds1) outfilename = pfp_io.get_outfilenamefromcf(cf) ncFile = pfp_io.nc_open_write(outfilename)
flag = numpy.zeros(len(Ws_era5_tts), dtype=numpy.int32) attr = pfp_utils.MakeAttributeDictionary(long_name="Wind speed", units="m/s") pfp_utils.CreateSeries(ds_era5, "Ws", Ws_era5_tts, flag, attr) Wd_era5_tts = float(270) - numpy.arctan2( V_era5_tts, U_era5_tts) * float(180) / numpy.pi idx = numpy.where(Wd_era5_tts > 360)[0] if len(idx) > 0: Wd_era5_tts[idx] = Wd_era5_tts[idx] - float(360) flag = numpy.zeros(len(Wd_era5_tts), dtype=numpy.int32) attr = pfp_utils.MakeAttributeDictionary(long_name="Wind direction", units="deg") pfp_utils.CreateSeries(ds_era5, "Wd", Wd_era5_tts, flag, attr) # === WRITE OUTPUT FILE FOR THE SITE # write the yearly file for this site ncfile = pfp_io.nc_open_write(out_file_path) pfp_io.nc_write_series(ncfile, ds_era5, ndims=1) # add this yearly file to the control file dictionary for this site cf_dict[site_name]["Files"]["In"][str(n + add_exist)] = out_file_path # tell the user we have finished this site logger.info("Finished " + site_name) logger.info("") # now we need to loop over the contents of the concatenate control file dictionary for site_name in site_list: cf_concat = cf_dict[site_name] print('CE: cf_concat = ', cf_concat) cf_concat.filename = os.path.join(concat_control_path, site_name + "_concatenate.txt") cf_concat.write() msg = "Concatenating monthly files for " + site_name
# update global attributes ds.globalattributes["nc_nrecs"] = len(dt_loc) ds.globalattributes["start_datetime"] = str(dt_loc[0]) ds.globalattributes["end_datetime"] = str(dt_loc[-1]) # put the QC'd, smoothed and interpolated EVI into the data structure flag = numpy.zeros(len(dt_loc), dtype=numpy.int32) attr = pfp_utils.MakeAttributeDictionary( long_name="MODIS EVI, smoothed and interpolated", units="none", horiz_resolution="250m", cutout_size=str(3), evi_min=str(evi_min), evi_max=str(evi_max), sg_num_points=str(sgnp), sg_order=str(sgo)) pfp_utils.CreateSeries(ds, "EVI", evi_ts["smoothed"], flag, attr) attr = pfp_utils.MakeAttributeDictionary(long_name="MODIS EVI, interpolated", units="none", horiz_resolution="250m", cutout_size=str(3), evi_min=str(evi_min), evi_max=str(evi_max)) pfp_utils.CreateSeries(ds, "EVI_notsmoothed", evi_ts["mean"], flag, attr) # now write the data structure to a netCDF file out_name = os.path.join(cf["Files"]["base_path"], site, "Data", "MODIS", site + "_EVI.nc") out_file = pfp_io.nc_open_write(out_name) pfp_io.nc_write_series(out_file, ds, ndims=1) print "modis_evi2nc: finished"
op = os.path.join(rp, site, "Data", "Processed") if not os.path.isdir(sp): print(sp + " , skipping site ...") continue files = sorted([f for f in os.listdir(sp) if ("L3" in f and ".nc" in f)]) if len(files) == 0: print("No files found in " + sp + " , skipping ...") continue for fn in files: ifp = os.path.join(sp, fn) print("Converting " + fn) cfg["Files"]["in_filename"] = ifp # read the input file ds1 = pfp_io.nc_read_series(ifp) # update the variable names change_variable_names(cfg, ds1) # make sure there are Ws and Wd series copy_ws_wd(ds1) # make sure we have all the variables we want ... ds2 = include_variables(cfg, ds1) # ... but not the ones we don't exclude_variables(cfg, ds2) # update the global attributes change_global_attributes(cfg, ds2) # update the variable attributes change_variable_attributes(cfg, ds2) # Fc single point storage consistent_Fc_storage(cfg, ds2, site) ofp = os.path.join(op, fn) nf = pfp_io.nc_open_write(ofp) pfp_io.nc_write_series(nf, ds2)