Example #1
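Batch L1 processing: the function loops over a dictionary of L1 control files, runs the L1 quality control step for each one and writes the resulting data structure to netCDF with pfp_io.NetCDFWrite.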
def do_L1_batch(main_ui, cf_level):
    for i in list(cf_level.keys()):
        # check the stop flag
        if main_ui.stop_flag:
            # break out of the loop if user requested stop
            break
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L1 processing with " + cf_file_name[1]
        logger.info(msg)
        if not check_file_exits(cf_level[i]):
            return 0
        try:
            cf_l1 = pfp_io.get_controlfilecontents(cf_level[i])
            if not pfp_compliance.l1_update_controlfile(cf_l1):
                continue
            ds1 = pfp_levels.l1qc(cf_l1)
            outfilename = pfp_io.get_outfilenamefromcf(cf_l1)
            pfp_io.NetCDFWrite(outfilename, ds1)
            msg = "Finished L1 processing with " + cf_file_name[1]
            logger.info(msg)
            logger.info("")
        except Exception:
            msg = "Error occurred during L1 processing " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return 1
Example #2
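A GUI "Save As" handler: the data structure of the current tab is written to its file path and the tab title is updated to the new file name.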
def save_as_netcdf_file(self, ds):
    """Save the current tab with a different name."""
    # get the current tab index
    tab_index_current = self.tabs.tab_index_current
    # write the data structure to file
    pfp_io.NetCDFWrite(ds.info["filepath"], ds)
    # update the tab text
    tab_title = os.path.basename(str(ds.info["filepath"]))
    self.tabs.setTabText(tab_index_current, tab_title)
    return
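The call signature is the same in every example: pfp_io.NetCDFWrite(output_file_name, data_structure). A minimal round-trip sketch, assuming pfp_io is importable and that "existing.nc" is a netCDF file previously written by PyFluxPro (the import line and the file names are illustrative only):

import pfp_io  # assumption: pfp_io is on the Python path (e.g. PyFluxPro's scripts directory)

# read an existing data structure and check the return code before using it
ds = pfp_io.NetCDFRead("existing.nc")
if ds.info["returncodes"]["value"] == 0:
    # write the data structure back out under a new name
    pfp_io.NetCDFWrite("existing_copy.nc", ds)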
Example #3
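A GUI "Save" handler: the updated data structure is read back from the current tab, written to file, and the asterisk marking unsaved changes is removed from the tab text.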
def save_netcdf_file(self):
    """Save the current tab as a netCDF file."""
    # get the current tab index
    tab_index_current = self.tabs.tab_index_current
    # get the updated control file data
    ds = self.tabs.tab_dict[tab_index_current].get_data_from_model()
    # write the data structure to file
    pfp_io.NetCDFWrite(ds.info["filepath"], ds)
    # remove the asterisk in the tab text
    tab_text = str(self.tabs.tabText(tab_index_current))
    self.tabs.setTabText(self.tabs.tab_index_current,
                         tab_text.replace("*", ""))
    return
Example #4
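Batch L2 processing: each L1 netCDF file is read, the L2 quality control step is run, the output is written with pfp_io.NetCDFWrite and, if the control file defines a Plots section, the L1 and L2 data are plotted.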
def do_L2_batch(main_ui, cf_level):
    for i in list(cf_level.keys()):
        # check the stop flag
        if main_ui.stop_flag:
            # break out of the loop if user requested stop
            break
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L2 processing with " + cf_file_name[1]
        logger.info(msg)
        if not check_file_exits(cf_level[i]):
            return 0
        try:
            cf_l2 = pfp_io.get_controlfilecontents(cf_level[i])
            if not pfp_compliance.l2_update_controlfile(cf_l2):
                continue
            if "Options" not in cf_l2:
                cf_l2["Options"] = {}
            cf_l2["Options"]["call_mode"] = "batch"
            cf_l2["Options"]["show_plots"] = "No"
            infilename = pfp_io.get_infilenamefromcf(cf_l2)
            ds1 = pfp_io.NetCDFRead(infilename)
            if ds1.info["returncodes"]["value"] != 0:
                # abort the batch if the netCDF read failed
                return 0
            ds2 = pfp_levels.l2qc(cf_l2, ds1)
            outfilename = pfp_io.get_outfilenamefromcf(cf_l2)
            pfp_io.NetCDFWrite(outfilename, ds2)
            msg = "Finished L2 processing with " + cf_file_name[1]
            logger.info(msg)
            if "Plots" in cf_l2:
                logger.info("Plotting L1 and L2 data")
                for nFig in list(cf_l2["Plots"].keys()):
                    if "(disabled)" in nFig:
                        continue
                    plt_cf = cf_l2["Plots"][nFig]
                    if "type" in plt_cf and str(plt_cf["type"]).lower() == "xy":
                        pfp_plot.plotxy(cf_l2, nFig, plt_cf, ds1, ds2)
                    else:
                        pfp_plot.plottimeseries(cf_l2, nFig, ds1, ds2)
                logger.info("Finished plotting L1 and L2 data")
            logger.info("")
        except Exception:
            msg = "Error occurred during L2 processing with " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return 1
Example #5
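Batch L5 processing: the input (L4) netCDF file is read for each site, the L5 processing step is run in batch mode, the output is written to file and the L5 fingerprint plots are produced.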
def do_L5_batch(main_ui, cf_level):
    sites = sorted(list(cf_level.keys()), key=int)
    for i in sites:
        # check the stop flag
        if main_ui.stop_flag:
            # break out of the loop if user requested stop
            break
        cf_file_name = os.path.split(cf_level[i])
        msg = "Starting L5 processing with " + cf_file_name[1]
        logger.info(msg)
        if not check_file_exits(cf_level[i]):
            return 0
        try:
            cf_l5 = pfp_io.get_controlfilecontents(cf_level[i])
            if not pfp_compliance.l5_update_controlfile(cf_l5):
                continue
            if "Options" not in cf_l5:
                cf_l5["Options"] = {}
            cf_l5["Options"]["call_mode"] = "batch"
            cf_l5["Options"]["show_plots"] = "No"
            infilename = pfp_io.get_infilenamefromcf(cf_l5)
            ds4 = pfp_io.NetCDFRead(infilename)
            if ds4.info["returncodes"]["value"] != 0:
                # abort the batch if the netCDF read failed
                return 0
            ds5 = pfp_levels.l5qc(None, cf_l5, ds4)
            outfilename = pfp_io.get_outfilenamefromcf(cf_l5)
            pfp_io.NetCDFWrite(outfilename, ds5)
            msg = "Finished L5 processing with " + cf_file_name[1]
            logger.info(msg)
            # CF compliance check (currently disabled)
            #do_batch_cfcheck(cf_l5)
            # plot the L5 fingerprints
            do_batch_fingerprints(cf_l5)
            logger.info("")
        except Exception:
            msg = "Error occurred during L5 processing with " + cf_file_name[1]
            logger.error(msg)
            error_message = traceback.format_exc()
            logger.error(error_message)
            continue
    return 1
Example #6
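An excerpt from an ISD processing script: the valid_range attribute is removed from every variable, the combined data for each year is written to a netCDF file with pfp_io.NetCDFWrite and the yearly files for the site are then concatenated.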
        # quick and dirty hack needed because V3.2 and V3.3 treat the
        # valid_range attribute differently.
        labels = list(ds_all.series.keys())
        for label in labels:
            attrs = list(ds_all.series[label]["Attr"].keys())
            for attr in ["valid_range"]:
                if attr in attrs:
                    ds_all.series[label]["Attr"].pop(attr)
        # write the netCDF file with the combined data for this year
        if len(fluxnet_id) == 0:
            nc_dir_path = os.path.join(out_base_path, site, "Data", "ISD")
            nc_file_name = site + "_ISD_" + str(year) + ".nc"
        else:
            nc_dir_path = os.path.join(out_base_path, fluxnet_id, "Data",
                                       "ISD")
            nc_file_name = fluxnet_id + "_ISD_" + str(year) + ".nc"
        if not os.path.exists(nc_dir_path):
            os.makedirs(nc_dir_path)
        nc_file_path = os.path.join(nc_dir_path, nc_file_name)
        pfp_io.NetCDFWrite(nc_file_path, ds_all)
        cfg_concat["Files"]["In"][str(n)] = nc_file_path
    # concatenate the yearly files for this site
    info = pfp_compliance.ParseConcatenateControlFile(cfg_concat)
    pfp_io.NetCDFConcatenate(info)

# write the time steps out to an Excel file
xl_file_path = os.path.join(isd_base_path, "ISD_site_timesteps.xls")
xl_write_ISD_timesteps(xl_file_path, isd_time_steps)

logger.info("All done")
Example #7
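An excerpt from an ACCESS processing script: the processed ACCESS data are written to a date-stamped netCDF file, concatenated with the existing ACCESS data and the concatenation_info dictionary is saved to a cis file.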
        cis["NetCDFConcatenate"]["in_file_names"].remove(main_in_file_name)
    # get the ACCESS data path
    access_file_path = os.path.join(existing_access_base_path, site, "Data",
                                    "ACCESS", "processed")
    # check the directory exists and create if it doesn't
    if not os.path.isdir(access_file_path):
        os.makedirs(access_file_path)
    # build the ACCESS file name
    dt = pfp_utils.GetVariable(dss_tts[site], "DateTime")
    start_date = dt["Data"][0].strftime("%Y%m%d%H%M")
    end_date = dt["Data"][-1].strftime("%Y%m%d%H%M")
    access_file_name = site + "_ACCESS" + "_" + start_date + "_" + end_date + ".nc"
    # get the full path including the file name
    access_file_uri = os.path.join(access_file_path, access_file_name)
    # and write the ACCESS data to a netCDF file
    pfp_io.NetCDFWrite(access_file_uri, dss_tts[site])
    cis["NetCDFConcatenate"]["in_file_names"].append(access_file_uri)
    # concatenate with the existing ACCESS data
    pfp_io.NetCDFConcatenate(cis)
    # save the concatenation_info dictionary
    cis_file_name = "cis_" + site + ".txt"
    cis_file_path = os.path.join(existing_access_base_path, site, "Data",
                                 "ACCESS", "cis")
    if not os.path.isdir(cis_file_path):
        os.mkdir(cis_file_path)
    cfg_cis = ConfigObj(cis, indent_type="    ", list_values=False)
    cfg_cis.filename = os.path.join(cis_file_path, cis_file_name)
    cfg_cis.write()

logger.info("All finished")
Example #8
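The same pattern applied to ERA5 data: the processed data are written to a date-stamped netCDF file, concatenated with the existing ERA5 data and the concatenation_info dictionary is saved.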
    main_in_file_name = ''.join(cis["NetCDFConcatenate"]["in_file_names"])
    if not os.path.isfile(main_in_file_name):
        cis["NetCDFConcatenate"]["in_file_names"].remove(main_in_file_name)
    # get the ERA5 data path
    era5_file_path = os.path.join(existing_era5_base_path, site, "Data", "ERA5", "processed")
    # check the directory exists and create if it doesn't
    if not os.path.isdir(era5_file_path):
        os.makedirs(era5_file_path)
    # build the ERA5 file name
    dt = pfp_utils.GetVariable(dss_tts[site], "DateTime")
    start_date = dt["Data"][0].strftime("%Y%m%d%H%M")
    end_date = dt["Data"][-1].strftime("%Y%m%d%H%M")
    era5_file_name = site + "_ERA5" + "_" + start_date + "_" + end_date + ".nc"
    # get the full path including the file name
    era5_file_uri = os.path.join(era5_file_path, era5_file_name)
    # and write the ERA5 data to a netCDF file
    pfp_io.NetCDFWrite(era5_file_uri, dss_tts[site])
    cis["NetCDFConcatenate"]["in_file_names"].append(era5_file_uri)
    # concatenate with the existing ERA5 data
    pfp_io.NetCDFConcatenate(cis)
    # save the concatenation_info dictionary
    cis_file_name = "cis_" + site + ".txt"
    cis_file_path = os.path.join(existing_era5_base_path, site, "Data", "ERA5", "cis")
    if not os.path.isdir(cis_file_path):
        os.mkdir(cis_file_path)
    cfg_cis = ConfigObj(cis, indent_type="    ", list_values=False)
    cfg_cis.filename = os.path.join(cis_file_path, cis_file_name)
    cfg_cis.write()

logger.info("All finished")
Example #9
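An excerpt from an ACCESS clean-up script: variable attributes are forced to the values given in the control file, missing data are given a non-zero QC flag and the cleaned data structure is written out with pfp_io.NetCDFWrite.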
    # force the remaining attributes to values in control file
    labels = list(ds.series.keys())
    cfg_labels = list(cfg["Variables"].keys())
    for cfg_label in cfg_labels:
        access_labels = [l for l in labels if l.startswith(cfg_label)]
        for access_label in access_labels:
            var = pfp_utils.GetVariable(ds, access_label)
            for vattr in list(cfg["Variables"][cfg_label]["Attr"]):
                var["Attr"][vattr] = cfg["Variables"][cfg_label]["Attr"][vattr]
            pfp_utils.CreateVariable(ds, var)

    # force missing data to non-zero QC flag
    labels = list(ds.series.keys())
    for label in labels:
        var = pfp_utils.GetVariable(ds, label)
        condition = numpy.ma.getmaskarray(var["Data"]) & (numpy.mod(var["Flag"], 10) == 0)
        idx = numpy.ma.where(condition)[0]
        if len(idx) != 0:
            var["Flag"][idx] = numpy.int32(8)
            pfp_utils.CreateVariable(ds, var)

    # write out the new ACCESS file
    msg = "Writing " + access_name
    logger.info(msg)
    access_path = os.path.join(base_path, site, "Data", "ACCESS", "cleaned")
    if not os.path.isdir(access_path):
        os.mkdir(access_path)
    access_uri = os.path.join(access_path, access_name)
    pfp_io.NetCDFWrite(access_uri, ds)
logger.info("Finished")
Example #10
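An excerpt from what appears to be a standardisation script: the data structure is tidied (variables included or excluded, attributes updated, Fco2 storage made consistent), written out with pfp_io.NetCDFWrite, and the cfchecker output is then split into separate error and warning files.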
        # make sure there are Ws and Wd series
        copy_ws_wd(ds1)
        # make sure we have all the variables we want ...
        ds2 = include_variables(std, ds1)
        # ... but not the ones we don't
        exclude_variables(std, ds2)
        # update the global attributes
        change_global_attributes(std, ds2)
        # update the variable attributes
        change_variable_attributes(std, ds2)
        # Fc single point storage
        consistent_Fco2_storage(std, ds2, site)
        ofp = os.path.join(op, fn)
        #nf = pfp_io.nc_open_write(ofp)
        #pfp_io.nc_write_series(nf, ds2)
        pfp_io.NetCDFWrite(ofp, ds2)
        # build the cfchecks command (the call itself is currently disabled)
        cmd = ["cfchecks", "-v 1.8", ofp]
        #subprocess.run(cmd, stdout=cfchecker_file)
cfchecker_file.close()
# parse the cfchecker output file and write separate files for errors and warnings
error_file = open(error_file_name, "w")
warning_file = open(warning_file_name, "w")
with open(cfchecker_file_name) as f:
    for line in f:
        if "CHECKING NetCDF FILE" in line:
            error_file.write(line)
            warning_file.write(line)
            continue
        if "Checking variable:" in line:
            parts = line.split(" ")