def do_ncconcat(self):
    """
    Concatenate a series of netCDF files into a single netCDF file.

    Asks the user for a control file (listing the input files and the
    output file name), then hands it to qcio.nc_concatenate to do the
    work.  Progress is reported via the GUI status line (self.do_progress)
    and the log.

    If the user cancels the control file dialog (an empty control file is
    returned), the method resets the status line and returns without
    doing anything.
    """
    logging.info(' Starting concatenation of netCDF files')
    self.do_progress(text='Loading control file ...')
    cf = qcio.load_controlfile(path='controlfiles')
    # an empty control file means the user cancelled the dialog
    if not cf:
        self.do_progress(text='Waiting for input ...')
        return
    self.do_progress(text='Concatenating files')
    qcio.nc_concatenate(cf)
    self.do_progress(text='Finished concatenating files')
    logging.info(' Finished concatenating files')
    logging.info("")
# NOTE(review): fragment of a larger if/elif dispatch over processing "levels"
# read from a batch control file (cf_batch).  The enclosing loop/if, the branch
# that the first logging call belongs to, and the body of the final "cpd"
# branch all lie outside this view — nesting below is reconstructed and should
# be confirmed against the original file.
logging.info('Finished L3 processing with '+cfname)
elif level.lower()=="fluxnet":
    # convert netCDF files to FluxNet CSV files
    for i in cf_batch["Levels"][level].keys():
        cfname = cf_batch["Levels"][level][i]
        logging.info('Starting FluxNet output with '+cfname)
        cf = qcio.get_controlfilecontents(cfname)
        qcio.fn_write_csv(cf)
        logging.info('Finished FluxNet output with '+cfname)
elif level.lower()=="concatenate":
    # concatenate netCDF files
    for i in cf_batch["Levels"][level].keys():
        cfname = cf_batch["Levels"][level][i]
        logging.info('Starting concatenation with '+cfname)
        cf = qcio.get_controlfilecontents(cfname)
        qcio.nc_concatenate(cf)
        logging.info('Finished concatenation with '+cfname)
elif level.lower()=="climatology":
    # climatology
    for i in cf_batch["Levels"][level].keys():
        cfname = cf_batch["Levels"][level][i]
        logging.info('Starting climatology with '+cfname)
        cf = qcio.get_controlfilecontents(cfname)
        qcclim.climatology(cf)
        logging.info('Finished climatology with '+cfname)
elif level.lower()=="cpd":
    # ustar threshold from change point detection
    for i in cf_batch["Levels"][level].keys():
        cfname = cf_batch["Levels"][level][i]
        logging.info('Starting CPD with '+cfname)
        cf = qcio.get_controlfilecontents(cfname)
        # NOTE(review): the call that actually runs CPD for this control file
        # continues beyond this view
# add the ISD site ID var_all["Attr"]["isd_site_id"] = isd_site_id # copy the data and flag onto the matching times var_all["Data"][idx] = var_out["Data"] var_all["Flag"][idx] = var_out["Flag"] # put the data, flag and attributes into the all-in-one data structure qcutils.CreateVariable(ds_all, var_all) # write the netCDF file with the combined data for this year if len(fluxnet_id) == 0: nc_dir_path = os.path.join(out_base_path,site,"Data","ISD") nc_file_name = site+"_ISD_"+str(year)+".nc" else: nc_dir_path = os.path.join(out_base_path,fluxnet_id,"Data","ISD") nc_file_name = fluxnet_id+"_ISD_"+str(year)+".nc" if not os.path.exists(nc_dir_path): os.makedirs(nc_dir_path) nc_file_path = os.path.join(nc_dir_path,nc_file_name) nc_file = qcio.nc_open_write(nc_file_path) qcio.nc_write_series(nc_file, ds_all, ndims=1) cf_concat["Files"]["In"][str(n)] = nc_file_path # concatenate the yearly files for this site #cf_concat.filename = "../controlfiles/ISD/concat.txt" #cf_concat.write() qcio.nc_concatenate(cf_concat) # write the time steps out to an Excel file xl_file_path = os.path.join(isd_base_path, "ISD_site_timesteps.xls") xl_write_ISD_timesteps(xl_file_path, isd_time_steps) logger.info("All done")
# NOTE(review): fragment — the opening lines belong to a per-site loop whose
# header (and the construction of access_file_path / access_file_path_list /
# site / the *_base_path variables) is outside this view; nesting below is
# reconstructed and should be confirmed against the original file.
if os.path.exists(access_file_path):
    access_file_path_list.append(access_file_path)
# only write a concatenation control file if this site has any input files
if len(access_file_path_list) > 0:
    cf_file_path = os.path.join(cf_base_path,site+".txt")
    # NOTE(review): the indent_type string's whitespace may have been mangled
    # when this file lost its line breaks — confirm against version control
    cf_concat = ConfigObj(indent_type=" ")
    cf_concat.filename = cf_file_path
    cf_concat["Options"] = {"NumberOfDimensions":1, "MaxGapInterpolate":0, "FixTimeStepMethod":"round", "Truncate":"No", "TruncateThreshold":50, "SeriesToCheck":[]}
    nc_file_name = site+"_ACCESS.nc"
    nc_out_path = os.path.join(nc_base_path,site,"Data","ACCESS",nc_file_name)
    # the output file is also input "0", so repeated runs extend the existing file
    cf_concat["Files"] = {"Out":{"ncFileName":nc_out_path},"In":{}}
    cf_concat["Files"]["In"]["0"] = nc_out_path
    for n, access_file_path in enumerate(access_file_path_list):
        cf_concat["Files"]["In"][str(n+1)] = access_file_path
    cf_concat.write()
logger.info("Finished generating ACCESS concatenation control files")
# now do the concatenation
file_list = sorted(glob.glob(cf_base_path+"/*"))
for item in file_list:
    # each item is one of the per-site control files written above
    cf_read = ConfigObj(infile=item)
    cf_path, cf_name = os.path.split(item)
    logger.info("Concatenating using "+cf_name)
    qcio.nc_concatenate(cf_read)
logger.info("")
logger.info("access_concatenate: all done")
# NOTE(review): fragment of a larger if/elif dispatch over processing "levels"
# read from a batch control file (cf_batch); the opening "if" and the code
# after the last line here lie outside this view — nesting is reconstructed
# and should be confirmed against the original file.
elif level.lower()=="fluxnet":
    # convert netCDF files to FluxNet CSV files
    for i in cf_batch["Levels"][level].keys():
        cfname = cf_batch["Levels"][level][i]
        logging.info('Starting FluxNet output with '+cfname)
        cf = qcio.get_controlfilecontents(cfname)
        qcio.fn_write_csv(cf)
        logging.info('Finished FluxNet output with '+cfname)
        logging.info('')
elif level.lower()=="concatenate":
    # concatenate netCDF files
    for i in cf_batch["Levels"][level].keys():
        cfname = cf_batch["Levels"][level][i]
        logging.info('Starting concatenation with '+cfname)
        cf_cc = qcio.get_controlfilecontents(cfname)
        qcio.nc_concatenate(cf_cc)
        logging.info('Finished concatenation with '+cfname)
        # now plot the fingerprints for the concatenated files
        # fingerprints are on by default; the control file can opt out
        opt = qcutils.get_keyvaluefromcf(cf_cc,["Options"],"DoFingerprints", default="yes")
        if opt.lower()=="no":
            continue
        cf_fp = qcio.get_controlfilecontents("controlfiles/standard/fingerprint.txt")
        # NOTE(review): dir(cf_fp) lists attribute names, not config keys —
        # the "Options" check below uses plain membership ('"Options" not in
        # cf_fp'); confirm whether dir() is intentional here
        if "Files" not in dir(cf_fp):
            cf_fp["Files"] = {}
        # point the fingerprint control file at the concatenated output
        file_name = cf_cc["Files"]["Out"]["ncFileName"]
        file_path = ntpath.split(file_name)[0]+"/"
        cf_fp["Files"]["file_path"] = file_path
        cf_fp["Files"]["in_filename"] = ntpath.split(file_name)[1]
        # plots go to a "Plots" tree parallel to the "Data" tree
        cf_fp["Files"]["plot_path"] = file_path[:file_path.index("Data")]+"Plots/"
        if "Options" not in cf_fp:
            cf_fp["Options"]={}
        # run the fingerprint plots non-interactively
        cf_fp["Options"]["call_mode"] = "batch"
        cf_fp["Options"]["show_plots"] = "no"
        logging.info('Doing fingerprint plots using '+cf_fp["Files"]["in_filename"])