def mk_obs_conf(int_obs_dir, out_dir, utime, max_files):
    # Build a config file listing the most recent obs files.
    # Returns the config file path, or "" on failure or when no data is found.
    index = index_file.Index_file(int_obs_dir, sys_path.Index_base,
                                  sys_path.Index_period, "r", sys_path.Index_wait)
    obs_list, date_list = get_file.get_file(index, utime - max_files * 86400,
                                            utime, get_file.LAST, max_files)
    if len(obs_list) == 0:
        return ""

    obs_base = "obs_conf"
    curr_date = time.strftime("%Y%m%d", time.gmtime(utime))
    curr_time = time.strftime("%H%M", time.gmtime(utime))
    conf_file = "%s/%s.%s.%s.%s" % (out_dir, obs_base, curr_date, curr_time,
                                    sys_path.ASC_SUFFIX)
    try:
        if not os.path.isdir(out_dir):
            os.makedirs(out_dir)
        f = open(conf_file, "w")
        # 'fname' avoids shadowing the Python 2 builtin 'file'
        for fname in obs_list:
            f.write(fname)
            f.write("\n")
        f.close()
        return conf_file
    except (IOError, OSError):
        return ""
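# A minimal usage sketch for mk_obs_conf (not in the original source; the
# directory paths below are hypothetical, and sys_path/get_file/index_file
# are assumed importable as above):
#
#   conf_file = mk_obs_conf("/d1/data/int_obs", "/d1/data/obs_conf",
#                           int(time.time()), 24)
#   if conf_file == "":
#       print "Warning: no obs config file written"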
def mk_current_plan(base_dir, utime=None):
    # Default to the current time at call time; a default argument of
    # time.time() would be evaluated once, at import time.
    if utime is None:
        utime = time.time()

    in_dir = "%s/%s" % (base_dir, rctm_path.Site_plans_base)
    out_base = rctm_path.Current_plan_base
    out_dir = "%s/%s" % (base_dir, out_base)

    # List the input dir and keep only files containing the pattern.
    # (Filter with a comprehension; removing items from a list while
    # iterating over it skips elements.)
    cur_str = "curr_"
    files = [f for f in os.listdir(in_dir) if string.find(f, cur_str) != -1]
    if len(files) == 0:
        return 1
    files.sort()
    #print files

    date_str = time.strftime("%Y%m%d", time.gmtime(utime))
    time_str = time.strftime("%H%M%S", time.gmtime(utime))
    dest_dir = "%s/%s" % (out_dir, date_str)
    out_file = "%s.%s.%s.%s" % (out_base, date_str, time_str, sys_path.ASC_SUFFIX)
    out_path = "%s/%s" % (dest_dir, out_file)
    #print dest_dir

    # Create destination directory as needed
    if not os.path.isdir(dest_dir):
        try:
            os.makedirs(dest_dir)
        except OSError:
            return 1

    # Concatenate all the files
    for f in files:
        file_str = "%s/%s" % (in_dir, f)
        command = "cat %s >> %s" % (file_str, out_path)
        ret = os.system(command)
        if ret != 0:
            return 1

    # Create index entry
    index = index_file.Index_file(out_dir, sys_path.Index_base,
                                  sys_path.Index_period, "w", sys_path.Index_wait)
    out_line = "%s %d" % (out_file, utime)
    if index != "":
        index.write(out_line, utime)

    return 0
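# A hedged alternative to the `cat` loop above: concatenate the plan files
# in pure Python with shutil.copyfileobj, avoiding one os.system() fork per
# input file. This is a sketch, not part of the original module; it assumes
# the same in_dir/files/out_path values that mk_current_plan builds.
import shutil

def concat_plan_files(in_dir, files, out_path):
    out = open(out_path, "wb")
    try:
        for name in files:
            src = open(os.path.join(in_dir, name), "rb")
            try:
                # stream each input file onto the end of the output file
                shutil.copyfileobj(src, out)
            finally:
                src.close()
    finally:
        out.close()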
def run_filename_io_test():
    AWC_in_ns_pattern = ["YYYYMMDD", "HHMM", "", "gE_ir4km_ch2", ""]
    AWC_in_ns_format = "%D_%I.%F%B%S"
    AWC_ns = name_schema.Name_schema(AWC_in_ns_format, AWC_in_ns_pattern)

    out_ns_pattern = ["east_3_9", "YYYYMMDD", "HHMM", "", "gini"]
    out_ns_format = "%B.%D.%I.%F%S"
    out_ns = name_schema.Name_schema(out_ns_format, out_ns_pattern)

    # -------------
    # create an instance of the MapFileNameGeneral class
    # -------------
    map_file_name = MapFileNameGeneral(AWC_ns, out_ns)

    output_dir = os.getcwd()

    # the output_dir contains an index file, index.20060704, which contains
    # one line: east_3_9.20060704.0015.gini 1152118740
    file_str = "east_3_9.20060704.0015.gini 1152118740"
    out_ind = open("index.20060704", 'w')
    out_ind.write(file_str)
    out_ind.write("\n")
    out_ind.close()

    input_files = ['20060704_0015.gE_ir4km_ch2',
                   '20060704_0045.gE_ir4km_ch2',
                   '20060704_0101.gE_ir4km_ch2',
                   '20060704_0115.gE_ir4km_ch2']
    input_set = set(input_files)

    output_index = index_file.Index_file(output_dir, ncvdefs.Index_base,
                                         ncvdefs.Index_period, "w",
                                         ncvdefs.Index_wait)

    # -------------
    # create an instance of the UnlinkedFiles
    # -------------
    get_unlinked_files = UnlinkedFiles(map_file_name, input_set, output_index)

    yyyymmdd = "20060704"
    unlinked_files = get_unlinked_files.get_files(yyyymmdd)

    # the first input file is already covered by the index entry written
    # above, so the remaining three should come back as unlinked
    if (unlinked_files == ['20060704_0045.gE_ir4km_ch2',
                           '20060704_0101.gE_ir4km_ch2',
                           '20060704_0115.gE_ir4km_ch2']):
        print "successful test!"
    else:
        print "test failed"
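# Entry-point guard so the test can be run directly (an assumption; the
# original excerpt does not show how the test is invoked):
if __name__ == "__main__":
    run_filename_io_test()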
def get_concat_meso_file(concat_meso_dir, date_string):
    # Return the latest concatenated mesonet file within the 12 hours
    # ending at date_string (format YYYYMMDD.HHMMSS), or "none".
    ttup = time.strptime(date_string, "%Y%m%d.%H%M%S")
    time_val = tim.mkgmtime(ttup)
    index = index_file.Index_file(concat_meso_dir, sys_path.Index_base,
                                  sys_path.Index_period, "r", sys_path.Index_wait)
    f, d = get_file.get_file(index, time_val - (12 * 3600), time_val, 1, 1)
    if len(f) == 0:
        obs_file = "none"
    else:
        obs_file = f[0]
    return obs_file
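# Usage sketch (hypothetical directory; the date string format follows the
# strptime pattern above):
#
#   obs_file = get_concat_meso_file("/d1/data/concat_meso", "20060704.123000")
#   if obs_file == "none":
#       # nothing indexed in the 12 hours ending at the given time
#       pass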
def main():
    parser = OptionParser(usage="%prog [options] rwh_config_file fcst_road_seg_file vdt_road_seg_file state forecast_hours_out")
    parser.add_option("-d", "--date", dest="date",
                      help="run rwh application for specified date (YYYYMMDD.HHMM)")
    parser.add_option("-l", "--log", dest="log", help="base name of log file")
    (options, args) = parser.parse_args()

    if len(args) < 5:
        parser.print_help()
        sys.exit(2)

    config_file = args[0]
    fcst_road_seg_file = args[1]
    vdt_road_seg_file = args[2]
    state = args[3]
    fcst_hours_out = int(args[4])

    ret = 0

    if options.log:
        logg = log_msg.LogMessage(options.log, "pyl")
    else:
        logg = log_msg.LogMessage("")

    logg.write_starting("run_rwh.py")

    # Resolve date and time
    # Use the optional date and time if we have it
    if options.date:
        date_time = options.date
        curr_date = date_time[:8]
        curr_hhmmss = date_time[9:] + "00"
        curr_hhmm = date_time[9:]
    else:
        curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
        curr_hhmmss = time.strftime("%H%M%S", time.gmtime(time.time()))
        curr_hhmm = time.strftime("%H%M", time.gmtime(time.time()))

    dt = "%s.%s" % (curr_date, curr_hhmmss)
    ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
    time_val = tim.mkgmtime(ttup)

    # Get the latest road-weather forecast file
    rdwx_dir = "%s/%s" % (vii_paths.FORECAST_DIR, "rdwx_fcst")
    index = index_file.Index_file(rdwx_dir, "index", 24, "r", 0)
    f, d = get_file.get_file(index, time_val - 7200, time_val, 1, 1)
    road_wx_opt_str = ""
    if len(f) == 0:
        logg.write_warning("No recent road-weather forecast file found in %s." % rdwx_dir)
    else:
        rdwx_file = f[0]
        road_wx_opt_str = "-w %s" % rdwx_file

    # Get the latest road-conditions forecast file
    road_cond_dir = "%s/%s" % (vii_paths.FORECAST_DIR, "merge_rec_tmt")
    index = index_file.Index_file(road_cond_dir, "index", 24, "r", 0)
    f, d = get_file.get_file(index, time_val - 7200, time_val, 1, 1)
    road_cond_opt_str = ""
    if len(f) == 0:
        logg.write_warning("No recent road-cond forecast file found in %s." % road_cond_dir)
    else:
        road_cond_file = f[0]
        road_cond_opt_str = "-r %s" % road_cond_file

    # Get the latest VDT segment-statistics file.
    # Look back for the latest 5-minute file, but only 31 minutes
    # (1860 seconds); if none is found we run with just the forecast files.
    nearest_1min_time = (int(time_val) / 60) * 60
    max_lookback_time = nearest_1min_time - 1860
    loop_time = nearest_1min_time
    found = 0
    seg_stats_state_dir = "%s/%s_vdt_output" % (vii_paths.PROCESSED_DIR, state)
    output_date = ""
    output_hhmm = ""
    while loop_time >= max_lookback_time:
        file_date = time.strftime("%Y%m%d", time.gmtime(loop_time))
        file_hhmm = time.strftime("%H%M", time.gmtime(loop_time))
        seg_stats_file = "%s/%s/segment_statistics.%s.%s.nc" % (
            seg_stats_state_dir, file_date, file_date, file_hhmm)
        if os.path.exists(seg_stats_file):
            found = 1
            output_date = file_date
            output_hhmm = file_hhmm
            break
        loop_time = loop_time - 60

    seg_stats_opt_str = ""
    if not found:
        logg.write_warning("No recent segment-statistics file found in %s, running with just forecast files." % seg_stats_state_dir)
    else:
        seg_stats_opt_str = "-s %s" % seg_stats_file

    # If we don't have any of the input files, exit.
    if road_wx_opt_str == "" and road_cond_opt_str == "" and seg_stats_opt_str == "":
        logg.write_error("No recent road-wx, road-cond or seg-stats file found, can't run rwh.")
        logg.write_ending(exit_status=1, msg="run_rwh.py")
        sys.exit(1)

    # Set up command to run rwh
    begin_time_val = time_val
    end_time_val_init = time_val + (fcst_hours_out * 3600)
    end_time_val = (int(end_time_val_init) / 3600) * 3600
    rwh_begin_time_string = time.strftime("%Y%m%d%H%M", time.gmtime(begin_time_val))
    rwh_end_time_string = time.strftime("%Y%m%d%H%M", time.gmtime(end_time_val))

    output_state_dir = "%s/%s_rwh_output" % (vii_paths.PROCESSED_DIR, state)
    output_dir = "%s/%s" % (output_state_dir, output_date)
    output_file = "%s_rwh.%s.%s.nc" % (state, output_date, output_hhmm)
    output_path = "%s/%s" % (output_dir, output_file)

    # Make dated output dir if necessary
    mkdir_path(output_dir)

    cmd_str = "rwh45 %s %s %s %s %s %s -l %s %s %s %s" % (
        config_file, rwh_begin_time_string, rwh_end_time_string,
        fcst_road_seg_file, vdt_road_seg_file, output_path, options.log,
        road_wx_opt_str, road_cond_opt_str, seg_stats_opt_str)

    # Set up Processed file object
    proc_file = processed_file.ProcessedFile(output_state_dir, "Processed")

    # Run the command line
    ret = run_cmd(cmd_str, logg)
    if ret == 0:
        proc_file.write_processed(curr_date, output_file, processed_file.PROC_SUCCESS)
    else:
        logg.write_error("run_cmd() failed for rwh command_str.")

    logg.write_ending(exit_status=ret, msg="run_rwh.py")
    return ret
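# Standard entry-point guard (a sketch; the original excerpt does not show
# one), so the exit status of main() becomes the process exit status:
if __name__ == "__main__":
    sys.exit(main())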
def main():
    # Parse file name option
    args = []
    input_dir = ""
    output_dir = ""
    logfile = ""

    # -------------
    usage_str = "%prog input_dir output_dir satellite"
    parser = OptionParser(usage=usage_str)
    parser.add_option("-l", "--logfile_path", dest="logfile_path", help="log file")
    (options, args) = parser.parse_args()

    if len(args) < 3:
        parser.print_help()
        print "incorrect command line arguments"
        sys.exit(2)

    script = os.path.basename(sys.argv[0])
    input_dir = args[0]
    output_dir = args[1]
    satellite = args[2]
    logfile_path = options.logfile_path
    channel = ncvdefs.sat_channels

    # -------------
    # get the base and suffix info from the ncvdefs.py script
    # -------------
    input_suffix = output_suffix
    input_base = satellite + "_" + output_base

    # -------------
    # build the output file base name from the satellite name
    # -------------
    output_file_base = satellite + "_" + output_base

    # -------------
    pres_time = time.time()

    # -------------
    # set up the log file
    # -------------
    logfile = log.Log(logfile_path, ncvdefs.PYL_SUFFIX)
    logfile.write_time("Starting %s\n" % script)
    logfile_p = logfile.get_log_path()
    logfile_arg = ""
    if logfile_p != "":
        logfile_arg = "-l %s" % logfile_p

    # -------------
    # create an instance of the name_schema
    # -------------
    in_fno = name_schema.Data_fname(input_base, input_suffix)
    out_fno = name_schema.Data_fname(output_file_base, output_suffix)

    # -------------
    # set up the index files; the number of input index files depends on
    # the number of channels
    # -------------
    input_index_base_list = []
    input_index_list = []
    for channel_ind in xrange(len(channel)):
        input_index_base_list.append(index_base + "_" + channel[channel_ind])
        input_index_list.append(index_file.Index_file(input_dir,
                                                      input_index_base_list[channel_ind],
                                                      index_file_hours, "r",
                                                      index_file_wait))
        input_index = input_index_list[channel_ind]

    output_index_base = index_base
    output_index = index_file.Index_file(output_dir, output_index_base,
                                         index_file_hours, "w", index_file_wait)

    # -------------
    # create an instance of the MapFileName class
    # -------------
    map_file_name = filename_io.MapFileName(out_fno, in_fno)

    # -------------
    # create an instance of the ProcessedFiles
    # -------------
    get_proc_files = filename_io.ProcessedFiles(map_file_name,
                                                input_index_list, output_index)

    # ---------
    # get the date from the input index name
    # ---------
    input_str = os.path.basename(input_index.curr_index_file)
    input_split = string.split(input_str, ".")
    init_input_date = input_split[1]
    input_sec = tim.date2sec(init_input_date)

    # ---------
    # loop over the number of days to process
    # ---------
    for ndays in xrange(0, index_file_prior_days + 1):
        prior_days_sec = input_sec - ndays * 86400
        (input_date, hr_str) = tim.sec2date(prior_days_sec)

        # ---------
        # call the method get_files in the ProcessedFiles class, passing
        # in the date
        # ---------
        proc_files = get_proc_files.get_files(input_date)
        if len(proc_files) == 0:
            continue

        # ---------
        # write the file names to the output index
        # ---------
        for ind in proc_files:
            file_time = in_fno.get_utime(ind)
            output_name = out_fno.make_name_utime(file_time)
            # create an instance of the index_file_io class
            index_file_io = sys_functions.IndexFileIo()
            line = index_file_io.create_index_line(output_name, pres_time)
            output_index.write(line, file_time)

    logfile.write_time("Ending %s, status = 0\n\n" % script)
    return 0
def main():
    # Parse file name option
    args = []
    input_dir = ""
    output_dir = ""
    cdl_file = ""
    params_dir = ""
    logfile = ""

    # -------------
    usage_str = "%prog input_dir output_dir cdl_file params_dir channel satellite"
    parser = OptionParser(usage=usage_str)
    parser.add_option("-l", "--logfile_path", dest="logfile_path", help="log file")
    (options, args) = parser.parse_args()

    if len(args) < 6:
        parser.print_help()
        print "incorrect command line arguments"
        sys.exit(2)

    script = os.path.basename(sys.argv[0])
    input_dir = args[0]
    output_dir = args[1]
    cdl_file = args[2]
    params_dir = args[3]
    channel = args[4]
    satellite = args[5]
    logfile_path = options.logfile_path

    # -------------
    # get the base and suffix info from the ncvdefs.py script
    # -------------
    input_suffix = input_gini_suffix
    input_base = satellite + "_" + channel

    # -------------
    # the output index filename should have the name of the channel in it.
    # -------------
    output_file_base = satellite + "_" + output_base

    # -------------
    pres_time = time.time()
    exec_cmd = "Gini2nc"

    # -------------
    # set up the log file
    # -------------
    logfile = log.Log(logfile_path, ncvdefs.PYL_SUFFIX)
    logfile.write_time("Starting %s\n" % script)
    logfile_p = logfile.get_log_path()
    logfile_arg = ""
    if logfile_p != "":
        logfile_arg = "-l %s" % logfile_p

    # -------------
    # create an instance of the name_schema
    # -------------
    in_fno = name_schema.Data_fname(input_base, input_suffix)
    out_fno = name_schema.Data_fname(output_file_base, output_suffix)

    # -------------
    # set up the index files
    # -------------
    input_index = index_file.Index_file(input_dir, index_base,
                                        index_file_hours, "r", index_file_wait)
    output_index_base = index_base + "_" + channel
    output_index = index_file.Index_file(output_dir, output_index_base,
                                         index_file_hours, "w", index_file_wait)

    # -------------
    # create an instance of the MapFileName class
    # -------------
    map_file_name = filename_io.MapFileName(in_fno, out_fno)

    # -------------
    # create an instance of the UnprocessedFiles
    # -------------
    get_unproc_files = filename_io.UnprocessedFiles(map_file_name,
                                                    input_index, output_index)

    # ---------
    # get the date from the input index name
    # ---------
    input_str = os.path.basename(input_index.curr_index_file)
    input_split = string.split(input_str, ".")
    init_input_date = input_split[1]
    input_sec = tim.date2sec(init_input_date)

    # ---------
    # loop over the number of days to process
    # ---------
    np = 0
    for ndays in xrange(0, index_file_prior_days + 1):
        prior_days_sec = input_sec - ndays * 86400
        (input_date, hr_str) = tim.sec2date(prior_days_sec)

        # -------------
        # call the method get_files in the UnprocessedFiles class, passing
        # in the date
        # -------------
        unproc_files = get_unproc_files.get_files(input_date)
        if len(unproc_files) == 0:
            continue

        # ---------
        # loop over all the unprocessed files and process them
        # ---------
        for ind in unproc_files:
            file_path = os.path.join(input_dir, input_date)
            in_path = os.path.join(file_path, ind)
            file_bname = os.path.basename(in_path)
            file_time = in_fno.get_utime(file_bname)
            output_name = out_fno.make_name_utime(file_time)

            # remove the initial '.' from the file name
            if output_name[0] == ".":
                output_name = output_name[1:]

            date_str = output_index.get_date_string(file_time)
            output_dated_dir = os.path.join(output_dir, date_str)
            if not os.path.exists(output_dated_dir):
                os.makedirs(output_dated_dir)
            output_path = os.path.join(output_dated_dir, output_name)

            command = "%s %s %s %s %s %s" % (exec_cmd, logfile_arg, in_path,
                                             params_dir, output_path, cdl_file)
            logfile.write_time(" Executing: %s\n" % command)
            ret = os.system(command)
            if ret != 0:
                logfile.write_time(" Error: Executing %s\n" % command)
                logfile.write_time("Ending %s, status = 1\n\n" % script)
                return 1
            else:
                index_file_io = sys_functions.IndexFileIo()
                index_line = index_file_io.create_index_line(output_name, pres_time)
                output_index.write(index_line, file_time)
                np = np + 1
                if np == ncvdefs.Max_sat_files_to_process:
                    logfile.write_time(" Info. Reached maximum file process limit (%d).\n" % np)
                    logfile.write_time("Ending %s, status = 0\n\n" % script)
                    return 0

    logfile.write_time("Ending %s, status = 0\n\n" % script)
    return 0
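# Sketch of a race-free variant of the exists-then-makedirs pattern used
# above. If another process creates the dated directory between the
# os.path.exists() check and os.makedirs(), makedirs raises; tolerating
# EEXIST closes that window. Hypothetical helper, not in the original.
import errno

def make_dated_dir(path):
    try:
        os.makedirs(path)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise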
    filtered_list.reverse()
    return filtered_list


if __name__ == "__main__":
    import time
    import sys

    if len(sys.argv) < 5:
        print "usage: %s index_file_base_dir start_time_offset end_time_offset number_files" % sys.argv[0]
        print "The start_time_offset and end_time_offset variables are in hours relative"
        print "to the current time. They should be negative to go back in time."
        sys.exit(2)

    ifile = index_file.Index_file(sys.argv[1], "index", 24, "r", 0)
    #print ifile
    start_time_off = int(sys.argv[2])
    end_time_off = int(sys.argv[3])
    num_files = int(sys.argv[4])
    date = int(time.time())
    #print 'date ', date
    #print 'num_files ', num_files
    #f, d = get_present_day_file(ifile, date + 3600 * start_time_off, date + 3600 * end_time_off, FIRST, num_files)
    f, d = get_file(ifile, date + 3600 * start_time_off,
                    date + 3600 * end_time_off, FIRST, num_files)
    print f
    print d
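# Example invocation (hypothetical paths; assumes this module is saved as
# get_file.py): fetch up to 5 indexed files from the last 24 hours, i.e.
# the window [now - 24h, now]:
#
#   python get_file.py /d1/data/obs -24 0 5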
curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
curr_time = time.strftime("%H%M%S", time.gmtime(time.time()))

# Create a Unix time from date/time info
dt = "%s.%s" % (curr_date, curr_time)
ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
time_val = tim.mkgmtime(ttup)

fcst_hr = curr_time[:2]
cmd = "dump_fcst_data"

# ********************** Dump Mesh Forecast ********************************

# Get latest mesh input files
index = index_file.Index_file(wx_input_dir, sys_path.Index_base,
                              sys_path.Index_period, "r", sys_path.Index_wait)
f, d = get_file.get_file(index, time_val - 86400, time_val, 1, 1)

# If no input file, log a warning and leave the name empty
if len(f) == 0:
    logf.write_time("Warning: No recent weather forecast file found.\n")
    wx_fcst_file = ""
else:
    wx_fcst_file = f[0]

if wx_fcst_file != "":
    # Make output path
    wx_out_dir = os.path.join(out_dir, "wx")
    dest_dir = os.path.join(wx_out_dir, curr_date)
    out_name = "%s.%s.%s.%s" % ("dump_fcst_wx", curr_date, curr_time[:4],
                                sys_path.ASC_SUFFIX)
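# For example, a run at 09:30:15 UTC on 2006-07-04 yields
# out_name "dump_fcst_wx.20060704.0930.<suffix>", where <suffix> is
# whatever sys_path.ASC_SUFFIX is defined as (commonly "asc", but that is
# an assumption here).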
# Set up strings for user-supplied date and real-time
if utime:
    curr_date = utime[:8]
    curr_time = utime[9:]
else:
    curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
    curr_time = time.strftime("%H%M", time.gmtime(time.time()))

# Create a Unix time from date/time info. curr_time is HHMM here, so the
# format must be "%H%M"; see the strptime note below.
dt = "%s.%s" % (curr_date, curr_time)
ttup = time.strptime(dt, "%Y%m%d.%H%M")
time_val = tim.mkgmtime(ttup)

output_index = index_file.Index_file(output_dir, sys_path.Index_base,
                                     sys_path.Index_period, "w",
                                     sys_path.Index_wait)

# loop over all possible hours
for h in range(24):
    # Make the output path
    dest_dir = "%s/%s" % (output_dir, curr_date)
    out_name = "%s_fcst.%s.%02d00.nc" % (mdl_base, curr_date, h)
    output_file = "%s/%s" % (dest_dir, out_name)

    if output_index.file_processed(out_name, curr_date):
        #logf.write_time("Info: file processed: %s\n" % out_name)
        continue

    input_file = "%s/%s/%s/%s.%s.i%02d00.nc" % (input_dir, mdl_base, curr_date,
                                                mdl_base, curr_date, h)
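# Why "%Y%m%d.%H%M" above rather than "%Y%m%d.%H%M%S": with only HHMM
# digits present, strptime does not fail on the %S format, it backtracks
# and silently mis-assigns the minute digits. A minimal demonstration
# (indices 3:6 of the struct_time are hour, minute, second):
#
#   >>> time.strptime("20060704.0930", "%Y%m%d.%H%M%S")[3:6]
#   (9, 3, 0)     # parsed as 09:03:00, not 09:30
#   >>> time.strptime("20060704.0930", "%Y%m%d.%H%M")[3:6]
#   (9, 30, 0)    # correct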
def main():
    arg_len = len(sys.argv)
    if arg_len < 9:
        print "Not enough command line arguments"
        usage()

    script = os.path.basename(sys.argv[0])
    west_input_dir = sys.argv[1]
    west_input_base = sys.argv[2]
    east_input_dir = sys.argv[3]
    east_input_base = sys.argv[4]
    cdl_file = sys.argv[5]
    output_dir = sys.argv[6]

    # scan any remaining arguments for the -L logfile option
    logfile_path = ""
    i = 7
    while i < arg_len:
        if sys.argv[i] == "-L":
            i = i + 1
            logfile_path = sys.argv[i]
        i = i + 1

    logfile = log.Log(logfile_path)
    if logfile_path == "":
        logfile_arg = ""
    else:
        logfile_arg = "-l %s" % logfile_path

    logfile.set_suffix(ncvdefs.PYL_SUFFIX)
    logfile.write_time("Starting. %s\n" % script)

    west_input_index = index_file.Index_file(west_input_dir, ncvdefs.Index_base,
                                             ncvdefs.Index_period, "r",
                                             ncvdefs.Index_wait)
    east_input_index = index_file.Index_file(east_input_dir, ncvdefs.Index_base,
                                             ncvdefs.Index_period, "r",
                                             ncvdefs.Index_wait)
    output_index = index_file.Index_file(output_dir, ncvdefs.Index_base,
                                         ncvdefs.Index_period, "w",
                                         ncvdefs.Index_wait)

    pres_time = time.time()

    # minutes to look back at previous file
    num_min = ncvdefs.blend_time_window + 10
    start_time = pres_time - num_min * 60

    nc2conus_command = "blend_sat"
    get_nc_var_command = "get_nc_var.py"
    output_base = ncvdefs.Cloud_mask_base
    suffix = ncvdefs.NETCDF_SUFFIX

    west_last_file = get_file.get_last_file(west_input_index, start_time, pres_time)
    east_last_file = get_file.get_last_file(east_input_index, start_time, pres_time)

    if west_last_file == "" and east_last_file == "":
        logfile.write_time(" Info: No GOES west or GOES east files within last %d minutes.\n" % num_min)
        logfile.write_time("Ending. Exit status = 1.\n")
        sys.exit(1)

    # check to see if the last files are within the time delta
    (new_cloud_time, old_cloud_time, files_in_blend, west_last_file,
     east_last_file, last_file) = check_latest_file(west_last_file,
                                                    east_last_file, pres_time,
                                                    ncvdefs.blend_time_window)

    if east_last_file == "" and west_last_file == "":
        logfile.write_time(" No files within the blend_time_window = %d sec to process.\n" % ncvdefs.blend_time_window)
        logfile.write_time("Ending. Exit status = 0.\n")
        sys.exit(0)

    out_fno = name_schema.Data_fname(output_base, suffix)

    # create output name, which corresponds to the time the file was processed
    output_name = out_fno.make_name_utime(int(pres_time))
    #date_str = output_index.get_date_string(old_cloud_time)
    #date_str = output_index.get_date_string(new_cloud_time)
    (pres_date, pres_hhmm) = tim.sec2date(pres_time)
    date_str = pres_date

    output_dated_dir = os.path.join(output_dir, date_str)
    if not os.path.exists(output_dated_dir):
        os.makedirs(output_dated_dir)
    output_path = os.path.join(output_dated_dir, output_name)

    if west_last_file != "" and east_last_file != "":
        command = "%s -w %s -e %s -c %s -o %s %s" % (nc2conus_command,
                                                     west_last_file,
                                                     east_last_file, cdl_file,
                                                     output_path, logfile_arg)
    else:
        command = "%s %s %s %s %s %s %d" % (get_nc_var_command, last_file,
                                            cdl_file, output_path, "is_cloud",
                                            "files_in_blend", files_in_blend)

    logfile.write_time(" Info: Executing %s.\n" % command)
    ret = os.system(command)
    if ret != 0:
        logfile.write_time("Error: Executing %s.\n" % command)
        logfile.write_time("Ending. Exit status = 1.\n")
        sys.exit(1)
    else:
        write_line = "%s %s" % (output_name, int(pres_time))
        output_index.write(write_line, pres_time)

    logfile.write_time("Ending. %s. Exit status = 0 \n" % script)
    return 0
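# A hedged sketch of running the blend step through subprocess instead of
# os.system, so arguments are passed as a list (no shell word-splitting)
# and the return code is read directly. Illustration only; the original
# uses os.system, and the option flags mirror the command built above.
import subprocess

def run_blend(west_file, east_file, cdl_file, output_path, logfile_arg):
    cmd_list = ["blend_sat", "-w", west_file, "-e", east_file,
                "-c", cdl_file, "-o", output_path]
    if logfile_arg:
        cmd_list += logfile_arg.split()   # e.g. ["-l", "/path/to/log"]
    return subprocess.call(cmd_list)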
if utime:
    curr_date = utime[:8]
    curr_time = utime[9:] + "00"
else:
    curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
    curr_time = time.strftime("%H%M%S", time.gmtime(time.time()))

# Create a Unix time from date/time info
dt = "%s.%s" % (curr_date, curr_time)
ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
time_val = tim.mkgmtime(ttup)

# Get latest concatenated obs files
index = index_file.Index_file(meso_input_dir, sys_path.Index_base,
                              sys_path.Index_period, "r", sys_path.Index_wait)
f, d = get_file.get_file(index, time_val - (12 * 3600), time_val, 1, 1)

# If no input file, set obs_file to "none"
if len(f) == 0:
    logf.write_time("Warning: No recent concat_meso file found.\n")
    obs_file = "none"
else:
    obs_file = f[0]
#print "obs_file = ", obs_file

# Get the latest dump_fcst file
index = index_file.Index_file(fcst_input_dir, sys_path.Index_base,
                              sys_path.Index_period, "r", sys_path.Index_wait)
f, d = get_file.get_file(index, time_val - (45 * 3600), time_val, 1, 1)

# If no input file, exit
if utime:
    curr_date = utime[:8]
    curr_time = utime[9:] + "00"
else:
    curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
    curr_time = time.strftime("%H%M%S", time.gmtime(time.time()))

# Create a Unix time from date/time info
dt = "%s.%s" % (curr_date, curr_time)
ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
time_val = tim.mkgmtime(ttup)

# Get dump_meso files
dump_meso_ifile = index_file.Index_file(input_dir, sys_path.Index_base,
                                        sys_path.Index_period, "r",
                                        sys_path.Index_wait)
max_days = rctm_path.NUM_OUTPUT_DAYS
max_files = max_days * 24
max_time_back = time_val - (max_days * 86400)
#print max_time_back
dump_meso_list, date_list = get_file.get_file(dump_meso_ifile, max_time_back,
                                              time_val, get_file.LAST, max_files)

# Concatenate the hourly files into one file
logf.write_time("Starting.\n")
tmp_dir = os.path.join(out_dir, "tmp")
tmp_file_base = "tmp_concat_meso"
cloud_mask_cdl = args[5]
config_file = args[6]
scan_type = args[7]

logfile = log.Log(logfile_path)
if logfile_path == "":
    logfile_arg = ""
else:
    logfile_arg = "-l %s" % logfile_path

logfile.set_suffix(ncvdefs.PYL_SUFFIX)
logfile.write_time("Starting %s\n" % script)
logfile.write_time(" Info: file type = %s\n" % input_base)

out_index = index_file.Index_file(output_dir, ncvdefs.Index_base,
                                  ncvdefs.Index_period, "w", ncvdefs.Index_wait)

file_suffix = ncvdefs.NETCDF_SUFFIX
output_str = ncvdefs.Cloud_mask_base

input_name = "%s.%s.%s" % (input_base, date_str, file_suffix)
output_name = "%s_%s.%s.%s.%s" % (output_base, output_str, date_str[0:8],
                                  date_str[8:12], file_suffix)
sat_file_time = date_str[0:8]

# if the output file has already been processed, exit
if out_index.file_processed(output_name, sat_file_time):
    logfile.write_time(" Info: the file %s has already been processed.\n" % output_name)
    logfile.write_time("Ending: exit status = 0\n")
    sys.exit(0)
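# Worked example of the date_str slicing above: with
# date_str = "200607041015", date_str[0:8] is "20060704" (YYYYMMDD) and
# date_str[8:12] is "1015" (HHMM), so output_name takes the form
# "<output_base>_<output_str>.20060704.1015.<file_suffix>".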
def main():
    arg_len = len(sys.argv)
    if arg_len < 6:
        print "Not enough command line arguments"
        usage()

    script = os.path.basename(sys.argv[0])
    input_dir = sys.argv[1]
    input_base = sys.argv[2]
    output_dir = sys.argv[3]
    output_base = sys.argv[4]
    output_suffix = sys.argv[5]

    # scan any remaining arguments for the -l logfile option
    logfile_path = ""
    i = 6
    while i < arg_len:
        if sys.argv[i] == "-l":
            i = i + 1
            logfile_path = sys.argv[i]
        i = i + 1

    logfile = log.Log(logfile_path)
    if logfile_path == "":
        logfile_arg = ""
    else:
        logfile_arg = "-l %s" % logfile_path

    logfile.set_suffix(ncvdefs.PYL_SUFFIX)
    logfile.write_time("Starting %s\n" % script)

    num_days = ncvdefs.Index_prior_days

    # Generate day list
    date = datetime.datetime.utcnow()
    day_list = [0] * (num_days + 1)
    for n in xrange(num_days + 1):
        prev_date = date - datetime.timedelta(days=n)
        day_list[n] = prev_date.strftime("%Y%m%d")

    pres_time = time.time()

    # make sure the input directory exists; if not, exit with an error
    if not os.path.exists(input_dir):
        logfile.write_time("Error. in %s. The input directory %s does not exist. \n" % (script, input_dir))
        logfile.write_time("Ending %s, status = 1\n\n" % script)
        return 1

    # get the list of all the files in the input directory; the files are
    # not in dated subdirectories, so go through the flat list and filter
    # out any hidden ('.') files
    dir_list = [fn for fn in os.listdir(input_dir) if fn[0] != '.']
    dir_list.sort()

    AWC_in_ns_pattern = ["YYYYMMDD", "HHMM", "", input_base, ""]
    AWC_in_ns_format = "%D_%I.%F%B%S"
    AWC_ns = name_schema.Name_schema(AWC_in_ns_format, AWC_in_ns_pattern)

    out_ns_pattern = [output_base, "YYYYMMDD", "HHMM", "", output_suffix]
    out_ns_format = "%B.%D.%I.%F%S"
    out_ns = name_schema.Name_schema(out_ns_format, out_ns_pattern)

    # -------------
    # create an instance of the MapFileNameGeneral class
    # -------------
    out_fno = name_schema.Data_fname(output_base, output_suffix)
    map_file_name = filename_io.MapFileNameGeneral(AWC_ns, out_ns)

    output_index = index_file.Index_file(output_dir, ncvdefs.Index_base,
                                         ncvdefs.Index_period, "w",
                                         ncvdefs.Index_wait)

    # put the file names in sets corresponding to the dates
    if day_list != []:
        # get a list of dates corresponding to the file names
        date_list = []
        for file_ind in xrange(len(dir_list)):
            curr_file = dir_list[file_ind]
            # parse the file name
            curr_date = AWC_ns.get_date(curr_file)
            ut = AWC_ns.get_utime(curr_file)
            date_list.append(curr_date)

        # loop over the day list to check for files
        for yyyymmdd in day_list:
            # put the matching file names in a set
            input_set = set([])
            for date_ind in xrange(len(date_list)):
                if date_list[date_ind] == yyyymmdd:
                    input_set.add(dir_list[date_ind])

            # -------------
            # create an instance of the UnlinkedFiles
            # -------------
            get_unlinked_files = filename_io.UnlinkedFiles(map_file_name,
                                                           input_set,
                                                           output_index)

            # ---------
            # call the method get_files in the UnlinkedFiles class,
            # passing in the date
            # ---------
            unlinked_files = get_unlinked_files.get_files(yyyymmdd)
            if len(unlinked_files) == 0:
                continue

            # check to see if the output dated dir exists; if not, create it
            output_dated_dir = os.path.join(output_dir, yyyymmdd)
            if not os.path.exists(output_dated_dir):
                os.makedirs(output_dated_dir)

            # ---------
            # link the unlinked files & write the file names in the
            # output index
            # ---------
            for ind in unlinked_files:
                (file_time, f_time) = AWC_ns.get_utime(ind)
                output_name = out_fno.make_name_utime(file_time)

                # link the files
                input_file = os.path.join(input_dir, ind)
                output_file = os.path.join(output_dated_dir, output_name)
                logfile.write_time(" Info: ln -s %s %s\n" % (input_file, output_file))
                ret = index_utils.link_dirs(input_file, output_file)
                if ret == 0:
                    # create an instance of the index_file_io class
                    index_file_io = sys_functions.IndexFileIo()
                    line = index_file_io.create_index_line(output_name, pres_time)
                    output_index.write(line, yyyymmdd)
                else:
                    logfile.write_time(" Error: Creating symlink.\n")
                    logfile.write_time("Ending %s, status = 1\n\n" % script)
                    return 1

    logfile.write_time("Ending %s, status = 0\n\n" % script)
    return 0