def get_utime(self, fname):
     """Return the Unix (UTC) time encoded by fname's date and issue-time (hhmm) fields."""
     stamp = "%s%s" % (self.get_date(fname), self.get_it(fname))
     return tim.mkgmtime(tim.datetotuple(stamp))
 def get_utime(self, fname):
     """Return (issue_utime, forecast_utime) for fname.

     The issue time comes from the date + hhmm fields in the name; the
     forecast time adds the forecast-hour field (whole hours) to it.
     """
     issue_stamp = "%s%s" % (self.get_date(fname), self.get_it(fname))
     issue_utime = tim.mkgmtime(tim.datetotuple(issue_stamp))
     # Forecast time = issue time plus the forecast-hour offset in seconds.
     fcst_utime = issue_utime + 3600 * int(self.get_ft(fname))
     return (issue_utime, fcst_utime)
Esempio n. 3
0
def grid_convert(pres_time, infile_path, var_names_file, cdl_file, output_dir,
                 out_index_file, logfile, input_base, output_base, testing):
    """Convert one forecast grid file and record it in the output index.

    Builds a dated output path from the input file's name, skips the file
    if it is already indexed, and otherwise runs the external
    ``convert_command``.  Returns 1 on success (or if the file already
    exists), 0 on failure or when ``testing`` suppressed the real work.
    """
    logfile_path = logfile.get_log_path()

    # Name schemas for parsing the input name and building the output name.
    in_fs = name_schema.Fcst_fname(input_base, "nc")
    infile = os.path.basename(infile_path)

    file_date_str = in_fs.get_date(infile)
    file_date = tim.mkgmtime(tim.datetotuple(file_date_str))

    # Ensure the dated output directory exists.
    output_path = os.path.join(output_dir, file_date_str)
    if not os.path.exists(output_path):
        logfile.write_time("Info: Executing mkdir -p %s\n" % output_path)
        if not testing:
            if os.system("mkdir -p %s 2> /dev/null" % output_path) != 0:
                logfile.write_time("Error: Unable to make directory.\n")
                return (0)

    out_fs = name_schema.Fcst_fname(output_base, "nc")
    outfile = out_fs.make_name(file_date_str, in_fs.get_it(infile),
                               in_fs.get_ft(infile))
    outfile_path = os.path.join(output_path, outfile)

    # Nothing to do if this output was already produced.
    if out_index_file.file_processed(outfile, file_date):
        logfile.write_time("Info: File %s already exists.\n" % outfile)
        return (1)

    logfile_arg = ""
    if logfile_path != "":
        logfile_arg = "-l %s" % logfile_path
    command = "%s %s %s %s %s %s" % (convert_command, infile_path,
                                     var_names_file, cdl_file, outfile_path,
                                     logfile_arg)

    logfile.write_time("Info: Executing %s\n" % command)
    if testing:
        # Test mode logs the command but never runs it, so no success.
        return (0)

    if os.system(command) != 0:
        logfile.write_time("Error: Unable to convert to file %s. \n" %
                           outfile)
        return (0)

    # Record the converted file in the index along with the present time.
    out_index_file.write("%s %d" % (outfile, int(pres_time)), file_date)
    return (1)
def get_concat_meso_file(concat_meso_dir, date_string):
    """Return the latest concatenated-mesonet file within 12 hours of
    date_string ("YYYYMMDD.HHMMSS"), or the string "none" if absent."""
    time_val = tim.mkgmtime(time.strptime(date_string, "%Y%m%d.%H%M%S"))

    index = index_file.Index_file(concat_meso_dir, sys_path.Index_base,
                                  sys_path.Index_period, "r",
                                  sys_path.Index_wait)
    # Look back 12 hours from the requested time for the newest file.
    files, dates = get_file.get_file(index, time_val - (12 * 3600), time_val,
                                     1, 1)

    if len(files) == 0:
        return "none"
    return files[0]
    def get_utime(self, fname):
        """Return (issue_utime, forecast_utime) parsed from fname.

        Either element is None when the corresponding field is missing
        from the file name.
        """
        date = self.get_date(fname)
        it = self.get_it(fname)
        ft = self.get_ft(fname)

        it_utime = None
        if it != "":
            # Unix time of the date portion plus the issue-time offset
            # decoded with this schema's issue-time pattern.
            date_utime = tim.mkgmtime(tim.datetotuple("%s" % date))
            it_utime = date_utime + tim.tp2sec(self.get_it_pattern(), it)

        ft_utime = None
        if it != "" and ft != "":
            # Forecast time is relative to the issue time.
            ft_utime = it_utime + tim.tp2sec(self.get_ft_pattern(), ft)

        return (it_utime, ft_utime)
def get_obs_time(obs_time_str):
    """Convert an observation time string into a Unix (UTC) time.

    The slice positions assume a fixed-width layout with separators at
    indices 4, 7, 10, 13 and 16 (e.g. "YYYY-MM-DD HH:MM:SS") —
    presumably ISO-like input; confirm against the caller.
    """
    # Re-pack the fixed-position components as "YYYYMMDD.HHMMSS".
    obs_date = obs_time_str[0:4] + obs_time_str[5:7] + obs_time_str[8:10]
    obs_time = obs_time_str[11:13] + obs_time_str[14:16] + obs_time_str[17:19]

    ttup = time.strptime("%s.%s" % (obs_date, obs_time), "%Y%m%d.%H%M%S")
    return tim.mkgmtime(ttup)
def main():
    """Command-line driver for the rwh (road-weather hazard) application.

    Collects the most recent road-weather forecast, road-condition
    forecast and VDT segment-statistics files, then runs the external
    ``rwh45`` program over the requested forecast window.  Calls
    sys.exit() on argument errors or when no input file is available.
    """

    parser = OptionParser(
        usage=
        "%prog [options] rwh_config_file fcst_road_seg_file vdt_road_seg_file state forecast_hours_out"
    )
    parser.add_option(
        "-d",
        "--date",
        dest="date",
        help="run rwh application for specified date (YYYYMMDD.HHMM)")
    parser.add_option("-l", "--log", dest="log", help="base name of log file")

    (options, args) = parser.parse_args()

    if len(args) < 5:
        parser.print_help()
        sys.exit(2)

    config_file = args[0]
    fcst_road_seg_file = args[1]
    vdt_road_seg_file = args[2]
    state = args[3]
    fcst_hours_out = int(args[4])

    ret = 0

    # Log under the supplied base name, or to the default destination.
    if options.log:
        logg = log_msg.LogMessage(options.log, "pyl")
    else:
        logg = log_msg.LogMessage("")

    logg.write_starting("run_rwh.py")

    # Resolve date and time
    # Get optional date and time if we have it
    if options.date:
        # NOTE(review): assumes -d is "YYYYMMDD.HHMM"; seconds padded "00".
        date_time = options.date
        curr_date = date_time[:8]
        curr_hhmmss = date_time[9:] + "00"
        curr_hhmm = date_time[9:]
    else:
        curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
        curr_hhmmss = time.strftime("%H%M%S", time.gmtime(time.time()))
        curr_hhmm = time.strftime("%H%M", time.gmtime(time.time()))

    # Unix time (UTC) of the run date/time.
    dt = "%s.%s" % (curr_date, curr_hhmmss)
    ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
    time_val = tim.mkgmtime(ttup)

    # Get the latest road-weather forecast file
    # (look back at most 2 hours = 7200 s from the run time)
    rdwx_dir = "%s/%s" % (vii_paths.FORECAST_DIR, "rdwx_fcst")
    index = index_file.Index_file(rdwx_dir, "index", 24, "r", 0)
    f, d = get_file.get_file(index, time_val - 7200, time_val, 1, 1)
    road_wx_opt_str = ""
    if (len(f) == 0):
        logg.write_warning(
            "No recent road-weather forecast file found in %s." % rdwx_dir)
    else:
        rdwx_file = f[0]
        road_wx_opt_str = "-w %s" % rdwx_file

    # Get the latest road-conditions forecast file
    road_cond_dir = "%s/%s" % (vii_paths.FORECAST_DIR, "merge_rec_tmt")
    index = index_file.Index_file(road_cond_dir, "index", 24, "r", 0)
    f, d = get_file.get_file(index, time_val - 7200, time_val, 1, 1)
    road_cond_opt_str = ""
    if (len(f) == 0):
        logg.write_warning("No recent road-cond forecast file found in %s." %
                           road_cond_dir)
    else:
        road_cond_file = f[0]
        road_cond_opt_str = "-r %s" % road_cond_file

    # Get the latest VDT segment-statistics file
    # Look back for the latest 5 minute file
    # Only look back 16 mintues to get the latest run
    # otherwise we do not run rwh
    #
    # NOTE(review): 1860 s is actually a 31-minute window, not 16 as the
    # comment above states.  Also, integer division here assumes Python 2
    # semantics (`/` on ints truncates); under Python 3 use `//`.
    nearest_1min_time = (int(time_val) / 60) * 60
    max_lookback_time = nearest_1min_time - 1860
    loop_time = nearest_1min_time
    found = 0

    seg_stats_state_dir = "%s/%s_vdt_output" % (vii_paths.PROCESSED_DIR, state)

    output_date = ""
    output_hhmm = ""
    # Walk backwards one minute at a time until a file is found or the
    # lookback window is exhausted.
    while (loop_time >= max_lookback_time):
        file_date = time.strftime("%Y%m%d", time.gmtime(loop_time))
        file_hhmm = time.strftime("%H%M", time.gmtime(loop_time))

        seg_stats_file = "%s/%s/segment_statistics.%s.%s.nc" % (
            seg_stats_state_dir, file_date, file_date, file_hhmm)

        if os.path.exists(seg_stats_file):
            found = 1
            output_date = file_date
            output_hhmm = file_hhmm
            break

        loop_time = loop_time - 60

    seg_stats_opt_str = ""
    if (not found):
        logg.write_warning(
            "No recent segment-statistics file found in %s, running with just forecast files."
            % seg_stats_state_dir)
    else:
        seg_stats_opt_str = "-s %s" % seg_stats_file

    # If we don't have any of the input files, exit.
    if (road_wx_opt_str == "" and road_cond_opt_str == ""
            and seg_stats_opt_str == ""):
        logg.write_error(
            "No recent road-wx, road-cond or seg-stats file found, can't run rwh."
        )
        logg.write_ending(exit_status=1, msg="run_rwh.py")
        sys.exit(1)

    # Setup command to run rwh
    #
    # End time is rounded down to the whole hour (Python 2 int division).
    begin_time_val = time_val
    end_time_val_init = time_val + (fcst_hours_out * 3600)
    end_time_val = (int(end_time_val_init) / 3600) * 3600

    rwh_begin_time_string = time.strftime("%Y%m%d%H%M",
                                          time.gmtime(begin_time_val))
    rwh_end_time_string = time.strftime("%Y%m%d%H%M",
                                        time.gmtime(end_time_val))

    # NOTE(review): if no seg-stats file was found, output_date and
    # output_hhmm are still "" here, yielding names like "<state>_rwh..nc".
    output_state_dir = "%s/%s_rwh_output" % (vii_paths.PROCESSED_DIR, state)
    output_dir = "%s/%s" % (output_state_dir, output_date)
    output_file = "%s_rwh.%s.%s.nc" % (state, output_date, output_hhmm)
    output_path = "%s/%s" % (output_dir, output_file)

    # Make dated output dir if necessary
    mkdir_path(output_dir)

    cmd_str = "rwh45 %s %s %s %s %s %s -l %s %s %s %s" % (
        config_file, rwh_begin_time_string, rwh_end_time_string,
        fcst_road_seg_file, vdt_road_seg_file, output_path, options.log,
        road_wx_opt_str, road_cond_opt_str, seg_stats_opt_str)

    # Setup Processed file object
    proc_file = processed_file.ProcessedFile(output_state_dir, "Processed")

    # Run the command line
    ret = run_cmd(cmd_str, logg)
    if ret == 0:
        # Record success so later runs can skip this output.
        proc_file.write_processed(curr_date, output_file,
                                  processed_file.PROC_SUCCESS)
    else:
        logg.write_error("run_cmd() failed for rwh command_str.")

    logg.write_ending(exit_status=ret, msg="run_rwh.py")
    return ret
# Script-level configuration for the fec_rc_cur_tmt processing run.
# NOTE(review): Params_dir, site_list, nbr_file, cdl_file, obs and
# rctm_path are defined elsewhere in the file / imported modules.
age = "86400 86400"

obs_dir = "None"
concat_meso_dir = "%s/%s" % (rctm_path.Rctm_root_dir, "concat_meso")

output_dir = "%s/%s" % (rctm_path.Rctm_root_dir, "fec_rc_cur_tmt")

log_base = "%s/%s" % (rctm_path.Log_dir, "fec_rc_cur_tmt")

params_file = "%s/%s" % (Params_dir, "fec.rc.params")

# Honor TEST_MODE_TIME (YYYYMMDD.HHMMSS) when set; otherwise use "now".
# NOTE(review): the bare except deliberately treats a missing or
# malformed TEST_MODE_TIME as "run in real time".
try:
    date_time = os.environ['TEST_MODE_TIME']
    date_tup = time.strptime("%s" % (date_time), "%Y%m%d.%H%M%S")
    fcst_time = tim.mkgmtime(date_tup)
    date_str = "-d %s" % (date_time)
except:
    fcst_time = time.time()
    date_tup = time.gmtime(fcst_time)
    date_time = time.strftime("%Y%m%d.%H%M%S", date_tup)
    date_str = ''

# Round the forecast time down to the whole hour (Python 2 int division).
fcst_time = (int(fcst_time) / 3600) * 3600

static_files = "%s %s %s" % (site_list, nbr_file, cdl_file)

# Latest concatenated mesonet obs file (or the string "none").
concat_meso_file = obs.get_concat_meso_file(concat_meso_dir, date_time)
if (os.path.exists(concat_meso_file)):
    concat_meso_str = "-f -c %s" % concat_meso_file
else:
Esempio n. 9
0
        log_str = ""
    
    
    # Set up strings for user-supplied date and real-time
    if utime:
        curr_date = utime[:8]
        curr_time = utime[9:] + "00"
    else:
        curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
        curr_time = time.strftime("%H%M%S", time.gmtime(time.time()))


    # Create a Unix time from date/time info
    dt = "%s.%s" % (curr_date, curr_time)
    ttup = time.strptime(dt,"%Y%m%d.%H%M%S")
    time_val = tim.mkgmtime(ttup)
    fcst_hr = curr_time[:2]
    
    cmd = "dump_fcst_data"
    
    # ********************** Dump Mesh Forecast ********************************
    
    # Get latest mesh input files
    index = index_file.Index_file(wx_input_dir, sys_path.Index_base, sys_path.Index_period, "r", sys_path.Index_wait)
    f, d = get_file.get_file(index, time_val - 86400, time_val, 1, 1)
    
    # If no input file, exit
    if (len(f) == 0):
        logf.write_time("Warning: No recent weather forecast file found.\n")
        wx_fcst_file = ""
    else:
 def get_utime(self, fname):
     """Return the Unix (UTC) time of the date encoded in fname."""
     return tim.mkgmtime(tim.datetotuple(self.get_date(fname)))
def main():
    """Run cdf_to_csv_dicast for the MI, MN and NV states and concatenate
    the per-state csv outputs into a single file.

    For each state the most recent VDT segment-statistics netCDF (within
    the last 30 minutes) is converted to csv under <csv_output_dir>/tmp;
    the csv files are then cat'ed into one combined output file.
    """

    parser = OptionParser(
        usage="%prog [options] cdf_to_csv_config_file csv_output_dir")
    parser.add_option(
        "-d",
        "--date",
        dest="date",
        help=
        "run cdf_to_csv_dicast application for specified date (YYYYMMDD.HHMM)")
    parser.add_option("-l", "--log", dest="log", help="base name of log file")

    (options, args) = parser.parse_args()

    if len(args) < 2:
        parser.print_help()
        sys.exit(2)

    config_file = args[0]
    output_dir = args[1]

    ret = 0

    if options.log:
        logg = log_msg.LogMessage(options.log, "pyl")
    else:
        logg = log_msg.LogMessage("")

    logg.write_starting("run_cdf_to_csv_dicast.py")

    # Resolve date and time
    # Get optional date and time if we have it
    if options.date:
        # NOTE(review): assumes -d is "YYYYMMDD.HHMM"; seconds padded "00".
        date_time = options.date
        curr_date = date_time[:8]
        curr_hhmmss = date_time[9:] + "00"
        curr_hhmm = date_time[9:]
    else:
        curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
        curr_hhmmss = time.strftime("%H%M%S", time.gmtime(time.time()))
        curr_hhmm = time.strftime("%H%M", time.gmtime(time.time()))

    # Unix time (UTC) of the run date/time.
    dt = "%s.%s" % (curr_date, curr_hhmmss)
    ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
    time_val = tim.mkgmtime(ttup)

    # Make output dirs if necessary
    tmp_output_dir = "%s/tmp" % (output_dir)
    mkdir_path(output_dir)
    mkdir_path(tmp_output_dir)

    # Run cdf_to_cvs_dicast for all three states
    # then concatenate
    states = ["mi", "mn", "nv"]
    cat_str = ""
    for state in states:

        cap_state = ""
        if state == "mi":
            cap_state = "MI"
        elif state == "mn":
            cap_state = "MN"
        elif state == "nv":
            cap_state = "NV"

        fcst_site_list = "%s/%s_logicast_segment_sites_only.asc" % (
            vii_paths.SITE_LIST_DIR, cap_state)
        vdt_seg_file = "%s/%s_roads.20131111.nc" % (vii_paths.CDL_DIR, state)
        seg_stats_dir = "%s/%s_vdt_output" % (vii_paths.PROCESSED_DIR, state)

        #print "fcst_site_list: %s" % fcst_site_list
        #print "vdt_seg_file: %s" %  vdt_seg_file
        #print "seg_stats_dir: %s" % seg_stats_dir

        # Get the latest VDT segment-statistics file
        # Look back for the latest 5 minute file
        # Only look back 30 mintues to get the latest run
        #
        # NOTE(review): integer division assumes Python 2 semantics
        # (`/` on ints truncates); under Python 3 use `//`.
        nearest_5min_time = (int(time_val) / 300) * 300
        max_lookback_time = nearest_5min_time - (30 * 60)
        loop_time = nearest_5min_time
        found = 0

        # Walk backwards one minute at a time until a file is found or
        # the 30-minute lookback window is exhausted.
        while (loop_time >= max_lookback_time):
            file_date = time.strftime("%Y%m%d", time.gmtime(loop_time))
            file_hhmm = time.strftime("%H%M", time.gmtime(loop_time))

            file_base = "segment_statistics.%s.%s" % (file_date, file_hhmm)
            seg_stats_file = "%s/%s/%s.nc" % (seg_stats_dir, file_date,
                                              file_base)
            tmp_output_file = "%s/%s_%s.csv" % (tmp_output_dir, state,
                                                file_base)

            if os.path.exists(seg_stats_file):
                found = 1
                break

            loop_time = loop_time - 60

        if (found):
            # seg_stats_file / tmp_output_file retain the values from the
            # loop iteration that hit the break above.
            cmd_str = "cdf_to_csv_dicast %s %s %s %s %s" % (
                config_file, fcst_site_list, vdt_seg_file, seg_stats_file,
                tmp_output_file)
            # Run the command line
            ret = run_cmd(cmd_str, logg)
            if ret == 0:
                # Accumulate successfully-converted files for the final cat.
                cat_str = cat_str + tmp_output_file + " "
            if ret != 0:
                logg.write_error(
                    "run_cmd() failed for cdf_to_csv_dicast cmd_str.")
        else:
            logg.write_warning(
                "No recent segment-statistics file found in %s, not running cdf_to_csv_dicast for %s"
                % (seg_stats_dir, state))

    #print "cat_str = ", cat_str

    # Set up paths and command for concatenating the state csv files
    # NOTE(review): if cat_str is empty this still runs "cat  > out",
    # producing an empty output file.
    output_file = "all_segment_statistics.%s.%s.csv" % (curr_date, curr_hhmm)
    output_path = "%s/%s" % (output_dir, output_file)
    cmd_str = "cat %s > %s" % (cat_str, output_path)

    # Run the command line
    ret = run_cmd(cmd_str, logg)
    if ret != 0:
        logg.write_error("run_cmd() failed for cat cmd_str.")

    logg.write_ending(exit_status=ret, msg="run_cdf_to_csv_dicast.py")
    return ret
Esempio n. 12
0
# Script-level driver that dumps road-condition mesonet observations for
# the hour before the current (or TEST_MODE_TIME) time.
# NOTE(review): Site_list_dir and proc_script are defined elsewhere in
# the file / imported modules.
Cdl_dir = "%s/%s" % (rctm_path.Rctm_root_dir, "static_data/cdl")
Params_dir = "%s/%s" % (rctm_path.Rctm_root_dir, "static_data/params")

log_base = "%s/%s_rc" % (rctm_path.Log_dir, rctm_path.Dump_meso_base)

meso_input_dir = "%s/%s" % (rctm_path.Rctm_root_dir, "dec_data/obs/mesonet")
output_dir = "%s/%s" % (rctm_path.Rctm_root_dir, "dump_meso")

site_var_file = "%s/%s" % (Site_list_dir, "dump_meso_site_var_file.asc")
site_list = "%s/%s" % (Site_list_dir, "road_cond_sites.asc")


# Honor TEST_MODE_TIME (YYYYMMDD.HHMMSS) when set; otherwise use "now".
# NOTE(review): the bare except deliberately treats a missing or
# malformed TEST_MODE_TIME as "run in real time".
try:
    date_time = os.environ['TEST_MODE_TIME']
    date_tup = time.strptime("%s" % (date_time),"%Y%m%d.%H%M%S")
    cur_time = tim.mkgmtime(date_tup)
except:
    cur_time = time.time()

cur_date = time.strftime("%Y%m%d.%H%M", time.gmtime(cur_time))

one_hour_time = cur_time - 3600
one_hour_date = time.strftime("%Y%m%d.%H%M", time.gmtime(one_hour_time))

# Run for two hours back
#command = "%s -d %s -l %s %s %s %s %s" % (proc_script, two_hour_date, log_base, meso_input_dir, output_dir, site_var_file, site_list)
#ret = os.system(command)

# Run for one hour back
command = "%s -d %s -l %s %s %s %s %s" % (proc_script, one_hour_date, log_base, meso_input_dir, output_dir, site_var_file, site_list)
ret = os.system(command)
def main():
    """Reformat the Alaska DOT road-temperature csv into the project's
    mobile-observation csv layout.

    Reads AkDOT_Road_Temps.csv from ftp_input_dir, groups rows by site
    id, QCs the values, converts the timestamps from AKST to UTC and
    writes a dated "alaska.<date>.<hhmm>.csv" file under output_dir.
    Exits via sys.exit(0) when there is no usable input.
    """

    parser = OptionParser(usage="%prog [options] ftp_input_dir output_dir")
    parser.add_option("-l", "--log", dest="log", help="base name of log file")

    (options, args) = parser.parse_args()

    if len(args) < 2:
        parser.print_help()
        sys.exit(2)

    input_dir = args[0]
    output_dir = args[1]

    input_file_name = "AkDOT_Road_Temps.csv"
    input_path = "%s/%s" % (input_dir, input_file_name)

    ret = 0

    if options.log:
        logg = log_msg.LogMessage(options.log, "pyl")
    else:
        logg = log_msg.LogMessage("")

    logg.write_starting("get_AK_mobile_data.py")

    # Resolve date and time
    curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
    curr_hhmmss = time.strftime("%H%M%S", time.gmtime(time.time()))
    curr_hhmm = time.strftime("%H%M", time.gmtime(time.time()))

    # Unix time (UTC) of the run time; time_val is currently unused below.
    dt = "%s.%s" % (curr_date, curr_hhmmss)
    ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
    time_val = tim.mkgmtime(ttup)

    # Make output dir if necessary
    dest_dir = "%s/%s" % (output_dir, curr_date)
    mkdir_path(dest_dir)
    output_file_name = "alaska.%s.%s.csv" % (curr_date, curr_hhmm)
    output_path = "%s/%s" % (dest_dir, output_file_name)

    # Open input file and read in lines
    # Create a dictionary that contains the data for each site
    # site_data maps site-id -> 5 parallel lists:
    # [date-times, variable-names, values, latitudes, longitudes]
    site_data = {}
    num_columns = 5
    num_valid_lines = 0
    if os.path.exists(input_path):
        # NOTE(review): in_fh is never closed; relies on interpreter cleanup.
        in_fh = open(input_path, "r")
        for line in in_fh:
            line = line.rstrip('\n' + '\r')
            fields = line.split(',')
            if (len(fields) != 6):
                logg.write_error("Found %d fields in line %s, expecting 6" %
                                 (len(fields), line))
                continue

            # Skip the header
            if (re.match("label", fields[0])):
                continue

            # Valid entries start with the site-id which is a decimal (integer)
            if (re.match('\d', line)):
                #print "line: %s" % line
                site = fields[0]

                if site not in site_data.keys():
                    site_data[site] = []
                    for x in xrange(0, num_columns):
                        site_data[site].append([])

                site_data[site][0].append(fields[1])  # Date-Time (0)
                site_data[site][1].append(fields[2])  # Variable-name (1)
                site_data[site][2].append(fields[3])  # Value (2)
                site_data[site][3].append(fields[4])  # Latitude (3)
                site_data[site][4].append(fields[5])  # Longitude (4)

                num_valid_lines = num_valid_lines + 1

    # Check if we have any valid data, if not exit
    #print "num_valid_lines: %d" % num_valid_lines
    if (num_valid_lines == 0):
        logg.write_warning(
            "No data found in most recent input file, not creating output file"
        )
        logg.write_ending(exit_status=0, msg="get_AK_mobile_data.py")
        sys.exit(0)

    # Loop over the data dictionary and output the data
    # The key is the site-id
    out_fh = open(output_path, "w")

    # Write header
    header_line = "VehicleID,Time(UTC),Latitude,Longitude,AirTemp,RelativeHumidity,RoadTemp,DewpointTemp"
    out_fh.write("%s\n" % header_line)

    for key in site_data.keys():
        #print "key : %s" % key

        # Get the date-time for each site
        # The date-time is the first column in the dict (x = 0)
        # Use the first date-time row in the dict for output (y = 0)
        x = 0
        y = 0

        # NOTE(review): assumes "MM/DD/YYYY HH:MM:SS" — confirm with feed.
        date_time_fields = site_data[key][x][y].split()

        date_fields = date_time_fields[0].split('/')
        month = date_fields[0]
        day = date_fields[1]
        year = date_fields[2]

        time_fields = date_time_fields[1].split(':')
        hour = time_fields[0]
        minute = time_fields[1]
        second = time_fields[2]

        site_dt = "%s%s%s.%s%s%s" % (year, month, day, hour, minute, second)
        site_ttup = time.strptime(site_dt, "%Y%m%d.%H%M%S")
        site_time_val = tim.mkgmtime(site_ttup)

        # Convert to UTC = AKST + 9hours
        # NOTE(review): fixed offset — does not account for daylight time.
        site_time_val = site_time_val + (9 * 3600)

        # Get the latitude for each site
        # The latitude is the fourth column in the dict (x = 3)
        # Use the first latitude row in the dict (y = 0)
        x = 3
        y = 0
        lat = "%.4f" % float(site_data[key][x][y])

        # Get the longitude for each site
        # The longitude is the fith column in the dict (x = 4)
        # Use the first longitude row in the dict (y = 0)
        x = 4
        y = 0
        lon = "%.4f" % float(site_data[key][x][y])

        # Get the data value (T, rh, road-T, dewpt)
        # The data values is the third column in the dict (x = 2)
        # The first row (y = 0) is T, second row (y = 1) is rh, third row (y = 2) is road-T, fourth row (y = 3) is dewpt
        x = 2

        T = float(site_data[key][x][0])
        rh = float(site_data[key][x][1])
        road_T = float(site_data[key][x][2])
        dewpt = float(site_data[key][x][3])

        # QC the value
        # Out-of-range values are emitted as empty strings.
        if (T < -70.0 or T > 70.0):
            T_str = ""
        else:
            T_str = "%.2f" % T

        if (rh < 0.0 or rh > 100.0):
            rh_str = ""
        else:
            rh_str = "%.0f" % rh

        if (road_T < -70.0 or road_T > 70.0):
            road_T_str = ""
        else:
            road_T_str = "%.2f" % road_T

        if (dewpt < -70.0 or dewpt > 70.0):
            dewpt_str = ""
        else:
            dewpt_str = "%.2f" % dewpt

        output_line = "%s,%s,%s,%s,%s,%s,%s,%s" % (
            key, site_time_val, lat, lon, T_str, rh_str, road_T_str, dewpt_str)
        #print "%s" % output_line
        out_fh.write("%s\n" % output_line)

    out_fh.close()

    logg.write_ending(exit_status=ret, msg="get_AK_mobile_data.py")
    return ret
def main():
    """Reformat the latest Alaska DOT mobile csv and run
    alaska2probe_message.py on the result.

    Steps:
      1. Scan ftp_input_dir for the newest "AKD<mmddHmm>.csv" file,
         stepping back one minute at a time for up to 24 hours.
      2. Group its rows by site id, QC and convert the values
         (degF -> degC, mph -> m/s, AKDT -> UTC) and append them as csv
         under raw_output_dir.
      3. Run alaska2probe_message.py to produce netCDF under
         processed_output_dir.

    Exits via sys.exit() on argument errors or when no usable input is
    found; otherwise returns the exit status of the conversion command.
    """

    parser = OptionParser(
        usage=
        "%prog [options] ftp_input_dir raw_output_dir cdl_file processed_output_dir"
    )
    parser.add_option("-l", "--log", dest="log", help="base name of log file")

    (options, args) = parser.parse_args()

    if len(args) < 4:
        parser.print_help()
        sys.exit(2)

    input_dir = args[0]
    raw_output_dir = args[1]
    cdl_file = args[2]
    proc_output_dir = args[3]

    ret = 0

    if options.log:
        logg = log_msg.LogMessage(options.log, "pyl")
    else:
        logg = log_msg.LogMessage("")

    logg.write_starting("frmt_and_run_alaska2probe_message.py")

    # Resolve date and time
    curr_date = time.strftime("%Y%m%d", time.gmtime(time.time()))
    curr_hhmmss = time.strftime("%H%M%S", time.gmtime(time.time()))
    curr_hhmm = time.strftime("%H%M", time.gmtime(time.time()))

    # Unix time (UTC) of the run time.
    dt = "%s.%s" % (curr_date, curr_hhmmss)
    ttup = time.strptime(dt, "%Y%m%d.%H%M%S")
    time_val = tim.mkgmtime(ttup)

    # Find the latest csv file in the ftp directory
    # Loop back 24 hours and look for a file that has a specific month/day/hour format
    #
    input_file_prefix = "AKD"
    found = 0
    loop_time = time_val
    end_time = time_val - (24 * 3600)
    while (loop_time >= end_time):
        loop_month = time.strftime("%m", time.gmtime(loop_time))
        loop_day = time.strftime("%d", time.gmtime(loop_time))
        # BUG FIX: the original chained assignments ("tmp_loop_hh =
        # curr_hhmm = ..." and "loop_mm = curr_hhmm = ...") silently
        # clobbered curr_hhmm with %H / %M values; keep curr_hhmm intact.
        tmp_loop_hh = time.strftime("%H", time.gmtime(loop_time))
        # The hour in the file name is unpadded, hence the int conversion.
        dec_loop_hh = int(tmp_loop_hh)
        loop_mm = time.strftime("%M", time.gmtime(loop_time))

        input_file_name = "%s%s%s%d%s.csv" % (input_file_prefix, loop_month,
                                              loop_day, dec_loop_hh, loop_mm)
        input_path = "%s/%s" % (input_dir, input_file_name)

        if (os.path.exists(input_path)):
            found = 1
            break

        loop_time = loop_time - 60

    if (not found):
        logg.write_warning(
            "No recent input file found, not creating output file")
        logg.write_ending(exit_status=0,
                          msg="frmt_and_run_alaska2probe_message.py")
        sys.exit(0)

    # Open input file and read in lines
    # Create a dictionary that contains the data for each site
    # site_data maps site-id -> 7 parallel lists:
    # [date-times, variable-names, values, latitudes, longitudes,
    #  speeds, headings]
    site_data = {}
    num_columns = 7
    num_valid_lines = 0
    if os.path.exists(input_path):
        in_fh = open(input_path, "r")
        for line in in_fh:
            line = line.rstrip('\n' + '\r')
            fields = line.split(',')
            if (len(fields) != 8):
                logg.write_error("Found %d fields in line %s, expecting 8" %
                                 (len(fields), line))
                continue

            # Skip the header
            if (re.match("label", fields[0])):
                continue

            # Valid entries start with the site-id which is a decimal (integer)
            if (re.match(r'\d', line)):
                site = fields[0]

                if site not in site_data:
                    site_data[site] = []
                    for x in xrange(0, num_columns):
                        site_data[site].append([])

                site_data[site][0].append(fields[1])  # Date-Time (0)
                site_data[site][1].append(fields[2])  # Variable-name (1)
                site_data[site][2].append(fields[3])  # Value (2)
                site_data[site][3].append(fields[4])  # Latitude (3)
                site_data[site][4].append(fields[5])  # Longitude (4)
                site_data[site][5].append(fields[6])  # Speed (5)
                site_data[site][6].append(fields[7])  # Heading (6)

                num_valid_lines = num_valid_lines + 1

        # BUG FIX: close the input file handle (was left open).
        in_fh.close()

    # Check if we have any valid data, if not exit
    if (num_valid_lines == 0):
        logg.write_warning(
            "No data found in most recent input file, not creating output file"
        )
        logg.write_ending(exit_status=0,
                          msg="frmt_and_run_alaska2probe_message.py")
        sys.exit(0)

    # Make raw output dir if necessary
    raw_dest_dir = "%s/%s" % (raw_output_dir, curr_date)
    mkdir_path(raw_dest_dir)
    raw_output_file_name = "alaska.%s.csv" % curr_date
    raw_output_path = "%s/%s" % (raw_dest_dir, raw_output_file_name)

    # Check if output file exists, if it does don't write header
    header_flag = 1
    if (os.path.exists(raw_output_path)):
        header_flag = 0

    # Open the output file and append data to it.
    out_fh = open(raw_output_path, "a")

    # Write header
    header_line = "VehicleID,Time(UTC),Latitude,Longitude,Speed,Heading,AirTemp,RelativeHumidity,RoadTemp,DewpointTemp"
    if (header_flag):
        out_fh.write("%s\n" % header_line)

    # Loop over the data dictionary and output the data
    # The key is the site-id
    for key in site_data.keys():

        # Get the date-time for each site
        # The date-time is the first column in the dict (x = 0)
        # Use the first date-time row in the dict for output (y = 0)
        # NOTE(review): assumes "MM/DD/YYYY HH:MM:SS" — confirm with feed.
        x = 0
        y = 0

        date_time_fields = site_data[key][x][y].split()

        date_fields = date_time_fields[0].split('/')
        month = date_fields[0]
        day = date_fields[1]
        year = date_fields[2]

        time_fields = date_time_fields[1].split(':')
        hour = time_fields[0]
        minute = time_fields[1]
        second = time_fields[2]

        site_dt = "%s%s%s.%s%s%s" % (year, month, day, hour, minute, second)
        site_ttup = time.strptime(site_dt, "%Y%m%d.%H%M%S")
        site_time_val = tim.mkgmtime(site_ttup)

        # Convert to UTC = AKST + 9hours for AST
        # Convert to UTC = AKST + 8hours for ADT
        # NOTE(review): fixed +8h offset assumes daylight time year-round.
        site_time_val = site_time_val + (8 * 3600)

        # Get the latitude for each site
        # The latitude is the fourth column in the dict (x = 3)
        # Use the first latitude row in the dict (y = 0)
        x = 3
        y = 0
        lat = "%.4f" % float(site_data[key][x][y])

        # Get the longitude for each site
        # The longitude is the fith column in the dict (x = 4)
        # Use the first longitude row in the dict (y = 0)
        x = 4
        y = 0
        lon = "%.4f" % float(site_data[key][x][y])

        # Get the speed for each site
        # The speed is the sixth column in the dict (x = 5)
        # Use the first speed row in the dict (y = 0)
        x = 5
        y = 0
        speed = float(site_data[key][x][y])

        # Get the heading for each site
        # The heading is the seventh column in the dict (x = 6)
        # Use the first speed row in the dict (y = 0)
        x = 6
        y = 0
        heading = float(site_data[key][x][y])

        # Get the data value (T, rh, road-T, dewpt)
        # The data values is the third column in the dict (x = 2)
        # The first row (y = 0) is T, second row (y = 1) is rh, third row (y = 2) is road-T, fourth row (y = 3) is dewpt
        x = 2

        T = float(site_data[key][x][0])
        rh = float(site_data[key][x][1])
        road_T = float(site_data[key][x][2])
        dewpt = float(site_data[key][x][3])

        # QC the values and convert units (degF to degC)
        #
        # T is in degF
        # rh is in %
        # road_T is in degF
        # dewpt is in degF
        #
        # Out-of-range values are emitted as empty strings.
        if (T < -70.0 or T > 90.0):
            T_str = ""
        else:
            T_str = "%.2f" % ((T - 32.0) / 1.8)

        if (rh < 0.0 or rh > 100.0):
            rh_str = ""
        else:
            rh_str = "%.0f" % rh

        if (road_T < -70.0 or road_T > 90.0):
            road_T_str = ""
        else:
            road_T_str = "%.2f" % ((road_T - 32.0) / 1.8)

        if (dewpt < -70.0 or dewpt > 90.0):
            dewpt_str = ""
        else:
            dewpt_str = "%.2f" % ((dewpt - 32.0) / 1.8)

        # Speed: mph -> m/s.
        if (speed < 0 or speed > 200):
            speed_str = ""
        else:
            speed_str = "%.2f" % (speed * 0.44704)

        if (heading < 0 or heading > 360):
            heading_str = ""
        else:
            heading_str = "%.0f" % heading

        output_line = "%s,%s,%s,%s,%s,%s,%s,%s,%s,%s" % (
            key, site_time_val, lat, lon, speed_str, heading_str, T_str,
            rh_str, road_T_str, dewpt_str)
        out_fh.write("%s\n" % output_line)

    out_fh.close()

    #
    # Run alaska2probe_message.py (this uses the alaska_reader.py class)
    #

    # Make processed output dir if necessary
    proc_dest_dir = "%s/%s" % (proc_output_dir, curr_date)
    mkdir_path(proc_dest_dir)
    proc_output_file_name = "alaska.%s.nc" % curr_date
    proc_output_path = "%s/%s" % (proc_dest_dir, proc_output_file_name)

    cmd_str = "alaska2probe_message.py %s %s %s 0" % (
        cdl_file, raw_output_path, proc_output_path)

    # Run the command line
    ret = run_cmd(cmd_str, logg)
    if ret != 0:
        logg.write_error(
            "run_cmd() failed for alaska2probe_message.py command_str.")

    logg.write_ending(exit_status=ret,
                      msg="frmt_and_run_alaska2probe_message.py")
    return ret