def timeout_check():
    """Exit the process if it has been running longer than TIME_LIMIT hours.

    Compares the current wall clock against the module-level start time
    ``THEN``; on timeout, logs a fatal message and terminates with status 1.
    """
    elapsed = time.time() - THEN
    if elapsed > TIME_LIMIT * 3600:
        timestamp("[FATAL ERROR]", "Exceeded time limit")
        sys.exit(1)
def read_ww3_data(filename):
    """Read a Wave Watch III grib2 file into a dict of plain arrays.

    Parameters
    ----------
    filename : str
        Path to a grib2 file readable by the cfgrib engine.

    Returns
    -------
    dict
        Keys: 'lons', 'lats', 'wvhgt' (converted to ft via M2FT),
        'wspd' (converted to kt via MS2KT), 'time' and 'runtime'
        (pandas datetimes for valid time and model run time).
    """
    timestamp("INFO", "Reading: %s" % (filename))
    # Use a context manager so the grib file handle is released as soon as
    # the values are extracted (original left the dataset open).
    with xr.open_dataset(filename, engine='cfgrib') as ds:
        data = {
            # presumably shifts 0-360 longitudes to negative/west -- TODO confirm
            'lons': ds.longitude.values - 360.,
            'lats': ds.latitude.values,
            'wvhgt': ds.swh.values * M2FT,    # significant wave height -> feet
            'wspd': ds.ws.values * MS2KT,     # wind speed -> knots
            'time': pd.to_datetime(ds.valid_time.values),
            'runtime': pd.to_datetime(ds.time.values),
        }
    return data
from subprocess import Popen, PIPE

try:
    from shutil import which
except ImportError:
    # Python 2 fallback
    from distutils.spawn import find_executable as which

from configs import BUOYS, BUOY_URL, DATA_DIR
from cron_helper import timestamp

# Locate a downloader on the system: prefer wget, fall back to curl.
WGET = which('wget')
# Bind CURL unconditionally so later references can never raise NameError
# (original only assigned it inside `if not WGET:`).
CURL = which('curl') if not WGET else None
if not WGET and not CURL:
    raise ValueError("Neither wget nor curl found on the system. Exiting")

# Fetch the latest observation file for every configured buoy.
for buoy in BUOYS.keys():
    url = "%s/%s.txt" % (BUOY_URL, buoy)
    out_path = '%s/%s.txt' % (DATA_DIR, buoy)
    if WGET:
        p = Popen([WGET, '-O', out_path, url], stderr=PIPE)
    else:
        # CURL is guaranteed non-None here by the check above.
        p = Popen([CURL, '-o', out_path, url], stderr=PIPE)
    p.wait()
    timestamp("INFO", url)
def main(start, end, nproc=None):
    """Plot WW3 wave-height and wind-speed time series, plus buoy obs, per buoy.

    Parameters
    ----------
    start, end : datetime
        Inclusive date range; each day's DATA_DIR/<YYYY-MM-DD>/ directory
        is globbed for grib2 files.
    nproc : int or str, optional
        If set, number of multiprocessing workers used to read the files.
    """
    # Collect every grib2 file for each day in the requested range.
    files = []
    start1 = start
    while start1 <= end:
        date_string = datetime.strftime(start1, "%Y-%m-%d")
        files.extend(glob("%s/%s/*.grib2" % (DATA_DIR, date_string)))
        start1 += timedelta(days=1)

    # Read the model data, serially or via a process pool.
    arr = []
    if not nproc:
        for f in files:
            arr.append(read_ww3_data(f))
    else:
        pool = Pool(int(nproc))
        arr = pool.map(read_ww3_data, files)

    # One shade per model run; older runs fade toward lighter purple.
    colors = plt.cm.Purples_r(np.linspace(0, 1, len(files)))

    for stn_id in BUOYS.keys():
        plt.style.use('%s/style.mplstyle' % (SCRIPT_PATH))
        fig, ax = plt.subplots(2, figsize=(20,8), sharex='col')
        fig.subplots_adjust(hspace=0.15)
        buoy_data = read_buoy_data(DATA_DIR, stn_id)
        # BUOYS appears to map stn_id -> (lat, lon) and nearest_idx to want
        # (lon, lat) -- TODO confirm against configs/nearest_idx.
        points = [BUOYS[stn_id][1], BUOYS[stn_id][0]]

        # WW3 timeseries plots
        for i in range(len(arr)):
            if i == 0:
                # Nearest grid index to the buoy; computed once on the
                # assumption that every run shares the same grid.
                idx = nearest_idx(points, arr[i]['lons'], arr[i]['lats'])
            if i == len(arr)-1:
                ww3_run = 'current'
            else:
                ww3_run = 'past'
                ww3_prop['past']['color'] = colors[i]
            ax[0].plot(arr[i]['time'][0:FHR],
                       arr[i]['wvhgt'][0:FHR,idx[0][0],idx[0][1]],
                       label=arr[i]['runtime'], **ww3_prop[ww3_run])
            ax[1].plot(arr[i]['time'][0:FHR],
                       arr[i]['wspd'][0:FHR,idx[0][0],idx[0][1]],
                       **ww3_prop[ww3_run])

        # Adding in buoy data
        if buoy_data:
            ax[1].quiver(buoy_data['time'], buoy_data['wspd_adj'],
                         buoy_data['u'], buoy_data['v'], **barb_prop)
            ax[1].scatter(buoy_data['time'], buoy_data['wspd_adj'], **buoy_prop)
            #ax[1].scatter(buoy_data['time'], buoy_data['wspd'], **raw_buoy_prop)
            # For stations that don't report wave heights
            if np.nansum(buoy_data['wvhgt']) > 0:
                ax[0].scatter(buoy_data['time'], buoy_data['wvhgt'],
                              **buoy_prop, label=stn_id)

        ax[1].xaxis.set_major_formatter(DateFormatter("%m/%d %H"))

        # Set the axes limits. NOTE: i and idx are leftovers from the loop
        # above, i.e. the limits are scaled to the most recent model run.
        ax[0].set_ylim(0, arr[i]['wvhgt'][0:FHR,idx[0][0],idx[0][1]].max()+5)
        ax[1].set_ylim(0, arr[i]['wspd'][0:FHR,idx[0][0],idx[0][1]].max()*2)
        ax[1].set_xlim([start, end + timedelta(hours=FHR)])
        ax[0].set_ylabel('Significant Wave Height (ft)', fontsize=12)
        ax[1].set_ylabel('Wind Speed (kts) (10-m adjusted)', fontsize=12)
        ax[1].set_xlabel('Valid Date', fontsize=12)

        # Adding vertical time reference (NOW is bound in the __main__ block)
        ymin, ymax = ax[0].get_ylim()
        ax[0].vlines(NOW, ymin, ymax, linestyles='dashed', colors='#ffffff')
        ax[0].text(NOW+timedelta(hours=1), ymax, NOW.strftime('%c'), color='red')
        ymin, ymax = ax[1].get_ylim()
        ax[1].vlines(NOW, ymin, ymax, linestyles='dashed', colors='#ffffff')

        # ------------------------------------------------------------------
        # Adjusting x- and y-axes. Could be moved to a function.
        # ------------------------------------------------------------------
        ax[0].tick_params('both', length=7.5, width=2, which='major')
        ax[0].spines['top'].set_color('none')
        ax[0].spines['right'].set_color('none')
        ax[0].spines['bottom'].set_linewidth(2)
        ax[0].spines['left'].set_linewidth(2)
        adjust_spines(ax[0], ['left', 'bottom'])
        ax[1].tick_params('both', length=7.5, width=2, which='major')
        ax[1].spines['top'].set_color('none')
        ax[1].spines['right'].set_color('none')
        ax[1].spines['bottom'].set_linewidth(2)
        ax[1].spines['left'].set_linewidth(2)
        adjust_spines(ax[1], ['left', 'bottom'])

        plt.savefig("%s/%s.png" % (PLOT_DIR, stn_id), bbox_inches='tight', dpi=250)
        plt.close()

    # Report total wall-clock time (t1 is bound in the __main__ block).
    t2 = datetime.now()
    delta = t2 - t1
    print("===========================================================================")
    timestamp("INFO", "Completed Plotting in %s seconds" % (delta.total_seconds()))
    print("===========================================================================")
# NOTE(review): removed stray module-level duplicates of main()'s epilogue
# (plt.close()/t2/delta/prints) -- they referenced t1 before it was bound
# and would raise NameError at import time.
if __name__ == '__main__':
    ap = argparse.ArgumentParser()
    ap.add_argument('-s', '--start', dest="start", help="YYYY-MM-DD")
    ap.add_argument('-e', '--end', dest="end", help="YYYY-MM-DD")
    ap.add_argument('-np', '--nproc', dest='nproc',
                    help="If using mproc for data read, specify number of cores")
    args = ap.parse_args()

    # Wall-clock start for the completion report printed by main().
    t1 = datetime.now()
    timestamp("INFO", "Begin WW3 Plotting routines...")

    # Defaults: plot the last NUM_DAYS days ending now.
    NOW = datetime.now()
    if args.start is not None:
        start = parse_time(args.start)
    else:
        start = parse_time(NOW - timedelta(days=NUM_DAYS))
    if args.end is not None:
        end = parse_time(args.end)
    else:
        end = parse_time(NOW)

    main(start, end, nproc=args.nproc)
def get_ww3(run_time):
    """Download realtime WW3 data from the NCEP NOMADS server.

    This function uses Wesley Ebisuzaki's get_inv.pl and get_grib.pl scripts
    to allow downloading of a small subset of the full grib2 data files.
    Saves a ton of space and time! Further information can be found at [1]_
    below.

    [1]_: Ebisuzaki, W.: Fast Downloading of GRIB Files: Partial http
    transfers.
    https://www.cpc.ncep.noaa.gov/products/wesley/fast_downloading_grib.html

    Parameters
    ----------
    run_time : string
        Time of the model run to be downloaded. Form is: YYYY-MM-DD/HH

    Returns
    -------
    Individual .grib2 files downloaded to the local system.
    """
    dt = datetime.strptime(run_time, '%Y-%m-%d/%H')
    data_path = "%s/%s" % (DATA_DIR, dt.strftime('%Y-%m-%d'))

    # Not too pretty, but watch for a potential RACE conditions on multiple
    # crons? EEXIST is expected when another cron created the dir first.
    try:
        os.makedirs(data_path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    date_str = dt.strftime('%Y%m%d%H')
    fname = 'glwu.grlc_2p5km.t%sz.grib2' % (date_str[-2:])
    url = '%s%s/%s' % (REALTIME_URL, date_str[0:8], fname)
    full_name = data_path + '/' + fname

    # Shell pipeline invoking the get_inv.pl/get_grib.pl scripts for a
    # partial (variable-filtered) grib2 download.
    arg = '%s/etc/get_inv.pl %s.idx | egrep "%s" | %s/etc/get_grib.pl %s %s' \
        % (SCRIPT_PATH, url, GRIB_VARS, SCRIPT_PATH, url, full_name)

    # Wait (up to 90 attempts) for the remote file to appear, then grab it.
    # Skipped entirely if a previous run already downloaded the file.
    num_attempts = 1
    while num_attempts < 90 and not os.path.exists(full_name):
        if is_url_alive(url):
            # Log BEFORE the blocking download (original logged after it
            # had already finished).
            timestamp("[INFO]", "Downloading %s" % (full_name))
            # As long as we trust the input argument here. Don't extend this
            # to take a command-line argument, however.
            subprocess.call(arg, shell=True)
            break
        timestamp("[WARN]", "Can't find %s. Sleeping." % (url))
        num_attempts += 1
        time.sleep(SLEEP_TIME)
        timeout_check()

    # Verify the local file size; re-download (up to 10 attempts) if the
    # file looks truncated.
    num_attempts = 1
    file_size = get_filesize(full_name)
    while file_size < 10 and num_attempts < 10:
        timestamp("[WARN]", "File not of expected size. Re-downloading")
        subprocess.call(arg, shell=True)
        num_attempts += 1
        time.sleep(SLEEP_TIME)
        timeout_check()
        # Re-check the size AFTER the fresh download; the original refreshed
        # it before re-downloading, so a successful retry still triggered
        # one spurious warning + extra download.
        file_size = get_filesize(full_name)
# NOTE(review): removed a stray module-level duplicate of get_ww3()'s
# file-size retry loop -- it referenced the function locals full_name/arg
# and would raise NameError at import time.
ap = argparse.ArgumentParser()
ap.add_argument('-t', '--time-str', dest="time_str", help="YYYY-MM-DD/HH")
args = ap.parse_args()

if not args.time_str:
    # No arguments passed. Cycle time will be the current hour.
    NOW = datetime.utcnow()
    time_str = NOW.strftime('%Y-%m-%d/%H')
else:
    # User-specified cycle time
    time_str = args.time_str

# Wall-clock start consumed by timeout_check() to enforce TIME_LIMIT.
THEN = time.time()
timestamp("[INFO]", "Starting WW3 Download for %s" % (time_str))
get_ww3(time_str)
timestamp("[INFO]", "Download Complete")