elif cthresh == '10mm':
    ithresh = 4
    cthresh_title = '$\geq$ 10 mm'
elif cthresh == '25mm':
    ithresh = 5
    cthresh_title = '$\geq$ 25 mm'
elif cthresh == '50mm':
    ithresh = 6
    cthresh_title = '$\geq$ 50 mm'
else:
    print 'Invalid threshold', cthresh
    print 'Please use POP, 1mm, 2p5mm, 5mm, 10mm, 25mm, 50mm'
    print 'Exiting.'
    sys.exit()

yyyy, mm, dd, hh = splitdate(cyyyymmddhh)
cyyyy = str(yyyy)
cdd = str(dd)
chh = str(hh)
cmonths = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
           'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
cmonth = cmonths[mm - 1]
iyyyymmddhh = int(cyyyymmddhh)

# ---- read in precipitation analysis

filename = data_directory + 'precip_analyses_ccpa_v1_2002010100_to_2016123100.nc'
print 'reading ', filename
nc = Dataset(filename)
elif cthresh == '10mm':
    ithresh = 4
    cthresh_title = '$\geq$ 10 mm'
elif cthresh == '25mm':
    ithresh = 5
    cthresh_title = '$\geq$ 25 mm'
elif cthresh == '50mm':
    ithresh = 6
    cthresh_title = '$\geq$ 50 mm'
else:
    print 'Invalid threshold', cthresh
    print 'Please use POP, 1mm, 2p5mm, 5mm, 10mm, 25mm, 50mm'
    print 'Exiting.'
    sys.exit()

yyyy, mm, dd, hh = splitdate(cyyyymmddhh)
cyyyy = str(yyyy)
cdd = str(dd)
chh = str(hh)
cmonths = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
           'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
cmonth = cmonths[mm - 1]
iyyyymmddhh = int(cyyyymmddhh)

yyyy_verif, mm_verif, dd_verif, hh_verif = splitdate(date_verif)
cyyyy_verif = str(yyyy_verif)
cdd_verif = str(dd_verif)
chh_verif = str(hh_verif)
cmonth_verif = cmonths[mm_verif - 1]

# ---- read in precipitation analysis
# ---- loop over case dates: for each date, read the verifying analysis,
#      the daily-mean climatology, and the forecast from the tiled
#      fv3_history files.
acglall = []
bias = None
ntime = None
for date in dates:
    datev = dateutils.dateshift(date, fhour)
    # read analysis
    filea = os.path.join(analpath, 'pgbanl.ecm.%s' % datev)
    grbs = pygrib.open(filea)
    grb = grbs.select(shortName=vargrb, level=level)[0]
    verif_data = grb.values[::-1, :]
    grbs.close()
    #print verif_data.shape, verif_data.min(), verif_data.max()
    # read climo
    grbsclimo = pygrib.open(
        os.path.join(climopath, 'cmean_1d.1959%s' % datev[4:8]))
    yyyy, mm, dd, hh = dateutils.splitdate(datev)
    grbclimo = grbsclimo.select(
        shortName=vargrb, level=level, dataTime=100 * hh)[0]
    climo_data = grbclimo.values[::-1, :]
    grbsclimo.close()
    #print climo_data.shape, climo_data.min(), climo_data.max()
    # read forecast data from tiled history files.
    cube_data = np.zeros((6, res, res), np.float32)
    for ntile in range(1, 7, 1):
        datafile = '%s/%s/longfcst/fv3_history.tile%s.nc' % (datapath, date, ntile)
        nc = Dataset(datafile)
        if ntime is None:
            times = nc['time'][:].tolist()
            ntime = times.index(
# ---- parse command-line arguments: initial date, first/last forecast hours,
#      forecast-hour increment, and (optionally) the run name.
date = sys.argv[1]
ifhr1 = int(sys.argv[2])
ifhr2 = int(sys.argv[3])
ifhrinc = int(sys.argv[4])
if len(sys.argv) > 5:
    run = sys.argv[5]
else:
    run = 'FIMY'

# ---- experiment name and plot output directory from the environment,
#      with defaults if EXPT is not set.
expname = os.getenv('EXPT')
pngdir = os.getenv('PNGDIR')
if expname is None:
    expname = 'gfsenkf_t574'
    pngdir = '/lfs1/projects/gfsenkf/hurrplots/%s' % date

hr = date[8:10]
yyyy, mm, dd, hh = splitdate(date)
julday = dayofyear(yyyy, mm, dd) + 1

# ---- find the reference-track files for this date, then prepend the
#      three basin identifiers.
datapath = '/lfs1/projects/gfsenkf/tcvitals'
globstring = datapath + '/reftrk*%s*' % date
print globstring
reftrks = glob.glob(globstring)
print reftrks
reftrks.insert(0, 'WPAC')
reftrks.insert(0, 'EPAC')
reftrks.insert(0, 'ATL')
# just use the three basins (no domains centered on storms)
#reftrks = ['ATL','EPAC','WPAC']