#read in command line variables if platform.system() == 'Linux': base_dir = '/home/snegusse/modeling/brazos_river' data_dir = '/home/snegusse/tmp' tec_filename = 'tdff_curt.dat' curtain_filename = 'brazos_centerline.shp' curtain_file = os.path.join(base_dir, curtain_filename) tec_file = os.path.join(data_dir, tec_filename) param = 'tdff.63' sfile = 19 nfile = 1 model = pyselfe.Dataset(os.path.join(data_dir, str(sfile) + '_' + param)) # Read in xy/node locations of profile line cx, cy = read_curtain_shapefile(curtain_file) #take every third point and remove points upstream of bz2 cx = cx[::-1] cy = cy[::-1] cx = cx[::3][-10:] cy = cy[::3][-10:] channel_orientation = calc_channel_orientation(cx, cy)
# NOTE(review): this chunk begins MID-dict-literal — the dict's name and its
# opening '{' lie above the visible region (keys look like per-profile
# trapezoidal quadrature weights: 0.5 at each end, 1.0 interior).
    'river_boundary': np.array([.5, 1, 1, .5]),
    'bz2': np.array([.5, 1., 1., 1., 1., .5]),
    'straight_ds_bz3': np.array([.5, 1, 1, 1, 1, .5]),
    'giww-u': np.array([.5, 1, 1, 1, 1, 1, .5]),
    'giww-d': np.array([.5, 1, 1, 1, 1, 1, .5]),
    'near_dow': np.array([.5, 1, 1, 1, 1, .5])
}

# accumulators keyed by site/profile name
sites_sal_data = {}
sites_xvel_data = {}
sites_dp_data = {}
salt_flux = {}
flow_cfs = {}

# one pyselfe reader per variable: (salinity, horizontal velocity)
mod_initial = (
    pyselfe.Dataset(os.path.join(mod_dir, sal_init_file)),
    pyselfe.Dataset(os.path.join(mod_dir, vel_init_file)),
)

# read_time_series returns [times, timestep, elevation, depth, data]
[sal_t, tstep, eta, dp, sal_data] = \
    mod_initial[0].read_time_series('salt.63', nfiles=nfiles,
                                    datadir=mod_dir, sfile=sfile)
[vel_t, tstep, eta, dp, vel_data] = \
    mod_initial[1].read_time_series('hvel.64', nfiles=nfiles,
                                    datadir=mod_dir, sfile=sfile)

for profile in profile_nodes.keys():
    flow_file = os.path.join(base_dir, mod_dir, profile + '_flow.csv')
    hdf5_file = os.path.join(base_dir, mod_dir, profile + '.h5')
    if os.path.exists(flow_file):
        # NOTE(review): chunk is truncated here — the read_csv call's remaining
        # arguments and the rest of the loop body lie below the visible region.
        flow_data = pd.read_csv(flow_file, sep=',',
# --- per-run model data: load from HDF5 cache, else read raw SELFE output ----
# NOTE(review): recovered from whitespace-mangled source; the placement of the
# *_mod_datetimes list-comprehensions inside the else-branch is a
# reconstruction (they consume sal_t/vel_t produced there) — confirm.

sites_uvel_data = {}
sites_vvel_data = {}
sites_dp_data = {}
mod_t = {}

for i in mod_dirs.keys():
    # cache file, e.g. "<run>_mod_data.h5"
    hdf5_file = os.path.join(base_dir, mod_dirs[i], i + '_' + \
                             'mod_data' + '.h5')
    if os.path.exists(hdf5_file):
        # cached results available: read the pre-extracted station series
        hdf5_storage = pd.io.pytables.HDFStore(hdf5_file, mode='r')
        sites_sal_data[i] = hdf5_storage['salinity']
        sites_uvel_data[i] = hdf5_storage['u_vel']
        sites_vvel_data[i] = hdf5_storage['v_vel']
        sites_dp_data[i] = hdf5_storage['depth']
    else:
        # no cache: read salinity and velocity straight from SELFE binaries
        # at the station coordinates in brazos_sites
        mod_initial[i] = (pyselfe.Dataset(os.path.join(mod_dirs[i],
                                                       sal_init_file)),
                          pyselfe.Dataset(os.path.join(mod_dirs[i],
                                                       vel_init_file)),)
        # read_time_series returns [times, timestep, elevation, depth, data]
        [sal_t, tstep, eta, dp, sal_data] = \
            mod_initial[i][0].read_time_series('salt.63',
                                               xy=brazos_sites.values,
                                               nfiles=nfiles,
                                               datadir=mod_dirs[i],
                                               sfile=sfile)
        [vel_t, tstep, eta, dp, vel_data] = \
            mod_initial[i][1].read_time_series('hvel.64',
                                               xy=brazos_sites.values,
                                               nfiles=nfiles,
                                               datadir=mod_dirs[i],
                                               sfile=sfile)
        # model times are seconds since the run start
        sal_mod_datetimes = [mod_start_datetime + pd.datetools.Second(dt)
                             for dt in sal_t]
        vel_mod_datetimes = [mod_start_datetime + pd.datetools.Second(dt)
                             for dt in vel_t]
# --- command-line handling and time-series extraction ------------------------
# Usage: <prog> datadir tecfile param nlevel nfile
# Prints the script's usage banner (the '##' comment lines located via
# `which <programname>`) and exits when arguments are missing or a help flag
# is given.
# FIX(review): the original guarded only `len(sys.argv) == 1`, so a partial
# argument list fell through and crashed with IndexError on sys.argv[2..5];
# require all five positional arguments before proceeding.
if len(sys.argv) < 6 or sys.argv[1] in ('help', '-help', '--help'):
    location = runBash("which " + programname)
    message = runBash("awk '/^##/ {print $0}' < " + location)
    sys.exit(message)

# read in command line variables
datadir = sys.argv[1]
tecfile = sys.argv[2]
param = sys.argv[3]
nlevel = sys.argv[4]          # vertical level index, or the string 'all'
nfile = int(sys.argv[5])      # number of sequential output files to read

# reader anchored on the first output file, e.g. "<datadir>/1_<param>"
model = pyselfe.Dataset(datadir + '/1_' + param)

# level = nlevel - 1 because of zero indexing
try:
    nlevel = int(nlevel)
    [t, t_iter, eta, dp, data] = model.read_time_series(param,
                                                        levels=nlevel - 1,
                                                        nfiles=nfile,
                                                        sfile=1,
                                                        datadir=datadir)
except ValueError:
    # nlevel was not an integer: 'all' means read every level and
    # depth-average (level 0 is excluded from the mean)
    if nlevel == 'all':
        [t, t_iter, eta, dp, data] = model.read_time_series(param,
                                                            nfiles=nfile,
                                                            sfile=1,
                                                            datadir=datadir)
        data = data[:,:,1:,:].mean(axis=2)
# --- Corpus Christi Bay: compare model output against monitoring stations ----
# NOTE(review): recovered from whitespace-mangled source, and the for-loop
# body continues PAST the end of this chunk — only its visible head is shown.

mon_sites_file = '../field_data/corpus_station_list.csv'
selfe_data_dir = '/home/snegusse/modeling/corpus_christi_bay/laquinta_current_modeling/depth_sensitivty/47ft/windon/outputs/'

# station table; column names taken from the CSV header row
mon_sites = np.genfromtxt(mon_sites_file, dtype=None, names=True,
                          delimiter=',', skip_header=1)

# convert from latlon to utm14
p = Proj(proj='utm', zone=14, ellps='WGS84')
xy = np.array(p(mon_sites['DDLon'], mon_sites['DDLat'])).transpose()

# initialize model data readers
selfe = pyselfe.Dataset(selfe_data_dir + '1_salt.63', nfiles=7)

# NOTE(review): the loop variable `xy` shadows the station-coordinate array
# of the same name being zipped, and `pd = []` shadows the conventional
# pandas alias — both look accidental, but the unseen remainder of the loop
# may rely on them, so they are flagged rather than renamed.
for site, xy in zip(mon_sites, xy):
    print 'processing site: ', site['Name']
    pd = []
    if 'tc0' in site['Name']:
        # TCOON station: parse its field-data CSV
        # NOTE(review): np.float is the deprecated alias for builtin float
        data = np.genfromtxt('../field_data/' + site['Name'] + '.csv',
                             delimiter=',',
                             names='datetime,water_level,water_temperature',
                             dtype=[datetime, np.float, np.float],
                             missing_values='NA',
                             converters={'datetime': mk_tcoon_date})
        d = {
            'water_level': data['water_level'],
            'water_temperature': data['water_temperature']
        }
        # (loop body continues beyond the visible region)