Example #1
def metpy_read_wrf(tidx, froms, froms0, fwrf, lons_out, lats_out):

    wrfin = Dataset(fwrf)

    ds = xr.Dataset()

    slp = getvar(wrfin, "slp", timeidx=tidx)
    ds['slp'] = smooth2d(slp, 3, cenweight=4)

    lats, lons = latlon_coords(slp)
    ds['lats'] = lats
    ds['lons'] = lons
    landmask = extract_vars(wrfin, timeidx=tidx,
                            varnames=["LANDMASK"]).get("LANDMASK")
    u10 = extract_vars(wrfin, timeidx=tidx, varnames=["U10"]).get("U10")
    v10 = extract_vars(wrfin, timeidx=tidx, varnames=["V10"]).get("V10")
    ds['u10'] = u10.where(landmask == 0)
    ds['v10'] = v10.where(landmask == 0)
    latent = extract_vars(wrfin, timeidx=tidx, varnames=["LH"]).get("LH")
    ds['latent'] = smooth2d(latent, 3, cenweight=4)  #latent.where(landmask==0)
    t2m = extract_vars(wrfin, timeidx=tidx, varnames=["T2"]).get("T2") - 273.15
    ds['t2m'] = smooth2d(t2m, 3, cenweight=4)
    sst = extract_vars(wrfin, timeidx=tidx, varnames=["SST"]).get("SST")
    ds['sst'] = sst.where(landmask == 0)

    romsin = xr.open_dataset(froms)
    romsin = romsin.rename({"lat_rho": "lat", "lon_rho": "lon"})
    romsin = romsin.isel(ocean_time=tidx)

    ds['sst_5m'] = romsin.isel(z_r=0).temp
    ds['water_temp'] = romsin.temp
    ds['water_ucur'] = romsin.ucur / 100
    ds['water_vcur'] = romsin.vcur / 100
    ds.water_ucur.attrs['units'] = 'm/s'
    ds.water_vcur.attrs['units'] = 'm/s'

    romsin = xr.open_dataset(froms0)
    romsin = romsin.rename({"lat_rho": "lat", "lon_rho": "lon"})
    romsin = romsin.isel(ocean_time=tidx)
    ds['h'] = romsin.h

    mld = get_oml_depth(froms0, t_in=tidx)
    mld = smooth2d(mld, 3, cenweight=4)
    ds['oml_depth'] = xr.DataArray(mld, ds.sst_5m.coords, ds.sst_5m.dims,
                                   ds.sst_5m.attrs)
    ds['oml_depth2'] = ds.oml_depth.where(ds.h > 20)

    ds = ds.drop(['XLONG', 'XLAT', 'XTIME', 'Time'])
    ds = ds.rename({'south_north': 'eta_rho', 'west_east': 'xi_rho'})

    interp_method = 'bilinear'
    ds_out = xr.Dataset({
        'lat': (['lat'], lats_out),
        'lon': (['lon'], lons_out)
    })
    regridder = xe.Regridder(ds, ds_out, interp_method)
    regridder.clean_weight_file()
    ds = regridder(ds)
    ds = ds.squeeze()

    dxy = 10000.

    ds = ds.metpy.parse_cf().squeeze()

    utau, vtau = wind_stress(to_np(ds.u10), to_np(ds.v10))
    ds['u_tau'] = xr.DataArray(utau, ds.u10.coords, ds.u10.dims, ds.u10.attrs)
    ds['v_tau'] = xr.DataArray(vtau, ds.v10.coords, ds.v10.dims, ds.v10.attrs)
    curl = mpcalc.vorticity(utau * units('m/s'),
                            vtau * units('m/s'),
                            dx=dxy * units.meter,
                            dy=dxy * units.meter)
    ds['wind_stress_curl'] = xr.DataArray(np.array(curl), ds.u10.coords,
                                          ds.u10.dims, ds.u10.attrs)

    div = []
    for z in range(len(ds.z_r)):
        div0 = mpcalc.divergence(ds.water_ucur.isel(z_r=z) * units('m/s'),
                                 ds.water_vcur.isel(z_r=z) * units('m/s'),
                                 dx=dxy * units.meter,
                                 dy=dxy * units.meter)
        div.append(div0)
    ds['cur_div'] = xr.DataArray(np.array(div), ds.water_ucur.coords,
                                 ds.water_ucur.dims, ds.water_ucur.attrs)

    div = mpcalc.divergence(ds.water_ucur.isel(z_r=2) * units('m/s'),
                            ds.water_vcur.isel(z_r=2) * units('m/s'),
                            dx=dxy * units.meter,
                            dy=dxy * units.meter)
    ds['surf_cur_div'] = xr.DataArray(np.array(div), ds.u10.coords,
                                      ds.u10.dims, ds.u10.attrs)

    print(ds)
    return ds
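# Hypothetical usage sketch (added; not part of the original example). Only the
# call signature comes from the function above: the file names and the 1-D
# target grid handed to xESMF are assumptions for illustration.
import numpy as np

lats_out = np.arange(15.0, 30.0, 0.1)      # assumed regular target latitudes
lons_out = np.arange(110.0, 130.0, 0.1)    # assumed regular target longitudes
ds0 = metpy_read_wrf(tidx=0,
                     froms='roms_his.nc',   # hypothetical ROMS history file
                     froms0='roms_grd.nc',  # hypothetical ROMS file with h
                     fwrf='wrfout_d01',     # hypothetical WRF output file
                     lons_out=lons_out,
                     lats_out=lats_out)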
Example #2
#====================INPUT===================
#all common variables are stored separately
imp.reload(plume)  #force load each time
#=================end of input===============

print('ANALYSIS OF PLUME CROSS-SECTIONS')
print('===================================')

for nCase, Case in enumerate(plume.tag):
    print('Examining case: %s ' % Case)

    #----------check for interpolated data----------------------------
    interppath = plume.wrfdir + 'interp/wrfinterp_' + Case + '.npz'
    wrfpath = plume.wrfdir + 'wrfout_' + Case
    wrfdata = netcdf.netcdf_file(wrfpath, mode='r')
    ncdict = wrf.extract_vars(wrfdata, None, ('GRNHFX',))
    wrfdata.close()

    if os.path.isfile(interppath):
        print('Interpolated data found at: %s' % interppath)
        interpdict = np.load(interppath, allow_pickle=True).item()  # load the pickled dictionary written by prep_plumes.py
    else:
        sys.exit(
            'ERROR: no interpolated data found - run prep_plumes.py first!')

    #convert and average data------------------------------------------
    ghfx = ncdict['GRNHFX'] / 1000.  #convert to kW
    qvapor = interpdict['QVAPOR'] * 1000.  #convert to g/kg
    temp = interpdict['T'] + 300.  #add perturbation and base temperature
    w = interpdict['W']
    u = interpdict['U']
Example #3
lvl = np.arange(0, 2500, 40)  #vertical levels in m

#=================end of input===============

print('FIRE CROSS-SECTION HEAT FLUX AND W')
print('===================================')

#import data
wrfpath = wrfdir + 'wrfout_' + tag

print('Extracting NetCDF data from %s ' % wrfpath)
wrfdata = netcdf.netcdf_file(wrfpath, mode='r')

#prep WRF data----------------------------------------------------
ncdict = wrf.extract_vars(
    wrfdata, None,
    ('GRNHFX', 'W', 'QVAPOR', 'T', 'PHB', 'PH', 'U', 'P', 'PB', 'V'))

#get height and destagger vars
zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
z = wrf.destagger(zstag, 1)
u = wrf.destagger(ncdict['U'], 3)
v = wrf.destagger(ncdict['V'], 2)
p = ncdict['P'] + ncdict['PB']
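# Note (added): wrf.destagger averages the two neighbouring values along the
# staggered dimension, so the vertical destaggering above is equivalent to
# z = 0.5 * (zstag[:, :-1, :, :] + zstag[:, 1:, :, :]).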

interppath = wrfdir + 'interp/wrfinterp_' + tag + '.npy'
if os.path.isfile(interppath):
    interpdict = np.load(interppath, allow_pickle=True).item()  # load the pickled dictionary of interpolated fields
    # qinterp, winterp, interpt = interpdict[()]['QVAPOR'],interpdict[()]['W'], interpdict[()]['T']
    print('Interpolated data found at: %s' % interppath)
else:
Example #4
import imp

#====================INPUT===================
#all common variables are stored separately
import rxcadreMOIST as rx
imp.reload(rx)  #force load each time
#====================INPUT===================
testTime = 120  #min since start
testLvl = 17  #height level to run the analysis on
blLvl = 28  #level of BL top
#=================end of input===============

#extract data from bubble simulation
print('Extracting NetCDF data from %s ' % rx.spinup_path)
wrfdata = netcdf.netcdf_file(rx.spinup_path, mode='r')
ncdict = wrf.extract_vars(wrfdata, None, ('W', 'T', 'PHB', 'PH', 'XTIME'))
time1 = np.where(ncdict['XTIME'] == testTime)[0][0]
w1 = ncdict['W'][time1, :blLvl, :, :].ravel()
t1 = (ncdict['T'][time1, :blLvl, :, :] + 300).ravel()
t1BL = (ncdict['T'][time1, :blLvl, :, :] + 300).ravel()

#extract data from perturbation simulation
spinup_tsk = rx.spinup_path[:-6] + 'tsk'
print('Extracting NetCDF data from %s ' % spinup_tsk)
wrfdata_tsk = netcdf.netcdf_file(spinup_tsk, mode='r')
ncdict_tsk = wrf.extract_vars(wrfdata_tsk, None,
                              ('W', 'T', 'PHB', 'PH', 'XTIME'))
time2 = np.where(ncdict_tsk['XTIME'] == testTime)[0][0]
w2 = ncdict_tsk['W'][time2, :blLvl, :, :].ravel()
t2 = (ncdict_tsk['T'][time2, :blLvl, :, :] + 300).ravel()
t2BL = (ncdict_tsk['T'][time2, :blLvl, :, :] + 300).ravel()
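# Hedged sketch (added; an assumed follow-up, not from the source): a quick
# comparison of the boundary-layer vertical-velocity samples from the spinup
# and the TSK-perturbed run extracted above.
import matplotlib.pyplot as plt

plt.figure()
plt.hist(w1, bins=50, alpha=0.5, label='spinup W')
plt.hist(w2, bins=50, alpha=0.5, label='perturbed W')
plt.xlabel('W (m/s)')
plt.ylabel('count')
plt.legend()
plt.show()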
Example #5
def main():
    # Get files from command line or take hard-coded folder.
    # Arguments are optional, but if one is specified, they all should be:
    # vertical_profile_plots.py [input_pattern] [output_directory] [num_threads]
    # you can use a wildcard pattern:
    # i.e., python vertical_profile_plots.py ../output_files/wrfout* ../plots/ 8
    # or you can list the input files:
    # i.e., python vertical_profile_plots.py ../output_files/wrfout_d01 ../output_files/wrfout_d02 ../plots/ 1
    if len(sys.argv) > 1:
        filenames = sys.argv[1:-2]  #glob.glob(sys.argv[1])
        print(filenames)
        output_dir = sys.argv[-2]
        wrf.omp_set_num_threads(int(sys.argv[-1]))
    else:
        filenames = glob.glob(
            "/project/ssmith_uksr/WRF_ARW/cases/eclipse_2017/eclipse_model_on_5_dom_out/wrfout_d01*"
        )
        output_dir = ''

    # Get data from published sensor data
    ws_workbook = xlrd.open_workbook(
        '/project/ssmith_uksr/WRF_ARW/DATA/2017_eclipse_observed/Weather_Station_data.xlsx'
    )
    ws_first_sheet = ws_workbook.sheet_by_index(0)
    tower_workbook = xlrd.open_workbook(
        '/project/ssmith_uksr/WRF_ARW/DATA/2017_eclipse_observed/Tower_data.xlsx'
    )
    tower_first_sheet = tower_workbook.sheet_by_index(0)
    soil_workbook = xlrd.open_workbook(
        '/project/ssmith_uksr/WRF_ARW/DATA/2017_eclipse_observed/Soil_data.xlsx'
    )
    soil_first_sheet = soil_workbook.sheet_by_index(0)

    time_ws, temp_ws, rad_ws, wspd_ws, wdir_ws = get_observed_series(
        ws_first_sheet)
    time_tower, temp_tower, _, wspd_tower, wdir_tower = get_observed_series(
        tower_first_sheet)
    time_soil, temp_soil, _, _, _ = get_observed_series(soil_first_sheet)

    # Coordinates to take sample from
    center_lat = 36.797326
    center_lon = -86.812341

    for filename in sorted(filenames):
        print(filename)
        #Structure the WRF output file
        ncfile = Dataset(filename)

        #Extract data from WRF output files
        tc = wrf.getvar(ncfile, "tc",
                        wrf.ALL_TIMES)  # Atmospheric temperature in celsius
        t2 = wrf.getvar(ncfile, "T2",
                        wrf.ALL_TIMES)  # Temperature at 2 m, in Kelvin
        # Convert T2 to degrees C
        t2 = t2 - 273.15
        t2.attrs["units"] = "degC"
        theta = wrf.getvar(ncfile, "theta", wrf.ALL_TIMES, units="degC")
        rh = wrf.getvar(ncfile, "rh", wrf.ALL_TIMES)
        wspd_wdir = wrf.getvar(ncfile, "uvmet_wspd_wdir", wrf.ALL_TIMES)
        # Split wind speed and direction
        wspd = wspd_wdir[0, :, :, :, :]
        wdir = wspd_wdir[1, :, :, :, :]

        # These variables aren't included in getvar, so have to be extracted manually
        swdown = wrf.extract_vars(ncfile, wrf.ALL_TIMES,
                                  "SWDOWN").get('SWDOWN')
        gnd_flx = wrf.extract_vars(ncfile, wrf.ALL_TIMES,
                                   "GRDFLX").get('GRDFLX')

        #Create dictionary to associate quantity names with the corresponding data
        two_dim_vars = {'swdown': swdown, 'gnd_flx': gnd_flx, 'T2': t2}
        three_dim_vars = {'tc': tc, 'theta': theta, 'rh': rh, 'wspd': wspd}

        #Get the grid coordinates from our earth lat/long coordinates
        center_x, center_y = wrf.ll_to_xy(ncfile, center_lat, center_lon)

        # Plot all 3D variables over time
        for var_name, var_data in three_dim_vars.items():
            # Convert to Local Time
            var_data = to_local_time(var_data)

            # Get data frequency
            freq = pd.Timedelta(var_data["Time"][1].values -
                                var_data["Time"][0].values)

            # Interpolate to height above ground level
            try:
                var_data_agl = wrf.vinterp(ncfile,
                                           var_data,
                                           'ght_agl',
                                           np.linspace(0, 0.1, 100),
                                           field_type=var_name,
                                           timeidx=wrf.ALL_TIMES)
            except ValueError:
                var_data_agl = wrf.vinterp(ncfile,
                                           var_data,
                                           'ght_agl',
                                           np.linspace(0, 0.1, 100),
                                           field_type='none',
                                           timeidx=wrf.ALL_TIMES)

            # Convert height to meters
            var_data_agl["interp_level"] = var_data_agl["interp_level"] * 1000

            # Time ranges
            plot_time_ranges = []
            plot_time_range1 = pd.date_range(start="2017-08-21T10:24:00",
                                             end="2017-08-21T14:33:00",
                                             freq=freq)
            plot_time_range1 = plot_time_range1.floor(freq)
            plot_time_range2 = pd.date_range(start="2017-08-21T13:09:00",
                                             end="2017-08-21T14:33:00",
                                             freq=freq)
            plot_time_range2 = plot_time_range2.floor(freq)
            plot_time_range3 = pd.date_range(start="2017-08-21T09:45:00",
                                             end="2017-08-21T15:00:00",
                                             freq=freq)
            plot_time_range3 = plot_time_range3.floor(freq)

            plot_time_ranges = [
                plot_time_range1, plot_time_range2, plot_time_range3
            ]

            # Vertical Profile Plots
            for plot_time_range in [
                    rng for rng in plot_time_ranges if len(rng) > 1
            ]:
                fig, ax = plt.subplots()
                var_data_agl.isel(
                    south_north=center_y,
                    west_east=center_x).sel(Time=plot_time_range,
                                            method="nearest").plot(ax=ax,
                                                                   x="Time")
                save_plot(ax=ax,
                          title='',
                          y_label="z (m)",
                          x_label="Local Time (CDT)",
                          var_name=var_name,
                          plot_type_name="vertical_profile",
                          plot_time_range=plot_time_range,
                          filename_in=filename,
                          output_dir=output_dir)
                plt.close(fig)

                # Line plots
                fig, ax = plt.subplots()
                if (var_name == 'tc'):
                    ax.plot(time_ws,
                            temp_ws,
                            '^k-',
                            label='2.5m',
                            markevery=500)
                    ax.plot(time_soil,
                            temp_soil,
                            'vb-',
                            label='-0.02m',
                            markevery=500)
                if (var_name == 'wspd'):
                    y_label = "wind speed" + " (" + var_data.attrs[
                        "units"] + ")"
                    wspd_ws_rolling = pd.DataFrame(wspd_ws).rolling(
                        120).mean().values
                    wspd_tower_rolling = pd.DataFrame(wspd_tower).rolling(
                        120).mean().values
                    ax.plot(time_ws,
                            wspd_ws_rolling,
                            'c-',
                            label='3 m, 2 min avg',
                            linewidth=0.5)
                    ax.plot(time_tower,
                            wspd_tower_rolling,
                            'k-',
                            label='7 m, 2 min avg',
                            linewidth=0.5,
                            zorder=0)
                var_data.isel(bottom_top=0,
                              south_north=center_y,
                              west_east=center_x).sel(Time=plot_time_range,
                                                      method="nearest").plot(
                                                          ax=ax,
                                                          x="Time",
                                                          label="WRF-Eclipse",
                                                          color="orange")
                y_label = var_data.name + " (" + var_data.attrs["units"] + ")"
                save_plot(ax=ax,
                          title='',
                          y_label=y_label,
                          x_label="Local Time (CDT)",
                          var_name=var_name,
                          plot_type_name="line_plot",
                          plot_time_range=plot_time_ranges[2],
                          filename_in=filename,
                          output_dir=output_dir)
                plt.close(fig)

        # Plot 2D values
        for var_name, var_data in two_dim_vars.items():
            # Convert to Local Time
            var_data = to_local_time(var_data)

            # Line plots
            fig, ax = plt.subplots()
            y_label = var_data.name + " (" + var_data.attrs["units"] + ")"
            if (var_name == 'swdown'):
                ax.plot(time_ws,
                        rad_ws,
                        'or-',
                        label='measured',
                        linewidth=0.5,
                        markevery=500)
                y_label = "solar radiation" + " (" + var_data.attrs[
                    "units"] + ")"
            if (var_name == 'T2'):
                ax.plot(time_ws, temp_ws, '^k-', label='2.5m', markevery=500)
                ax.plot(time_soil,
                        temp_soil,
                        'vb-',
                        label='-0.02m',
                        markevery=500)
                y_label = "temperature" + " (" + var_data.attrs["units"] + ")"
            var_data.isel(south_north=center_y,
                          west_east=center_x).sel(Time=plot_time_range,
                                                  method="nearest").plot(
                                                      ax=ax,
                                                      x="Time",
                                                      label="WRF-Eclipse",
                                                      color="orange")
            save_plot(ax=ax,
                      title='',
                      y_label=y_label,
                      x_label="Local Time (CDT)",
                      var_name=var_name,
                      plot_type_name="line_plot",
                      plot_time_range=plot_time_range,
                      filename_in=filename,
                      output_dir=output_dir)
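
# Hypothetical entry point (added; not shown in the original snippet): a
# standard guard so the script can be run directly, e.g.
#   python vertical_profile_plots.py ../output_files/wrfout* ../plots/ 8
if __name__ == "__main__":
    main()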
Example #6
from __future__ import print_function

import time
from netCDF4 import Dataset
from wrf import getvar, ALL_TIMES, extract_vars

wrf_filenames = [
    "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-28_00:00:00",
    "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-28_12:00:00",
    "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-29_00:00:00"
]

wrfin = [Dataset(x) for x in wrf_filenames]
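# Note (added): the dictionary built below with extract_vars can be passed to
# getvar via its cache argument so the base variables (P, PB, PH, ...) are read
# from memory instead of being re-extracted for every diagnostic in the loop.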

my_cache = extract_vars(
    wrfin, ALL_TIMES,
    ("P", "PB", "PH", "PHB", "T", "QVAPOR", "HGT", "U", "V", "W", "PSFC"))

start = time.time()
for var in ("avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", "geopt",
            "helicity", "lat", "lon", "omg", "p", "pressure", "pvo", "pw",
            "rh2", "rh", "slp", "ter", "td2", "td", "tc", "theta", "tk", "tv",
            "twb", "updraft_helicity", "ua", "va", "wa", "uvmet10", "uvmet",
            "z", "cfrac", "zstag", "geopt_stag"):
    v = getvar(wrfin, var, ALL_TIMES)
end = time.time()

print("Time taken without variable cache: ", (end - start))

start = time.time()
for var in ("avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", "geopt",
Example #7
    #----------check for interpolated data----------------------------
    interppath = wrfdir + 'interp/wrfinterp_' + Case + '.npy'

    if os.path.isfile(interppath):

        print('Interpolated data found at: %s' % interppath)
        interpfile = open(interppath, 'rb')
        interpdict = pickle.load(interpfile)  # load here the above pickle

    else:
        print(
            'WARNING: no interpolated data found - generating: SLOW ROUTINE!')
        wrfpath = wrfdir + 'wrfout_' + Case
        wrfdata = netcdf.netcdf_file(wrfpath, mode='r')
        ncdict = wrf.extract_vars(wrfdata,
                                  None, ('GRNHFX', 'W', 'QVAPOR', 'T', 'PHB',
                                         'PH', 'U', 'P', 'PB', 'V', 'tr17_1'),
                                  meta=False)
        ncdict['PM25'] = ncdict.pop('tr17_1')

        #get height and destagger vars
        zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
        z = wrf.destagger(zstag, 1)
        u = wrf.destagger(ncdict['U'], 3)
        w = wrf.destagger(ncdict['W'], 1)
        v = wrf.destagger(ncdict['V'], 2)

        #list variables to interpolate
        nT, nZ, nY, nX = np.shape(z)

        winterp = np.empty((nT, len(lvl), nY, nX)) * np.nan
        uinterp = np.empty((nT, len(lvl), nY, nX)) * np.nan
Example #8
time_data = np.genfromtxt(datapath,
                          usecols=(0),
                          skip_header=3,
                          delimiter=',',
                          converters={0: str2date},
                          dtype=str)
obs_W = obs_data[:, 0]
obs_H = obs_data[:, 1] * 1000 / (hfx_sensor_conversion)

#get model data
print('Extracting NetCDF data from %s ' % rx.wrfdata)
nc_data = netcdf.netcdf_file(rx.wrfdata, mode='r')
UTMx = nc_data.variables['XLONG'][0, :, :] + rx.ll_utm[0]
UTMy = nc_data.variables['XLAT'][0, :, :] + rx.ll_utm[1]

ncdict = wrf.extract_vars(nc_data, None, ('PHB', 'PH', 'W'))

#get height and destagger
if os.path.isfile(rx.z_path):
    print('.....loading destaggered height array from %s' % rx.z_path)
    z = np.load(rx.z_path)
else:
    print('.....destaggering height data')
    zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
    z = wrf.destagger(zstag, 1)
    np.save(rx.z_path, z)

print('.....destaggering model W')
w = wrf.destagger(ncdict['W'], 1)

nT, nZ, nY, nX = np.shape(z)
Example #9
             44371]  #start and end time of pre-burn corkscrew for background
garage_ssm = [45000, 47200]  #start and end time of garage profile

animations = 0
#=================end of input===============
print('ANALYSIS OF VERTICAL PLUME RISE AND DISPERSION')

print('.....extracting NetCDF data from %s ' % rx.wrfdata)
ncdata = netcdf.netcdf_file(rx.wrfdata, mode='r')

#load georeferencing data for the same run
print('.....importing wrf coordinates from  %s ' % rx.geo_path)
wrfgeo = np.load(rx.geo_path, allow_pickle=True).item()

#get variables
ncdict = wrf.extract_vars(ncdata, None,
                          ('PHB', 'PH', 'XTIME', 'tr17_1', 'tr17_2'))
ncdict['CO2'] = ncdict.pop('tr17_1')
ncdict['CO'] = ncdict.pop('tr17_2')
tracers = ['CO', 'CO2']

#get height and destagger
if os.path.isfile(rx.z_path):
    print('.....loading destaggered height array from %s' % rx.z_path)
    z = np.load(rx.z_path)
else:
    print('.....destaggering height data')
    zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
    z = wrf.destagger(zstag, 1)
    np.save(rx.z_path, z)

#get domain dimensions
Example #10
                         '%H:%M:%S').time()), dt.datetime.combine(
                             basetime,
                             dt.datetime.strptime(rx.corkscrew[1],
                                                  '%H:%M:%S').time())
gg_start, gg_end = dt.datetime.combine(
    basetime,
    dt.datetime.strptime(rx.garage[0],
                         '%H:%M:%S').time()), dt.datetime.combine(
                             basetime,
                             dt.datetime.strptime(rx.garage[1],
                                                  '%H:%M:%S').time())

#import wrf data
print('Extracting NetCDF data from %s ' % rx.spinup_path)
wrfdata = netcdf.netcdf_file(rx.spinup_path, mode='r')
ncdict = wrf.extract_vars(wrfdata, None, ('T', 'PHB', 'PH', 'QVAPOR', 'XTIME'))

#get geopotential array and convert to height
zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
z = wrf.destagger(zstag, 1)
z_ave = np.mean(z, (0, 2, 3))
# # START AT PATH EXTRACTION (FOR HIGH FREQUENCY DATA)
#
# model_datetime = [basetime + dt.timedelta(hours=int(rx.runstart[:2]),minutes=t) for t in ncdict['XTIME']]
# #get indecies of samples corresponding to model output times
# tidx = [np.argmin(abs(disp_dict['time'] - t)) for t in model_datetime] #times since start
#
# #construct KDtree from idealized grid
# lat = wrfdata.variables['XLAT'][0,:,:]
# lon = wrfdata.variables['XLONG'][0,:,:]
# grid_coord = list(zip(lat.ravel(),lon.ravel()))
Example #11
str2date = lambda x: datetime.strptime(x.decode("utf-8"), '%y-%m-%d %H:%M'
                                       ) - timedelta(hours=6)
time_data = np.genfromtxt(datapath,
                          usecols=(0),
                          skip_header=3,
                          delimiter=',',
                          converters={0: str2date},
                          dtype=str)

#get model data
print('Extracting NetCDF data from %s ' % rx.wrfdata)
nc_data = netcdf.netcdf_file(rx.wrfdata, mode='r')
UTMx = nc_data.variables['XLONG'][0, :, :] + rx.ll_utm[0]
UTMy = nc_data.variables['XLAT'][0, :, :] + rx.ll_utm[1]

ncdict = wrf.extract_vars(nc_data, None, ('PHB', 'PH', 'U', 'V'), meta=False)

#get height and destagger vars
print('...destaggering data')
zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
z = wrf.destagger(zstag, 1)
u = wrf.destagger(ncdict['U'], 3)
v = wrf.destagger(ncdict['V'], 2)

nT, nZ, nY, nX = np.shape(z)

#create timeseries of CSU wind
print('-->Creating OBS timeseries')
num_pts = int(freq * ave_int)
data_samples = int(np.shape(obs_data)[0] / num_pts)
uCSU = []
Example #12
#====================INPUT===================
#import all common project variables
import plume
imp.reload(plume)  #force load each time

#====================INPUT===================

testLvl = 30  #height level to run the analysis on
#=================end of input===============

print('Extracting NetCDF data from %s ' % plume.wrfdir)
wrfdata = netcdf.netcdf_file(plume.wrfdir + 'wrfout_W5F7R5Tspinup', mode='r')

ncdict = wrf.extract_vars(wrfdata,
                          None, ('U', 'V', 'W', 'T', 'PHB', 'PH'),
                          meta=False)
u = wrf.destagger(ncdict['U'], 3)
v = wrf.destagger(ncdict['V'], 2)
w = wrf.destagger(ncdict['W'], 1)
t = ncdict['T']

print('.....destaggering height data')
zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
zstag_ave = np.mean(zstag, (2, 3))
z = wrf.destagger(zstag_ave, 1)

z_ave = np.mean(z, (0))

nT, nZ1, nY, nX = np.shape(zstag)
nZ = nZ1 - 1
Example #13
def metpy_read_wrf(fwrf, fout, lons_out, lats_out):

    wrfin = Dataset(fwrf)
    ds = xr.Dataset()

    slp = getvar(wrfin, "slp", timeidx=ALL_TIMES)
    ds['slp'] = smooth2d(slp, 3, cenweight=4)

    lats, lons = latlon_coords(slp)
    ds['lats'] = lats
    ds['lons'] = lons

    rainc = []
    for tidx in range(len(ds.Time)):
        rainc0 = extract_vars(wrfin, timeidx=tidx,
                              varnames=["RAINNC"]).get("RAINNC")
        if tidx > 0:
            rainc0 -= extract_vars(wrfin,
                                   timeidx=tidx - 1,
                                   varnames=["RAINNC"]).get("RAINNC")
        rainc.append(smooth2d(rainc0, 3, cenweight=4))
    ds['rain'] = xr.DataArray(rainc, ds.slp.coords, ds.slp.dims, ds.slp.attrs)
    ds['latent'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                                varnames=["LH"]).get("LH")
    ds['u10'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                             varnames=["U10"]).get("U10")
    ds['v10'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                             varnames=["V10"]).get("V10")
    ds['t2m'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                             varnames=["T2"]).get("T2")
    ds['q2m'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                             varnames=["Q2"]).get("Q2")
    ds['sst'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                             varnames=["SST"]).get("SST")
    ds['td2'] = getvar(wrfin, "td2", timeidx=ALL_TIMES)
    ds['th2'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                             varnames=["TH2"]).get("TH2")
    ds['mask'] = extract_vars(wrfin, timeidx=ALL_TIMES,
                              varnames=["LANDMASK"]).get("LANDMASK")

    ds['p'] = getvar(wrfin, "pressure", timeidx=ALL_TIMES)
    ds['z'] = getvar(wrfin, "geopt", timeidx=ALL_TIMES)
    ds['u'] = getvar(wrfin, "ua", timeidx=ALL_TIMES)
    ds['v'] = getvar(wrfin, "va", timeidx=ALL_TIMES)
    ds['w'] = getvar(wrfin, "wa", timeidx=ALL_TIMES)
    ds['omega'] = getvar(wrfin, "omega", timeidx=ALL_TIMES)
    ds['tk'] = getvar(wrfin, "tk", timeidx=ALL_TIMES)
    ds['th'] = getvar(wrfin, "th", timeidx=ALL_TIMES, units='K')
    ds['eth'] = getvar(wrfin, "eth", timeidx=ALL_TIMES, units='K')
    ds['avo'] = getvar(wrfin, "avo", timeidx=ALL_TIMES)
    ds['pvo'] = getvar(wrfin, "pvo", timeidx=ALL_TIMES)
    ds['wspd'] = getvar(wrfin, "wspd_wdir", timeidx=ALL_TIMES,
                        units="m/s")[0, :]

    ds = ds.rename({'south_north': 'eta_rho', 'west_east': 'xi_rho'})
    ds = ds.rename({"XLAT": "lat", "XLONG": "lon"})
    ds = ds.drop(['wspd_wdir'])

    interp_method = 'bilinear'
    ds_out = xr.Dataset({
        'lat': (['lat'], lats_out),
        'lon': (['lon'], lons_out)
    })
    regridder = xe.Regridder(ds, ds_out, interp_method)
    regridder.clean_weight_file()
    ds = regridder(ds)
    ds = ds.squeeze()

    #accrain = ds.rain.rolling(Time=6, center=True).sum()
    #acclatent = ds.latent.rolling(Time=6, center=True).sum()
    #ds = ds.rolling(Time=6, center=True).mean()
    accrain = ds.rain.rolling(Time=12, center=False).sum()
    acclatent = ds.latent.rolling(Time=12, center=False).sum()
    #ds = ds.rolling(Time=6, center=True).mean()
    ds['accrain'] = accrain
    ds['acclatent'] = acclatent

    ds.to_netcdf(fout)

    return ds
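# Hypothetical usage sketch (added): the paths and target grid are assumptions;
# only the signature of the function above is taken from the example. The
# regridded fields are also written to the NetCDF file given by fout.
import numpy as np

lats_out = np.arange(15.0, 30.0, 0.25)     # assumed target latitudes
lons_out = np.arange(110.0, 130.0, 0.25)   # assumed target longitudes
ds_all = metpy_read_wrf('wrfout_d01',      # hypothetical WRF output file
                        'wrf_regrid.nc',   # hypothetical output NetCDF path
                        lons_out, lats_out)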
Example #14
import matplotlib.pyplot as plt
from scipy.io import netcdf
from scipy import interpolate
import os.path
import wrf

#====================INPUT===================
wrfpath = '/Users/nmoisseeva/data/plume/RxCADRE/Feb2019/wrfout_L2G_cat1obs_spinup'
fig_dir = '/Users/nmoisseeva/code/plume/figs/RxCADRE/'
tsample = 30  #min between samples
#=================end of input===============

print('Extracting NetCDF data from %s ' % wrfpath)
wrfdata = netcdf.netcdf_file(wrfpath, mode='r')

ncdict = wrf.extract_vars(wrfdata, None, ('T', 'U', 'V', 'W', 'PHB', 'PH'))
# u = wrf.destagger(ncdict['U'],3)
# v = wrf.destagger(ncdict['V'],2)
# w = wrf.destagger(ncdict['W'],1)

#get geopotential array and convert to height
zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
z = wrf.destagger(zstag, 1)
z_ave = np.mean(z, (0, 2, 3))
nT, nZ, nY, nX = np.shape(z)

plt.figure()
for time in range(0, nT, tsample):
    print(time)
    plt.plot(ncdict['T'][time, :, int(nY / 2),
                         int(nX / 2)],
Example #15
# [email protected]
# June 2018


import numpy as np
import matplotlib.pyplot as plt
from scipy.io import netcdf
import sys
import imp
import wrf
#====================INPUT===================
#all common variables are stored separately
import plume
imp.reload(plume) 	#force load each time

testpath = plume.wrfdir + 'wrfout_W3S200F7R2_short'
#=================end of input===============

testdata = netcdf.netcdf_file(testpath, mode='r')
ncdict = wrf.extract_vars(testdata, None, ('QVAPOR', 'tr17_1'))
ncdict['PM25'] = ncdict.pop('tr17_1')

plt.plot(ncdict['PM25'][70, :, 75, 100], 'r--')
ax1 = plt.gca()
ax1.set_ylim([0, max(ncdict['PM25'][70, :, 75, 100])])
ax2 = plt.gca().twinx()
plt.plot(ncdict['QVAPOR'][70, :, 75, 100])
ax2.set_ylim([0, max(ncdict['QVAPOR'][70, :, 75, 100])])
plt.show()
Example #16
#====================INPUT===================
#all common variables are stored separately
import rxcadreMOIST as rx
imp.reload(rx)  #force load each time

#====================INPUT===================

testLvl = 5  #height level to run the analysis on
xstep = 40  #grid spacing in m
#=================end of input===============

print('Extracting NetCDF data from %s ' % rx.spinup_path)
wrfdata = netcdf.netcdf_file(rx.spinup_path, mode='r')

ncdict = wrf.extract_vars(wrfdata, None, ('U', 'V', 'W', 'T', 'PHB', 'PH'))  #PHB/PH needed if z must be recomputed below
u = wrf.destagger(ncdict['U'], 3)
v = wrf.destagger(ncdict['V'], 2)
w = wrf.destagger(ncdict['W'], 1)

#get height and destagger
if os.path.isfile(rx.z_path):
    print('.....loading destaggered height array from %s' % rx.z_path)
    z = np.load(rx.z_path)
else:
    print('.....destaggering height data')
    zstag = (ncdict['PHB'] + ncdict['PH']) / 9.81
    z = wrf.destagger(zstag, 1)
    np.save(rx.z_path, z)

z_ave = np.mean(z, (0, 2, 3))
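
# Hedged sketch (added; an assumed quick check, not from the source): plot the
# domain- and time-mean vertical-velocity profile against the mean destaggered
# heights computed above.
import matplotlib.pyplot as plt

w_ave = np.mean(w, (0, 2, 3))   # average W over time and both horizontal dims
plt.figure()
plt.plot(w_ave, z_ave)
plt.xlabel('mean W (m/s)')
plt.ylabel('height (m)')
plt.show()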