Example #1
def main():
    fig, ax = plt.subplots(row, col, figsize=figsize)
    wrf_file = Dataset(wrf_dir + wrf_name)
    m = drawmap(ax, wrf_file, map_dir, Lat_min, Lat_max, Lon_min, Lon_max)
    times = extract_times(wrf_file, timeidx=ALL_TIMES)

    # Check how many time frames the wrfout file contains
    if times.shape[0] > 1:
        # A single file holds every time step, so plot it directly
        pmdbz(wrf_file, times, m, save_dir, drawclb)
    else:
        # One time step per file: loop over every matching wrfout file
        for file in os.listdir(wrf_dir):
            if fnmatch.fnmatch(file, wrf_name[:10] + '*'):
                wrf_file = Dataset(wrf_dir + file)
                times = extract_times(wrf_file, timeidx=ALL_TIMES)
                pmdbz(wrf_file, times, m, save_dir, drawclb)
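The excerpt above leans on project-specific globals and helpers (row, col, drawmap, pmdbz, wrf_dir, etc.). A minimal, self-contained sketch of the same frame-checking idea, assuming a hypothetical directory and wrfout file name:

import os
import fnmatch
from netCDF4 import Dataset
from wrf import extract_times, ALL_TIMES

wrf_dir = './'                                   # hypothetical directory
wrf_name = 'wrfout_d01_2019-01-01_00:00:00'      # hypothetical file name

times = extract_times(Dataset(os.path.join(wrf_dir, wrf_name)), timeidx=ALL_TIMES)
if times.shape[0] > 1:
    # a single file holds every time step
    print(len(times), 'frames in', wrf_name)
else:
    # one frame per file: gather every wrfout_d01* file in the directory
    for fname in sorted(os.listdir(wrf_dir)):
        if fnmatch.fnmatch(fname, wrf_name[:10] + '*'):
            print(fname, extract_times(Dataset(os.path.join(wrf_dir, fname)),
                                       timeidx=ALL_TIMES))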
Example #2
    def get_wrf_datetime_obj(cls):
        """get_wrf_datetime_obj: Get wrf timesteps as datetime objects

        Input:
            ncfile (netCDF4) netcdf file that contains the wrf model output
        Output:
            wrf_dt (list) List of wrf model time steps as datetime objects
        """
        # Get the WRF time steps and reformat them as strings
        wrf_times = wrf.extract_times(cls.ncfile, timeidx=wrf.ALL_TIMES,
                                      method='cat', squeeze=True, cache=None,
                                      meta=False, do_xtime=False).astype(str)

        time_obj = [""] * len(wrf_times)
        for ijk, time in enumerate(wrf_times):
            # extract_times returns numpy.datetime64[ns] values, so the string form
            # carries 9 fractional digits; drop the last 3 so %f (microseconds) can parse it
            time_obj[ijk] = datetime.strptime(time[:-3],
                                              '%Y-%m-%dT%H:%M:%S.%f')

        diff_wrf = time_obj[-1] - time_obj[0]
        wrf_timestep = (time_obj[1] - time_obj[0]).total_seconds()
        tot_sec_wrf = diff_wrf.total_seconds()

        if len(time_obj) != (tot_sec_wrf / wrf_timestep) + 1:
            sys.exit("WRF TIME NOT THE RIGHT LENGTH")

        cls.wrf_dt = time_obj
Example #3
def WindDataExtraction(WindFileName, t0):
    # Linearly interpolates the WRF fields in time to t0 (a UTC Julian date);
    # the spatial grid itself is left unchanged
    from wrf import to_np, getvar, extract_times, ALL_TIMES

    if isinstance(t0, np.ndarray): t0 = t0[0]

    WindFile = Dataset(WindFileName)
    times_all = extract_times(WindFile, timeidx=ALL_TIMES)
    times_jd = np.array(
        [Time(str(t), format='isot', scale='utc').jd for t in times_all])

    idx_after = np.searchsorted(times_jd, t0)
    idx_before = idx_after - 1

    interp_factor = (t0 - times_jd[idx_before]) / (times_jd[idx_after] -
                                                   times_jd[idx_before])
    if interp_factor < 0 or interp_factor > 1:
        print('WindWarning: The darkflight time is outside the bounds of WindData'
              ' by {0:.3f} times!'.format(interp_factor))

    WindArray = []
    for i in [idx_before, idx_after]:
        hei_3d = np.array([to_np(getvar(WindFile, 'z',
                                        timeidx=i))])  #[1,z,y,x]
        NumberLevels = np.shape(hei_3d)[1]  # Number heights

        lat_3d = np.array([
            np.stack([to_np(getvar(WindFile, 'lat', timeidx=i))] *
                     NumberLevels,
                     axis=0)
        ])  #[1,z,y,x]
        lon_3d = np.array([
            np.stack([to_np(getvar(WindFile, 'lon', timeidx=i))] *
                     NumberLevels,
                     axis=0)
        ])  #[1,z,y,x]

        wen_3d = to_np(getvar(WindFile, 'uvmet', timeidx=i))  #[2,z,y,x]
        wu_3d = np.array([to_np(getvar(WindFile, 'wa',
                                       timeidx=i))])  #[1,z,y,x]

        temp_3d = np.array([to_np(getvar(WindFile, 'tk',
                                         timeidx=i))])  #[1,z,y,x]
        pres_3d = np.array([to_np(getvar(WindFile, 'p',
                                         timeidx=i))])  #[1,z,y,x]
        rh_3d = np.array([to_np(getvar(WindFile, 'rh',
                                       timeidx=i))])  #[1,z,y,x]

        # Construct WindArray = [lat,lon,hei,we,wn,wu,temp,pres,rh]
        WindArray.append(
            np.vstack((lat_3d, lon_3d, hei_3d, wen_3d, wu_3d, temp_3d, pres_3d,
                       rh_3d)))

    WindArray = (1 -
                 interp_factor) * WindArray[0] + interp_factor * WindArray[1]

    return WindArray
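A hedged usage sketch for the function above; the file name and epoch are hypothetical, and t0 is passed as a UTC Julian date to match the astropy conversion used inside the function:

from astropy.time import Time

wind_file = 'wrfout_d02_2019-01-01_00:00:00'     # hypothetical wrfout file
t0 = Time('2019-01-01T03:30:00', format='isot', scale='utc').jd

WindArray = WindDataExtraction(wind_file, t0)
# rows follow the construction above: [lat, lon, hei, we, wn, wu, temp, pres, rh]
print(WindArray.shape)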
Example #4
    def get_tstamp(self, fname):
        """read the timestamp from a WRF restart file

        assumes that the restart file contains a single timestamp in the
        netCDF variable Times.  If this assumption is violated results are
        undefined.

        ARGS:
        fname (str): full path to WRF restart file

        RETURNS:
        the restart file timestamp, as converted by pandas.to_datetime
        """
        nc = netCDF4.Dataset(fname, 'r')
        self.tstamp = pd.to_datetime(wrf.extract_times(nc, 0))
        nc.close()
        return self.tstamp
Example #5
def get_wrf_datetime_obj(ncfile):
    """Get wrf timesteps as datetime objects"""
    # Get the WRF time steps and reformat them as strings
    wrf_times = wrf.extract_times(ncfile, timeidx=wrf.ALL_TIMES,
                                  method='cat', squeeze=True, cache=None,
                                  meta=False, do_xtime=False).astype(str)

    wrf_dt = [""] * len(wrf_times)
    for ijk in range(len(wrf_times)):
        # extract_times returns numpy.datetime64[ns] values, so the string form
        # carries 9 fractional digits; drop the last 3 so %f (microseconds) can parse it
        wrf_dt[ijk] = datetime.strptime(wrf_times[ijk][:-3],
                                        '%Y-%m-%dT%H:%M:%S.%f')

    diff_wrf = wrf_dt[-1] - wrf_dt[0]
    wrf_timestep = (wrf_dt[1] - wrf_dt[0]).total_seconds()
    tot_sec_wrf = diff_wrf.total_seconds()

    if len(wrf_dt) != (tot_sec_wrf / wrf_timestep) + 1:
        print("WRF TIME NOT THE RIGHT LENGTH")
        sys.exit(0)

    return wrf_dt
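As a sketch of an alternative (not part of the original example), the manual strptime loop can be replaced by pandas, which converts the datetime64 array returned by extract_times directly; ncfile is the same netCDF handle passed to the function above:

import pandas as pd
import wrf

wrf_times = wrf.extract_times(ncfile, timeidx=wrf.ALL_TIMES, meta=False)
wrf_dt = pd.to_datetime(wrf_times).to_pydatetime().tolist()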
Example #6
if len(sys.argv) != 2:
    print("Usage:")
    print("      ./prep_wrf.py wrfout_file")
    exit()

ifile = str(sys.argv[1])
ofile = "_pt_" + ifile

print("Input file:      ", ifile)
print("Output file: ", ofile)

incid = Dataset(ifile, "r")
# timeidx=None is equivalent to wrf.ALL_TIMES (extract every time step)
times = wrf.extract_times(incid,
                          None,
                          method='cat',
                          squeeze=True,
                          cache=None,
                          meta=True,
                          do_xtime=True)

ntimes = times.shape[0]

var = incid.variables["U"][:]
u = wrf.destagger(var, 3, meta=False)
var = incid.variables["V"][:]
v = wrf.destagger(var, 2, meta=False)
var = incid.variables["W"][:]
w = wrf.destagger(var, 1, meta=False)
ph = incid.variables["PH"][:]
phb = incid.variables["PHB"][:]
# slp = wrf.getvar(incid, "slp", timeidx=wrf.ALL_TIMES)
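PH and PHB are read above but the excerpt stops before combining them; a sketch of the usual follow-up, computing geopotential height on the destaggered (mass) levels:

g = 9.81
# geopotential height in metres, destaggered along bottom_top_stag (axis 1)
z = wrf.destagger((ph + phb) / g, 1, meta=False)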
Example #7
from progress.bar import IncrementalBar
import numpy as np
from datetime import datetime
import pandas as pd
import netCDF4
from wrf import getvar, extract_times

# Select WRF latitude/longitude and time-step.
lat = -26.555
lon = -49.355
inittime = 0

# Path to the WRF output file.
wrf_dir = '/media/ueslei/Ueslei/INPE/PCI/Projetos/SC_2008/Outputs/normal/wrf_ts.nc'
nc_file = netCDF4.Dataset(wrf_dir)

# Read the time steps to size the IncrementalBar progress bar.
timestr = extract_times(nc_file, timeidx=None, meta=False, do_xtime=False)
timestr = pd.to_datetime(timestr, format="%b %d %Y")
range_loop = len(timestr)
bar = IncrementalBar('', max=len(timestr))

# Pre-allocate arrays to store the data through the loop.
tc_list = np.zeros([range_loop])
rh_list = np.zeros([range_loop])
slp_list = np.zeros([range_loop])
uvmet_list = np.zeros([range_loop])
prec_list = np.zeros([range_loop])

# Start looping through time.
for i in range(inittime, range_loop, 1):
    # Open WRF variables.
    rh = getvar(nc_file,
Example #8
                    # print(icount)
                    name_str[icount] = dir_string+"wrfout_d01_2011-"+month_str[i_month]+"-"+day_str[i_day]+"_"+hour_str[i_hour]+":00:00"
                    icount = icount + 1
    else:  # June only has 30 days; for August, the wrfout files do not include 08-31 outputs
        for i_day in range(len(day_str)-1):
            for i_hour in range(len(hour_str)):
                    name_str[icount] = dir_string+"wrfout_d01_2011-"+month_str[i_month]+"-"+day_str[i_day]+"_"+hour_str[i_hour]+":00:00"
                    icount = icount + 1

wrflist = [Dataset(name_str[i]) for i in range(len(name_str))]

print(wrflist)

#wrf_times = times.get_times(wrflist, timeidx=ALL_TIMES, method='cat')

wrf_times = extract_times(wrflist, timeidx=ALL_TIMES, method='cat', do_xtime=False)
print(pandas.to_datetime(wrf_times))

times = getvar(wrflist,"times",timeidx=ALL_TIMES,method="cat")
ntimes = np.size(times.values)
print(times.values)
print(ntimes)

T2 = getvar(wrflist,"T2",timeidx=ALL_TIMES,method="cat")
RAINNC = getvar(wrflist, "RAINNC",timeidx=ALL_TIMES,method="cat")
RAINC = getvar(wrflist, "RAINC",timeidx=ALL_TIMES,method="cat")
HFX = getvar(wrflist, "HFX",timeidx=ALL_TIMES,method="cat")
LH = getvar(wrflist, "LH",timeidx=ALL_TIMES,method="cat")

print(wrf_times)
print(T2)
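The nested month/day/hour loops that build name_str can also be expressed with a date range; a sketch assuming hourly output, a hypothetical directory, and the same wrfout_d01 naming convention:

import pandas
from netCDF4 import Dataset

dir_string = '/path/to/wrfout/'                  # hypothetical directory
stamps = pandas.date_range('2011-06-01 00:00', '2011-08-30 23:00', freq='H')
name_str = [dir_string + t.strftime('wrfout_d01_%Y-%m-%d_%H:00:00') for t in stamps]
wrflist = [Dataset(f) for f in name_str]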
Example #9
from datetime import datetime

import numpy as N
import xarray
from dateutil import tz
from netCDF4 import Dataset

from wrf import getvar, ALL_TIMES, extract_times, latlon_coords, get_cartopy, to_np, ll_to_xy, xy_to_ll, geo_bounds, CoordPair

# Open the NetCDF file
ncfile = Dataset("wrf_jan_29.nc")
pressure = N.array(ncfile.variables['PSFC'][:])
pressure = pressure / 100                      # Pa -> hPa
temps = N.array(ncfile.variables['T2'][:])
lats = N.array(ncfile.variables['XLAT'][0, :, :])
lons = N.array(ncfile.variables['XLONG'][0, :, :])
temps = temps - 273.15                         # K -> degC
temp_f = (temps * (9. / 5.)) + 32.             # degC -> degF
time = extract_times(ncfile, ALL_TIMES, method="cat")
temp = getvar(ncfile, "tk")
tempf = ((temp - 273.15) * 9. / 5.) + 32.      # K -> degF
time_string=[]
for i in range(41):
    string = str(time[i])
    string = string[0:13]
    time_string.append(string)
    
EST = []
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
counter = 0
for i in range(41):
    utc = datetime.strptime(time_string[i], '%Y-%m-%dT%H')
    utc = utc.replace(tzinfo=from_zone)
Example #10
    def __init__(self,
                 wrf_files_path,
                 wrf_domain_number,
                 dateLimits,
                 variables_to_extract,
                 subset_of_wrfDomain=None):

        datefrom = np.datetime64(
            pd.to_datetime(dateLimits[0],
                           format='%Y-%m-%d %H:%M:%S',
                           errors='ignore'))
        dateto = np.datetime64(
            pd.to_datetime(dateLimits[1],
                           format='%Y-%m-%d %H:%M:%S',
                           errors='ignore'))

        # So far, spinup discarded hours are hardcoded
        spinup = 0
        '''
        List with all files to read. This part of the code may differ depending on how you name
        and store the wrf output files. The nc_files_list should contain all the files over which
        the code is going to loop and concatenate variables along the "Time" dimension axis.
        '''
        nc_files_list = []
        folders = [sorted(glob.glob(wrf_files_path + '*'))]
        # notice that it is assumed that wrf files are named with a convention of wrfout_d0xx
        fileN = 'wrfout_d0' + str(wrf_domain_number)
        for ii in folders[0]:
            # wrfoutfiles.append(sorted(glob.glob(ii+'/WRF/'+fileN+'*'))[0])
            if ii.split('/')[-1].startswith(fileN):
                nc_files_list.append(ii)

        Nfiles = len(nc_files_list)

        print(
            str(Nfiles) + ' files found for wrfout_d0' +
            str(wrf_domain_number) + ' outputs')
        print(
            "NOTE: it is assumed that every ncfile contains an initialized run from which "
            + str(spinup) + " h of spinup are discarded")

        print('The following variables are to be extracted:')
        print(variables_to_extract)

        # loop over all files found
        for ii, file in enumerate(nc_files_list):

            print("Reading netCDF data from file:", file)

            # open the netcdf file
            f2 = netCDF4.Dataset(file)

            # get the netCDF time data
            ncTimes = extract_times(f2, timeidx=ALL_TIMES, squeeze=True)

            spinupDate = ncTimes[0] + np.timedelta64(spinup, 'h')

            # skip files that do not contain any time steps within the requested period
            if sum(np.logical_and(ncTimes >= datefrom,
                                  ncTimes <= dateto)) != 0:
                if ii == 0:
                    # if no subset_of_wrfDomain is chosen, it extracts all domain grid points
                    if subset_of_wrfDomain is None:
                        iBT = np.arange(0, extract_dim(f2, 'bottom_top'))
                        iSN = np.arange(0, extract_dim(f2, 'south_north'))
                        iWE = np.arange(0, extract_dim(f2, 'west_east'))
                    else:
                        lat_s, lon_s, L = subset_of_wrfDomain[
                            0], subset_of_wrfDomain[1], subset_of_wrfDomain[2]
                        # get the indexes of a subset domain for memory efficiency
                        iBT, iSN, iWE = get_index_of_subset_domain(
                            f2, lat_s, lon_s, L)

                    # make sure spinup dates are discarded while keeping only the period between the desired dates
                    iTimes = np.logical_and.reduce(
                        (ncTimes >= spinupDate, ncTimes >= datefrom,
                         ncTimes <= dateto))

                    # set the valid times
                    self.times = ncTimes[iTimes]
                    self.min_i_lat = min(iSN)
                    self.min_i_lon = min(iWE)

                    self.lat_lon = np.concatenate(
                        (f2.variables.get('XLAT')[0:1, iSN, iWE],
                         f2.variables.get('XLONG')[0:1, iSN, iWE]),
                        axis=0)

                    # Build z above ground
                    zagl = get_zagl(f2, iTimes, iBT, iSN, iWE)

                    self.variables_data = readAllvars(f2, variables_to_extract,
                                                      iTimes, iBT, iSN, iWE)
                    self.variables_names = self.variables_data.keys()
                    self.input_file = f2

                else:

                    iTimes = np.logical_and.reduce(
                        (ncTimes > spinupDate, ncTimes >= datefrom,
                         ncTimes <= dateto))

                    self.times = np.concatenate((self.times, ncTimes[iTimes]))

                    # concatenate z above ground
                    zaglTmp = get_zagl(f2, iTimes, iBT, iSN, iWE)
                    zagl = np.concatenate((zagl, zaglTmp), axis=0)

                    # concatenate every selected variable
                    varsTmp = readAllvars(f2, variables_to_extract, iTimes,
                                          iBT, iSN, iWE)

                    for iVar in varsTmp.keys():
                        self.variables_data[iVar] = np.concatenate(
                            (self.variables_data[iVar], varsTmp[iVar]), axis=0)

                    f2.close()
            else:
                print('File: ' + file + ' does not contain any valid dates')

        # average-out the height as the variability of the actual values is low
        self.heights = np.nanmean(zagl, axis=(0, 2, 3))

        # Reference Time for the netcdf files
        self.referenceDate = np.datetime64(
            datetime.datetime(1970, 1, 1, 0, 0, 0))

        # convert time stamp to "seconds since <referenceDate>"
        self.seconds = pd.Series(self.times -
                                 self.referenceDate).dt.total_seconds().values

        # number of grid points in the subset wrf box
        self.nt, self.nz, self.ny, self.nx = zagl.shape
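A hedged usage sketch of the constructor above; the class name (WrfReader) and every path and argument here are hypothetical:

reader = WrfReader(
    wrf_files_path='/data/wrf/run01/',
    wrf_domain_number=2,
    dateLimits=['2019-01-01 00:00:00', '2019-01-02 00:00:00'],
    variables_to_extract=['U', 'V', 'W', 'T'],
    subset_of_wrfDomain=(45.0, -120.0, 10000.0))   # (lat_s, lon_s, L), as unpacked above

print(reader.times.shape, reader.heights.shape)
print(list(reader.variables_names))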