Ejemplo n.º 1
0
def load_model(model_path, start_date, end_date, field, nowcast_flag=False):
    """Loads model grid_T data in date range defined by start_date and end_date
    Only considers daily averaged model fields.
    Returns model depths, variable defined by field, and dates associated with
    variable
    """

    files = analyze.get_filenames(start_date, end_date, '1d', 'grid_T',
                                  model_path)
    if nowcast_flag:
        # Nowcast results: gather the field over the full model domain
        # (40 depth levels, 898 x 398 horizontal grid) from the daily files.
        var, dates = analyze.combine_files(files, field, np.arange(0, 40),
                                           np.arange(0, 898),
                                           np.arange(0, 398))
        depth = nc.Dataset(files[0]).variables['deptht'][:]
    else:
        # Non-nowcast results: open every file as one aggregated dataset.
        dataset = nc.MFDataset(files)
        time_var = dataset.variables['time_counter']
        # Convert seconds-since-origin counters into datetime objects.
        origin = datetime.datetime.strptime(time_var.time_origin,
                                            ' %Y-%b-%d %H:%M:%S')
        dates = [origin + datetime.timedelta(seconds=seconds)
                 for seconds in time_var[:]]
        depth = dataset.variables['deptht'][:]
        var = dataset.variables[field][:]

    return depth, var, dates
Ejemplo n.º 2
0
def load_model_data(sdt, edt, grid_B, results_home, period, variable, lat,
                    lon):
    """Load the model data in date range of interest and at the location.

    :arg sdt: the start date
    :type sdt: datetime object

    :arg edt: the end date
    :type edt: datetime object

    :arg grid_B: the model bathymetry
    :type grid_B: netCDF4 handle

    :arg results_home: directory for model results
    :type results_home: string

    :arg period: the model averaging period eg '1h' or '1d'
    :type period: string

    :arg variable: the variable to be loaded, eg 'vosaline' or 'votemper'
    :type variable: string

    :arg lat: the latitude
    :type lat: float

    :arg lon: the longitude
    :type lon: float

    :returns: var, times, mdepths - the array of data, the times associated
    and the model depth array
    """

    files = analyze.get_filenames(sdt, edt, period, 'grid_T', results_home)
    # Model depth axis comes from the first results file.
    sample = nc.Dataset(files[0])
    mdepths = sample.variables['deptht'][:]
    # Model grid and bathymetry
    lons = grid_B.variables['nav_lon'][:]
    lats = grid_B.variables['nav_lat'][:]
    bathy = grid_B.variables['Bathymetry'][:]
    # Grid indices of the model point closest to (lat, lon)
    j, i = geo_tools.find_closest_model_point(lon, lat, lons, lats)
    # Full water-column time series at that point
    var, times = analyze.combine_files(files, variable,
                                       np.arange(mdepths.shape[0]), j, i)
    print('Model bathymetry:', bathy[j, i])
    return var, times, mdepths
Ejemplo n.º 3
0
# Analysis window: 2014-11-26 through 2015-04-26, hourly nowcast results.
to = datetime.datetime(2014, 11, 26)
tf = datetime.datetime(2015, 4, 26)
nowcast_path = '/results/SalishSea/nowcast/'
# Bathymetry file supplies the model longitude/latitude grids.
grid = nc.Dataset(
    '/data/nsoontie/MEOPAR/NEMO-forcing/grid/bathy_meter_SalishSea2.nc')
lon_grid = grid.variables['nav_lon']
lat_grid = grid.variables['nav_lat']
# Sub-domain of interest: j (y) indices 200-379, i (x) indices 250-329,
# and the top 40 depth levels.
jmin = 200
jmax = 380
imin = 250
imax = 330
jss = np.arange(jmin, jmax)
iss = np.arange(imin, imax)
ks = np.arange(0, 40)

# Hourly tracer (grid_T) and vertical-velocity (grid_W) files in the window.
filest = analyze.get_filenames(to, tf, '1h', 'grid_T', nowcast_path)
filesw = analyze.get_filenames(to, tf, '1h', 'grid_W', nowcast_path)
# Temperature, salinity, sea surface height ('None' = no depth axis), and
# vertical velocity over the sub-domain.
Ts, times = analyze.combine_files(filest, 'votemper', ks, jss, iss)
Ss, times = analyze.combine_files(filest, 'vosaline', ks, jss, iss)
sshs, times = analyze.combine_files(filest, 'sossheig', 'None', jss, iss)
Ws, times = analyze.combine_files(filesw, 'vovecrtz', ks, jss, iss)
# Strip the mask, keeping the raw data array.
Ws = Ws.data

# Depth axes differ between T (cell centres) and W (cell faces) grids.
tmp = nc.Dataset(filest[0])
depthst = tmp.variables['deptht'][:]
tmp = nc.Dataset(filesw[0])
depthsw = tmp.variables['depthw'][:]

# NOTE(review): lon_grid/lat_grid are netCDF4 variables (not numpy arrays),
# so this indexing is presumably orthogonal, yielding the
# (len(jss), len(iss)) sub-grid — confirm against netCDF4 indexing rules.
lons = lon_grid[jss, iss]
lats = lat_grid[jss, iss]
Ejemplo n.º 4
0
                    station, places.PLACES[station]['depth']))
                ax.set_ylim(tick)
    for ax in axs.flatten():
        ax.grid()
        ax.set_xlim([t_o, t_f])
        ax.get_yaxis().get_major_formatter().set_useOffset(False)
    for ax in axs[:, -1]:
        ax.legend(loc=(1, 0.25))
    fig.autofmt_xdate()
    plt.show()
    fig.savefig('VENUS.png')


# Plot window: 2014-09-12 through 2017-09-11, daily averaged tracer files.
t_o = datetime.datetime(2014, 9, 12)
t_f = datetime.datetime(2017, 9, 11)
fnames = analyze.get_filenames(t_o, t_f, '1d', 'grid_T',
                               '/results/SalishSea/nowcast-green/')
# Bathymetry and mesh mask for the 2017-02 model configuration.
grid_B = nc.Dataset(
    '/data/nsoontie/MEOPAR/NEMO-forcing/grid/bathymetry_201702.nc')
mesh_mask = nc.Dataset(
    '/data/nsoontie/MEOPAR/NEMO-forcing/grid/mesh_mask201702.nc')

# Single run definition consumed by plot_station; 'nemo36' flags the
# NEMO 3.6 configuration.
runs = {
    'nowcast-green': {
        'grid': grid_B,
        'mesh': mesh_mask,
        'fnames': fnames,
        'nemo36': True
    }
}
# VENUS observatory nodes to plot.
stations = ['East node', 'Central node', 'Delta DDL node', 'Delta BBL node']
plot_station(stations, runs, t_o, t_f)
Ejemplo n.º 5
0
def compare_model(to,
                  tf,
                  lighthouse,
                  mode,
                  period,
                  grid_B,
                  smin=28,
                  smax=33,
                  tmin=6,
                  tmax=14):
    """Compare model surface salinity with lighthouse observations in a date
    range.

    :arg to: the beginning of the date range
    :type to: datetime object

    :arg tf: the end of the date range
    :type tf: datetime object

    :arg lighthouse: the name of the lighthouse
    :type lighthouse: string

    :arg mode: the model simulation mode - nowcast or spinup or nowcast-green
    :type mode: string

    :arg period: the averaging period for model results - 1h or 1d
    :type period: string

    :arg grid_B: NEMO bathymetry grid
    :type grid_B: netCDF4 handle

    :arg smin: minimum salinity for axis limits
    :type smin: float

    :arg smax: maximum salinity for axis limits
    :type smax: float

    :arg tmin: minimum temperature for axis limits
    :type tmin: float

    :arg tmax: maximum temperature for axis limits
    :type tmax: float

    :returns: fig, a figure object
    """
    # Load observations
    data, lat, lon = load_lighthouse(LIGHTHOUSES[lighthouse])
    # Look up the model grid point nearest the lighthouse
    X = grid_B.variables['nav_lon'][:]
    Y = grid_B.variables['nav_lat'][:]
    j, i = geo_tools.find_closest_model_point(lon, lat, X, Y)

    # Load surface (depth index 0) model fields at that point
    files = analyze.get_filenames(to, tf, period, 'grid_T', MODEL_PATHS[mode])
    sal, time = analyze.combine_files(files, 'vosaline', 0, j, i)
    if mode == 'nowcast-green':
        # nowcast-green salinity is reference salinity; convert to PSU
        sal = teos_tools.teos_psu(sal)
    temp, time = analyze.combine_files(files, 'votemper', 0, j, i)
    if period == '1h':
        # Subsample hourly results at daytime high tides
        ssh, times = analyze.combine_files(files, 'sossheig', 'None', j, i)
        max_inds = daytime_hightide(ssh, times)
        sal = sal[max_inds]
        temp = temp[max_inds]
        time = time[max_inds]
        title_str = 'max daytime tides'
    else:
        title_str = 'daily average'

    fig, axs = plt.subplots(1, 2, figsize=(15, 5))

    def _panel(ax, series, obs_column, title, ymin, ymax, ylabel):
        # One time-series panel: model curve vs lighthouse observations.
        ax.plot(time, series, label=mode)
        ax.plot(data['date'], data[obs_column], label='observations')
        ax.legend(loc=0)
        ax.set_title(title)
        ax.set_xlim([to, tf])
        ax.set_ylim([ymin, ymax])
        ax.set_ylabel(ylabel)

    # Salinity panel
    _panel(axs[0], sal, 'Salinity(psu)',
           '{} Salinity - {}'.format(lighthouse, title_str),
           smin, smax, 'Salinity [psu]')
    # Temperature panel
    _panel(axs[1], temp, 'Temperature(C)',
           '{} Temperature - {}'.format(lighthouse, title_str),
           tmin, tmax, 'Temperature [deg C]')
    fig.autofmt_xdate()

    return fig
Ejemplo n.º 6
0
def ellipse_files_nowcast(to,
                          tf,
                          iss,
                          jss,
                          path,
                          depthrange='None',
                          period='1h',
                          station='None'):
    """ This function loads all the data between the start and the end date
    that contains in the netCDF4 nowcast files in the
    specified depth range. This will make an area with all the indices
    indicated, the area must be continuous for unstaggering.

    :arg to: The beginning of the date range of interest
    :type to: datetime object

    :arg tf: The end of the date range of interest
    :type tf: datetime object

    :arg iss: x index.
    :type iss: list or numpy.array

    :arg jss: y index.
    :type jss: list or numpy.array

    :arg path: Defines the path used(eg. nowcast)
    :type path: string

    :arg depthrange: Depth values of interest in meters as a float for a single
        depth or a list for a range. A float will find the closest depth that
        is <= the value given. Default is 'None' for the whole water column
        (0-441m).
    :type depthrange: float, string or list.

    :arg period: period of the results files
    :type period: string - '1h' for hourly results or '15m' for 15 minute

    :arg station: station for analysis
    :type station: string 'None' if not applicable. 'ddl', 'east' or 'central'

    :returns: u, v, time, dep.
    """

    # The unstaggering in prepare_vel.py requires an extra i and j, we add one
    # on here to maintain the area, or point chosen.
    jss = np.append(jss[0] - 1, jss)
    iss = np.append(iss[0] - 1, iss)

    # Makes a list of the filenames that follow the criteria in the indicated
    # path between the start and end dates.
    if period == '15m':
        # 15-minute results keep both velocity components in one file set.
        files = analyze.get_filenames_15(to, tf, station, path)
        filesu = files
        filesv = files
    else:
        filesu = analyze.get_filenames(to, tf, period, 'grid_U', path)
        filesv = analyze.get_filenames(to, tf, period, 'grid_V', path)

    # Set up depth array and depth range
    depth = nc.Dataset(filesu[-1]).variables['depthu'][:]

    # Case one: a single depth.  isinstance (rather than type() ==) also
    # accepts numpy scalar subclasses such as numpy.float64, which a scalar
    # pulled from a depth array typically is.
    if isinstance(depthrange, (float, int)):
        # Deepest model level that is still <= the requested depth.
        k = np.where(depth <= depthrange)[0][-1]
        dep = depth[k]
    # Case two: a specific range of depths
    elif isinstance(depthrange, list):
        k = np.where(
            np.logical_and(depth > depthrange[0], depth < depthrange[1]))[0]
        dep = depth[k]
    # Case three: the whole depth range 0 to 441m ('None' sentinel).
    else:
        k = depthrange
        dep = depth

    # Load the files
    u, time = analyze.combine_files(filesu, 'vozocrtx', k, jss, iss)
    v, time = analyze.combine_files(filesv, 'vomecrty', k, jss, iss)

    # For the nowcast the reftime is always Sep10th 2014. Set time of area we
    # are looking at relative to this time.
    reftime = tidetools.CorrTides['reftime']
    time = tidetools.convert_to_hours(time, reftime=reftime)

    return u, v, time, dep