Code example #1
import numpy as np

from netCDF4 import num2date

from PyFVCOM.read_FVCOM_results import ncread

# Load the model output.
fvcom = 'sample.nc'

varlist = ('lonc', 'latc', 'ua', 'va', 'time')
dims = {'time': ':360'}  # first 15 days at hourly sampling

# Define a plot subset ((xmin, xmax), (ymin, ymax)).
subset = np.array(((-4.2416, -4.0837), (50.2656, 50.3966)))

# Scaling factor for the ellipses. You will need to experiment with this
# value.
scaling = 2000

# Read the variables of interest from the model output.
FVCOM = ncread(fvcom, vars=varlist, dims=dims, noisy=False)

# Create a time array for the TAPPy call.
FVCOM['datetimes'] = num2date(FVCOM['time'], 'days since 1858-11-17 00:00:00')
years = [i.year for i in FVCOM['datetimes']]
months = [i.month for i in FVCOM['datetimes']]
days = [i.day for i in FVCOM['datetimes']]
hours = [i.hour for i in FVCOM['datetimes']]
minutes = [i.minute for i in FVCOM['datetimes']]
seconds = [i.second for i in FVCOM['datetimes']]
Times = np.column_stack((years, months, days, hours, minutes, seconds))

# Find the indices of the locations which fall within the subset.
elems = np.where((FVCOM['lonc'] > subset[0].min()) *
                 (FVCOM['lonc'] < subset[0].max()) *
                 (FVCOM['latc'] > subset[1].min()) *
                 (FVCOM['latc'] < subset[1].max()))[0]
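
# The snippet stops short of the analysis itself. What follows is a hedged
# sketch (not part of the original): the harmonic analysis (e.g. with TAPPy)
# would return ellipse parameters per element; here the time-mean
# depth-averaged currents stand in as placeholder amplitudes purely to show
# how `scaling` is applied when drawing the ellipses.
import matplotlib.pyplot as plt

from matplotlib.patches import Ellipse

# Placeholder "amplitudes": time-mean depth-averaged velocities in the subset.
uv = np.column_stack((FVCOM['ua'][:, elems].mean(axis=0),
                      FVCOM['va'][:, elems].mean(axis=0)))

fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(1, 1, 1)
for i, el in enumerate(elems):
    # Width and height stand in for the semi-major/semi-minor axes from the
    # harmonic analysis; `scaling` converts current magnitude to map units
    # (experiment with it, as noted above).
    ellipse = Ellipse((FVCOM['lonc'][el], FVCOM['latc'][el]),
                      width=np.abs(uv[i, 0]) / scaling,
                      height=np.abs(uv[i, 1]) / scaling,
                      angle=np.degrees(np.arctan2(uv[i, 1], uv[i, 0])))
    ax.add_patch(ellipse)
ax.set_xlim(subset[0])
ax.set_ylim(subset[1])
plt.show()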
Code example #2
import numpy as np
import matplotlib.pyplot as plt

from matplotlib import rcParams
from netCDF4 import num2date

from PyFVCOM.grid_tools import findNearestPoint
from PyFVCOM.read_FVCOM_results import ncread

# Multiple output files are transparently loaded by ncread.
fvcom = ['sample_april.nc', 'sample_may.nc', 'sample_june.nc']

# Positions we're interested in plotting. The findNearestPoint 
# function will find the closest node in the unstructured grid.
xy = np.array(((-4.5, 55), (-6.9, 52)))  # lon, lat pairs

# Extract only the surface layer for the plot.
dims = {'siglay': '0'}

# Our variables of interest.
varlist = ('lon', 'lat', 'time', 'temp')

FVCOM = ncread(fvcom, vars=varlist, dims=dims)

# Make datetime objects for the time series plots.
FVCOM['datetimes'] = num2date(FVCOM['time'], 'days since 1858-11-17 00:00:00')

# Find the nodes in the grid closest to the positions we're interested in plotting.
nx, ny, dist, idx = findNearestPoint(FVCOM['lon'], FVCOM['lat'], 
                                     xy[:, 0], xy[:, 1])

# Now plot the time series.

rcParams['mathtext.default'] = 'regular'  # sensible font for the LaTeX labelling

fig = plt.figure(figsize=(10, 7))  # size in inches
for c, ind in enumerate(idx):
    ax = fig.add_subplot(len(idx), 1, c + 1)
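    # A hedged continuation of the loop (not in the original snippet): plot
    # the surface temperature time series at each of the nearest nodes.
    ax.plot(FVCOM['datetimes'], np.squeeze(FVCOM['temp'])[:, ind], 'k')
    ax.set_ylabel(r'Temperature ($^\circ$C)')
    ax.set_title('Nearest node to ({:.2f}, {:.2f})'.format(nx[c], ny[c]))

fig.autofmt_xdate()
plt.show()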
Code example #3
import numpy as np
import matplotlib.pyplot as plt

from cmocean import cm
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.axes_grid1 import make_axes_locatable

from PyFVCOM.read_FVCOM_results import ncread

# Load the model output.
fvcom = 'sample.nc'

# Extract only the first 20 time steps.
dims = {'time': ':20'}

# And only these variables.
varlist = ('lon', 'lat', 'nv', 'zeta', 'Times')

FVCOM = ncread(fvcom, vars=varlist, dims=dims, noisy=True)

# Lay the groundwork for a plot of the model surface.

triangles = FVCOM['nv'].transpose() - 1  # offset for Python indexing.

extents = np.array((FVCOM['lon'].min(),
                    FVCOM['lon'].max(),
                    FVCOM['lat'].min(),
                    FVCOM['lat'].max()))

m = Basemap(llcrnrlon=extents[0],
            llcrnrlat=extents[2],
            urcrnrlon=extents[1],
            urcrnrlat=extents[3],
            rsphere=(6378137.00, 6356752.3142),
            # The original snippet is truncated here; the remaining projection
            # arguments below are assumed.
            resolution='i',
            projection='merc',
            lat_ts=np.mean(extents[2:]))
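
# The original snippet ends with the map setup. A hedged sketch of the surface
# plot it appears to be building (time step, colormap and labels are
# assumptions, not the original code):
x, y = m(FVCOM['lon'], FVCOM['lat'])  # project the node positions

fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1)
m.drawmapboundary()
m.drawcoastlines(zorder=100)

# Plot the last time step of the surface elevation on the unstructured grid.
tp = ax.tripcolor(x, y, triangles, FVCOM['zeta'][-1, :], cmap=cm.balance)

# Put the colour bar alongside the map with make_axes_locatable.
divider = make_axes_locatable(ax)
cax = divider.append_axes('right', size='5%', pad=0.1)
cb = fig.colorbar(tp, cax=cax)
cb.set_label('Surface elevation (m)')

plt.show()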
Code example #4
import os
import numpy as np
import matplotlib.pyplot as plt

from glob import glob
from mpl_toolkits.basemap import Basemap
from PyFVCOM.read_FVCOM_results import ncread


if __name__ == '__main__':

    files = glob(os.path.join('raw_data', '*.qxf'))

    data = {}
    for file in files:
        data[file] = ncread(file, vars=['ALONAT01', 'ALATAT01', 'ALONAG01', 'ALATAG01'], noisy=True)

    # Do a quick plot.
    m = Basemap(llcrnrlon=-12,
                llcrnrlat=49,
                urcrnrlon=-4,
                urcrnrlat=53,
                rsphere=(6378137.00, 6356752.3142),
                resolution='i',
                projection='merc',
                area_thresh=0.2,
                lon_0=0,
                lat_0=50.5,
                lat_ts=50.5)
    parallels = np.arange(40, 60, 0.5)
    meridians = np.arange(-20, 20, 1)
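
    # The snippet ends here; a hedged continuation (marker and label choices
    # are assumptions) to finish the quick plot:
    m.drawcoastlines()
    m.fillcontinents(color='0.8')
    m.drawparallels(parallels, labels=[1, 0, 0, 0])
    m.drawmeridians(meridians, labels=[0, 0, 0, 1])

    for file in data:
        # Plot the first set of position variables loaded for each file.
        x, y = m(data[file]['ALONAT01'], data[file]['ALATAT01'])
        m.scatter(x, y, c='r', s=20, zorder=200)

    plt.show()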
Code example #5
                    SeriesAvailability TEXT COLLATE nocase, \
                    Warnings TEXT COLLATE nocase, \
                    Licence TEXT COLLATE nocase, \
                    OriginalKeys TEXT COLLATE nocase);')

            # Now add the metadata.
            for row in reader:
                if noisy:
                    print('Adding station {}'.format(row['BODC reference']))

                try:
                    # Save the current site ID.
                    site = 'b{:07d}'.format(int(row['BODC reference']))

                    ncfile = os.path.join(base, 'raw_data', '{}.qxf'.format(site))
                    data = ncread(ncfile)

                    # Since the BODC data use a number of different codes for
                    # the same thing, I have to search for one of a few
                    # different codes in each file. This is a bit of a pain.
                    # Save these keys to the Stations table in case I have
                    # messed up the groupings in findNamesBODC.
                    keys = findNamesBODC(data)

                    # If we don't have a depth key, but we do have a pressure
                    # one, calculate the depth using the UNESCO Fofonoff and
                    # Millard (1983) equation.
                    if keys['depth'] is None and keys['pressure'] is not None:
                        keys['depth'] = 'my_depth'
                        data['my_depth'] = pressure2depth(data[keys['pressure']], float(row['Latitude A']))
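
                    # A hedged sketch of what might follow here (the original
                    # snippet is truncated): write the discovered keys back to
                    # the Stations table. The cursor name `c` and the single
                    # column used are assumptions, not the original schema.
                    c.execute('INSERT INTO Stations (OriginalKeys) VALUES (?)',
                              (','.join('{}:{}'.format(name, key)
                                        for name, key in keys.items() if key),))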