Code Example #1
def load_state(dt):
    prmsl = twcr.load('prmsl', dt, version='2c')
    prmsl = to_analysis_grid(prmsl.extract(iris.Constraint(member=1)))
    t2m = twcr.load('air.2m', dt, version='2c')
    t2m = to_analysis_grid(t2m.extract(iris.Constraint(member=1)))
    u10m = twcr.load('uwnd.10m', dt, version='2c')
    u10m = to_analysis_grid(u10m.extract(iris.Constraint(member=1)))
    v10m = twcr.load('vwnd.10m', dt, version='2c')
    v10m = to_analysis_grid(v10m.extract(iris.Constraint(member=1)))
    return (t2m, prmsl, u10m, v10m)
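All these examples assume the underlying 20CR files are already on disk: twcr.load reads from the local $SCRATCH cache and raises an exception if the files are missing (see Code Example #2). A minimal fetch-then-load sketch, following Code Example #28:

import datetime
import IRData.twcr as twcr

dt = datetime.datetime(1987, 10, 16, 6)
twcr.fetch('prmsl', dt, version='2c')         # download to the local cache first
prmsl = twcr.load('prmsl', dt, version='2c')  # iris.cube.Cube with a 'member' coordinate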
Code Example #2
File: test_load.py  Project: pbrohan/IRData
def test_fetch_mslp(self):
    with self.assertRaises(Exception) as cm:
        tc = twcr.load('mslp',
                       datetime.datetime(1969, 3, 12),
                       version=version)
    self.assertIn("One or more of the files specified did not exist",
                  str(cm.exception))
Code Example #3
def qc_compare_reanalysis(obs, variable='prmsl', version='2c'):
    """Get 20CR ensemble values at the time and place of each observation.

    Args:
        obs (:obj:`pandas.DataFrame`): Observations. Dataframe must have columns 'latitude', 'longitude', and 'dtm' - the last a datetime.datetime.
        variable (:obj:`str`): Which 20CR variable to compare to. Defaults to 'prmsl'
        version (:obj:`str`): Which 20CR version to load data from. Defaults to '2c'

    Returns:
        :obj:`pandas.Series`: Reanalysis ensemble associated with each observation.

    |
    """

    old_idx = obs.index
    obs = obs.reset_index()  # index values 0-n
    ob_times = obs['dtm'].unique()
    results = [None] * len(obs)
    for ob_time in ob_times:
        ot = pandas.to_datetime(ob_time)
        ensemble = twcr.load(variable, ot, version=version)
        # Units hack - assumes obs in hPa (if prmsl)
        if variable == 'prmsl':
            ensemble.data = ensemble.data / 100.0  # to hPa
        interpolator = iris.analysis.Linear().interpolator(
            ensemble, ['latitude', 'longitude'])
        this_time = obs['dtm'][obs['dtm'] == ob_time].index
        for ob_idx in this_time:
            at_ob = interpolator(
                [obs.latitude[ob_idx], obs.longitude[ob_idx]])
            results[ob_idx] = at_ob.data

    return pandas.Series(results, index=old_idx)
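A hedged usage sketch for qc_compare_reanalysis; the coordinates and date below are hypothetical, and the 20CR files for that date must already have been fetched:

import datetime
import pandas

obs = pandas.DataFrame({
    'latitude': [51.5, 48.9],
    'longitude': [-0.1, 2.3],
    'dtm': [datetime.datetime(1903, 2, 27, 6)] * 2,
})
ens = qc_compare_reanalysis(obs, variable='prmsl', version='2c')
# ens[i] holds the reanalysis ensemble (in hPa) at observation i's time and place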
Code Example #4
def get_compressed(year,month,day,hour):
    prmsl=twcr.load('prmsl',datetime.datetime(year,month,day,hour),
                               version='2c')
    prmsl=to_analysis_grid(prmsl.extract(iris.Constraint(member=1)))
    t2m=twcr.load('air.2m',datetime.datetime(year,month,day,hour),
                               version='2c')
    t2m=to_analysis_grid(t2m.extract(iris.Constraint(member=1)))
    u10m=twcr.load('uwnd.10m',datetime.datetime(year,month,day,hour),
                               version='2c')
    u10m=to_analysis_grid(u10m.extract(iris.Constraint(member=1)))
    v10m=twcr.load('vwnd.10m',datetime.datetime(year,month,day,hour),
                               version='2c')
    v10m=to_analysis_grid(v10m.extract(iris.Constraint(member=1)))
    insol=to_analysis_grid(load_insolation(year,month,day,hour))

    # Convert the validation data into tensor format
    t2m_t = tf.convert_to_tensor(normalise_t2m(t2m.data),numpy.float32)
    t2m_t = tf.reshape(t2m_t,[79,159,1])
    prmsl_t = tf.convert_to_tensor(normalise_prmsl(prmsl.data),numpy.float32)
    prmsl_t = tf.reshape(prmsl_t,[79,159,1])
    u10m_t = tf.convert_to_tensor(normalise_wind(u10m.data),numpy.float32)
    u10m_t = tf.reshape(u10m_t,[79,159,1])
    v10m_t = tf.convert_to_tensor(normalise_wind(v10m.data),numpy.float32)
    v10m_t = tf.reshape(v10m_t,[79,159,1])
    insol_t = tf.convert_to_tensor(normalise_insolation(insol.data),numpy.float32)
    insol_t = tf.reshape(insol_t,[79,159,1])

    ict = tf.concat([t2m_t,prmsl_t,u10m_t,v10m_t,insol_t],2) # Now [79,159,5]
    ict = tf.reshape(ict,[1,79,159,5])
    result = autoencoder.predict_on_batch(ict)
    result = tf.reshape(result,[79,159,5])
    ls = encoder.predict_on_batch(ict)
    
    # Convert the reconstructed fields back to unnormalised cubes
    t2m_r = t2m.copy()
    t2m_r.data = unnormalise_t2m(result.numpy()[:, :, 0])
    prmsl_r = prmsl.copy()
    prmsl_r.data = unnormalise_prmsl(result.numpy()[:, :, 1])
    u10m_r = u10m.copy()
    u10m_r.data = unnormalise_wind(result.numpy()[:, :, 2])
    v10m_r = v10m.copy()
    v10m_r.data = unnormalise_wind(result.numpy()[:, :, 3])
    return {'t2m': t2m_r, 'prmsl': prmsl_r, 'u10m': u10m_r,
            'v10m': v10m_r, 'ls': ls}
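The normalise_*/unnormalise_* helpers used here are defined elsewhere in the project; Code Examples #15, #19 and #23 show the pressure pair (subtract 101325 Pa, divide by 3000). A minimal round-trip check on a plain numpy array, using that definition:

import numpy

def normalise_prmsl(x):
    return (x - 101325.0) / 3000.0

def unnormalise_prmsl(x):
    return x * 3000.0 + 101325.0

field = numpy.array([98000.0, 101325.0, 103500.0])
assert numpy.allclose(unnormalise_prmsl(normalise_prmsl(field)), field)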
Code Example #5
def load_daily_scout(year, month, day, version):
    e = []
    for hour in (0, 3, 6, 9, 12, 15, 18, 21):
        f = twcr.load('prmsl',
                      datetime.datetime(year, month, day, hour),
                      version=version)
        e.append(f)
    e = iris.cube.CubeList(e).merge_cube()
    e = e.collapsed('time', iris.analysis.MEAN)
    return e
Code Example #6
def load_daily(year, month, day):
    e = []
    for hour in (0, 3, 6, 9, 12, 15, 18, 21):
        f = twcr.load('tmp',
                      datetime.datetime(year, month, day, hour),
                      height=2,
                      version='4.6.1')
        e.append(f)
    e = iris.cube.CubeList(e).merge_cube()
    e = e.collapsed('time', iris.analysis.MEAN)
    return (e)
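A usage sketch, assuming the version '4.6.1' files for the date have been fetched; the daily-mean cube keeps its 'member' coordinate, so the ensemble mean is one more collapse:

import iris.analysis

daily = load_daily(1903, 2, 27)                             # mean over 8 timesteps
daily_mean = daily.collapsed('member', iris.analysis.MEAN)  # ensemble mean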
Code Example #7
File: test_load.py  Project: pbrohan/IRData
def test_load_prmsl_interpolated(self):
    fake_data_file('prmsl', version, 2010)
    with patch.object(iris, 'load_cube',
                      side_effect=fake_cube) as mock_load:
        tc = twcr.load('prmsl',
                       datetime.datetime(2010, 3, 12, 9),
                       version=version)
    # Right dimensions
    self.assertEqual(len(tc.coords()), 4)
    # Right ensemble dimension name?
    self.assertEqual(tc.coords()[0].long_name, 'member')
    # Right time? (965937 hours since 1900-01-01 00:00 is 2010-03-12 09:00)
    self.assertEqual(tc.coords()[3].points[0], 965937)
Code Example #8
def get_cycles(dte):

    ic = twcr.load('air.2m',
                   datetime.datetime(dte.year, dte.month, dte.day, dte.hour),
                   version='2c')
    ic = ic.extract(iris.Constraint(member=1))
    ic = rr_cube(ic)
    ic = normalise_t2m(ic)
    t2m = tf.convert_to_tensor(ic.data, numpy.float32)
    t2m = tf.reshape(t2m, [79, 159, 1])
    ic = twcr.load('prmsl',
                   datetime.datetime(dte.year, dte.month, dte.day, dte.hour),
                   version='2c')
    ic = ic.extract(iris.Constraint(member=1))
    ic = rr_cube(ic)
    ic = normalise_prmsl(ic)
    prmsl = tf.convert_to_tensor(ic.data, numpy.float32)
    prmsl = tf.reshape(prmsl, [79, 159, 1])
    ic = twcr.load('uwnd.10m',
                   datetime.datetime(dte.year, dte.month, dte.day, dte.hour),
                   version='2c')
    ic = ic.extract(iris.Constraint(member=1))
    ic = rr_cube(ic)
    ic = normalise_wind(ic)
    uwnd = tf.convert_to_tensor(ic.data, numpy.float32)
    uwnd = tf.reshape(uwnd, [79, 159, 1])
    ic = twcr.load('vwnd.10m',
                   datetime.datetime(dte.year, dte.month, dte.day, dte.hour),
                   version='2c')
    ic = ic.extract(iris.Constraint(member=1))
    ic = rr_cube(ic)
    ic = normalise_wind(ic)
    vwnd = tf.convert_to_tensor(ic.data, numpy.float32)
    vwnd = tf.reshape(vwnd, [79, 159, 1])
    ict = tf.concat([t2m, prmsl, uwnd, vwnd], 2)
    ict = tf.reshape(ict, [1, 79, 159, 4])

    res = encoder.predict_on_batch(ict)
    return (res)
Code Example #9
def prepare_data(date,
                 purpose='training',
                 source='20CR2c',
                 variable='prmsl',
                 member=1,
                 normalise=None,
                 opfile=None):
    """Make tf.load-able files, suitably normalised for training ML models 

    Data will be stored in directory $SCRATCH/Machine-Learning-experiments.

    Args:
        date (obj:`datetime.datetime`): datetime to get data for.
        purpose (:obj:`str`): 'training' (default) or 'test'.
        source (:obj:`str`): Where to get the data from - at the moment, only '20CR2c' is supported.
        variable (:obj:`str`): Variable to use (e.g. 'prmsl').
        member (:obj:`int`): Ensemble member to use. Defaults to 1.
        normalise (:obj:`func`): Function to normalise the data (to mean=0, sd=1). Must take an :obj:`iris.cube.Cube` as argument and return a normalised cube. If None (default), use a standard normalisation function (see :func:`normalise`).
        opfile (:obj:`str`): Output file name. Defaults to a standard path under $SCRATCH/Machine-Learning-experiments.

    Returns:
        Nothing, but creates, as side effect, a tf.load-able file with the normalised data for the given source, variable, and date.

    Raises:
        ValueError: Unsupported source, or can't load the original data, or normalisation failed.

    |
    """
    if opfile is None:
        opfile = ("%s/Machine-Learning-experiments/datasets/%s/%s/%s" %
                  (os.getenv('SCRATCH'), source, variable, purpose))
    if not os.path.isdir(os.path.dirname(opfile)):
        os.makedirs(os.path.dirname(opfile))

    ic = twcr.load(variable,
                   datetime.datetime(date.year, date.month, date.day,
                                     date.hour),
                   version='2c')  # '20CR2c' is the only supported source

    # Reduce to selected ensemble member
    ic = ic.extract(iris.Constraint(member=member))

    # Normalise (to mean=0, sd=1)
    if normalise is None:
        normalise = get_normalise_function(source, variable)
    ic.data = normalise(ic.data)

    # Convert to Tensor
    ict = tf.convert_to_tensor(ic.data, numpy.float32)

    # Write to tfrecord file
    sict = tf.io.serialize_tensor(ict)
    tf.io.write_file(opfile, sict)
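A hedged invocation sketch; the date is hypothetical, and the 20CR2c files for it must already have been fetched:

import datetime

prepare_data(datetime.datetime(1903, 2, 27, 6),
             purpose='training',
             source='20CR2c',
             variable='prmsl',
             member=1)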
Code Example #10
    args.month,
    args.day,
)
if not os.path.isdir(args.opdir):
    os.makedirs(args.opdir)

# Get the catchment mask
cGrid = iris.load_cube("%s/../../make_catchment_mask/mask.PRMSL.256x512.nc" %
                       os.path.dirname(__file__))
coord_s = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
cGrid.coord("latitude").coord_system = coord_s
cGrid.coord("longitude").coord_system = coord_s

dte = datetime.datetime(args.year, args.month, args.day, int(args.hour),
                        int((args.hour % 1) * 60))
rdata = twcr.load(args.var, dte, version=args.version)
rdata = iris.util.squeeze(rdata)
rdata.coord("latitude").guess_bounds()
rdata.coord("longitude").guess_bounds()
grid_areas = iris.analysis.cartography.area_weights(rdata)
cGrid = cGrid.regrid(rdata, iris.analysis.Linear())
mdata = iris.util.broadcast_to_shape(cGrid.data, rdata.data.shape, (1, 2))
rdata.data = numpy.ma.masked_where(mdata < 0.5, rdata.data)
ensemble = rdata.collapsed(["longitude", "latitude"],
                           iris.analysis.MEAN,
                           weights=grid_areas)

pickle.dump(
    ensemble.data.data,
    open(
        "%s/%02d_%02d.pkl" %
Code Example #11
parser.add_argument("--day", help="Day of month", type=int, required=True)
parser.add_argument("--hour",
                    help="Hour of day (0 to 23)",
                    type=int,
                    required=True)
parser.add_argument("--member",
                    help="Ensemble member",
                    default=1,
                    type=int,
                    required=False)

args = parser.parse_args()

# Get the 20CR data
ic = twcr.load('prmsl',
               datetime.datetime(args.year, args.month, args.day, args.hour),
               version='2c')
ic = ic.extract(iris.Constraint(member=args.member))

# Normalisation - Pa to mean=0, sd=1 - and back
normalise = ML_Utilities.get_normalise_function(source='20CR2c',
                                                variable='prmsl')
unnormalise = ML_Utilities.get_unnormalise_function(source='20CR2c',
                                                    variable='prmsl')

# Get the original autoencoder
model_save_file = ("%s/Machine-Learning-experiments/simple_autoencoder/" +
                   "saved_models/Epoch_%04d") % (os.getenv('SCRATCH'), 100)
autoencoder_original = tf.keras.models.load_model(model_save_file)

# Run the data through the original autoencoder and convert back to iris cube
Code Example #12
                    type=float,
                    required=True)
parser.add_argument("--opdir",
                    help="Directory for output files",
                    default=".",
                    type=str,
                    required=False)
args = parser.parse_args()
if not os.path.isdir(args.opdir):
    os.makedirs(args.opdir)

dte = datetime.datetime(args.year, args.month, args.day, int(args.hour),
                        int(args.hour % 1 * 60))

# Plot the temperature
t2m = twcr.load('air.2m', dte, version='2c')
t2m = t2m.extract(iris.Constraint(member=1))
s = t2m.data.shape
t2m.data = qcut(t2m.data.flatten(), 20, labels=False).reshape(s)

# Define the figure (page size, background color, resolution, ...)
aspect = 16 / 9.0
fig = Figure(
    figsize=(10.8 * aspect, 10.8),  # HD video 
    dpi=100,
    facecolor=(0.88, 0.88, 0.88, 1),
    edgecolor=None,
    linewidth=0.0,
    frameon=False,  # Don't draw a frame
    subplotpars=None,
    tight_layout=None)
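The qcut call above (from pandas) replaces each grid-point value with its quantile bin (0-19), so the colour scale highlights spatial structure rather than absolute values. A tiny self-contained sketch of the same remap:

import numpy
from pandas import qcut

x = numpy.random.randn(10, 12)
s = x.shape
binned = qcut(x.flatten(), 20, labels=False).reshape(s)  # integers 0..19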
Code Example #13
                       [0.85380,0.22170,0.02677],[0.84662,0.21407,0.02487],[0.83926,0.20654,0.02305],
                       [0.83172,0.19912,0.02131],[0.82399,0.19182,0.01966],[0.81608,0.18462,0.01809],
                       [0.80799,0.17753,0.01660],[0.79971,0.17055,0.01520],[0.79125,0.16368,0.01387],
                       [0.78260,0.15693,0.01264],[0.77377,0.15028,0.01148],[0.76476,0.14374,0.01041],
                       [0.75556,0.13731,0.00942],[0.74617,0.13098,0.00851],[0.73661,0.12477,0.00769],
                       [0.72686,0.11867,0.00695],[0.71692,0.11268,0.00629],[0.70680,0.10680,0.00571],
                       [0.69650,0.10102,0.00522],[0.68602,0.09536,0.00481],[0.67535,0.08980,0.00449],
                       [0.66449,0.08436,0.00424],[0.65345,0.07902,0.00408],[0.64223,0.07380,0.00401],
                       [0.63082,0.06868,0.00401],[0.61923,0.06367,0.00410],[0.60746,0.05878,0.00427],
                       [0.59550,0.05399,0.00453],[0.58336,0.04931,0.00486],[0.57103,0.04474,0.00529],
                       [0.55852,0.04028,0.00579],[0.54583,0.03593,0.00638],[0.53295,0.03169,0.00705],
                       [0.51989,0.02756,0.00780],[0.50664,0.02354,0.00863],[0.49321,0.01963,0.00955],
                       [0.47960,0.01583,0.01055]]

# Plot the temperature
t2m=twcr.load('air.2m',dte,version='2c')
t2m=t2m.extract(iris.Constraint(member=1))
# Regrid to plot coordinates
plot_cube=mg.utils.dummy_cube(ax,0.25)
t2m = t2m.regrid(plot_cube,iris.analysis.Linear())
# Re-map to highlight small differences
s=t2m.data.shape
t2m.data=qcut(t2m.data.flatten(),20,labels=False).reshape(s)
# Plot as a colour map
lats = t2m.coord('latitude').points
lons = t2m.coord('longitude').points
t2m_img=ax.pcolorfast(lons, lats, t2m.data,
                      cmap=ListedColormap(turbo_colormap_data),
                      vmin=0,
                      vmax=20,
                      alpha=0.5)
Code Example #14
# Plot the hidden layer weights
def plot_hidden(weights):
    # Single axes - var v. time
    ax = fig.add_axes([0.05, 0.425, 0.9, 0.15])
    # Axes ranges from data
    ax.set_xlim(-0.6, len(weights) - 0.4)
    ax.set_ylim(0, numpy.max(numpy.abs(weights)) * 1.05)
    ax.bar(x=range(len(weights)),
           height=numpy.abs(weights[order]),
           color='grey',
           tick_label=order)


# Get 20CR data for the grid metadata
ic = twcr.load('prmsl', datetime.datetime(1969, 3, 12, 6), version='2c')
ic = ic.extract(iris.Constraint(member=1))

# Get the 9 neuron autoencoder
model_save_file = (("%s/Machine-Learning-experiments/" +
                    "simple_autoencoder_activations/elu/" +
                    "saved_models/Epoch_%04d")) % (os.getenv('SCRATCH'), 100)
autoencoder = tf.keras.models.load_model(model_save_file)

# Get the order of the hidden weights - most to least important
order = numpy.argsort(numpy.abs(autoencoder.get_weights()[1]))[::-1]

# Make a comparison plot - Input, hidden, and output weights
fig = Figure(
    figsize=(10, 12),  # Width, Height (inches)
    dpi=100,
Code Example #15
    # Longitudes cover -180 to 180 with 159 values
    lon_values = numpy.arange(-180, 181, 360 / 158)
    longitude = iris.coords.DimCoord(lon_values,
                                     standard_name='longitude',
                                     units='degrees_east',
                                     coord_system=cs)
    dummy_data = numpy.zeros((len(lat_values), len(lon_values)))
    dummy_cube = iris.cube.Cube(dummy_data,
                                dim_coords_and_dims=[(latitude, 0),
                                                     (longitude, 1)])
    n_cube = cbe.regrid(dummy_cube, iris.analysis.Linear())
    return (n_cube)


# Get the 20CR data
ic = twcr.load('prmsl', datetime.datetime(2009, 3, 12, 18), version='2c')
ic = rr_cube(ic.extract(iris.Constraint(member=1)))

# Get the autoencoder
model_save_file = ("%s/Machine-Learning-experiments/" +
                   "convolutional_autoencoder_perturbations/" +
                   "rotated+scaled/saved_models/Epoch_%04d") % (
                       os.getenv('SCRATCH'), 50)
autoencoder = tf.keras.models.load_model(model_save_file)


# Normalisation - Pa to mean=0, sd=1 - and back
def normalise(x):
    x -= 101325
    x /= 3000
    return x
Code Example #16
                    required=True)

args = parser.parse_args()

start = datetime.datetime(1902, 8, 1, 0) + datetime.timedelta(hours=args.hour)
end = datetime.datetime(1902, 8, 31, 23, 59)
opdir = "%s/simple_climatologies/20CRv3/August_1902" % os.getenv('SCRATCH')
if not os.path.isdir(opdir):
    os.makedirs(opdir)

# Average the field over the period
accum = None
current = start
count = 0
while current < end:
    rdata = twcr.load(args.var, current, level=925, version=args.version)
    rdata = rdata.collapsed('member', iris.analysis.MEAN)
    if accum is None:
        accum = rdata
    else:
        accum.data = accum.data + rdata.data
    count = count + 1
    current = current + datetime.timedelta(days=1)

# pickle the field mean
accum.data = accum.data / count
opfile = "%s/%s_925_%s.pkl" % (opdir, args.var, args.version)
fh = open(opfile, 'wb')
pickle.dump(accum, fh)
fh.close()
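Reading the saved climatology back is a single pickle.load; a sketch assuming var 'tmp' and version '4.5.1' (hypothetical values, matching the opfile pattern above):

import os
import pickle

clim_file = "%s/simple_climatologies/20CRv3/August_1902/tmp_925_4.5.1.pkl" % \
    os.getenv('SCRATCH')
with open(clim_file, 'rb') as fh:
    clim = pickle.load(fh)  # iris cube: time-mean of the ensemble-mean field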
Code Example #17
# China-centred projection
projection=ccrs.RotatedPole(pole_longitude=287.5, pole_latitude=55.5)
scale=25
extent=[scale*-1*aspect/3.0,scale*aspect/3.0,scale*-1,scale]

# On the left - spaghetti-contour plot of original 20CRv3
ax_left=fig.add_axes([0.005,0.01,0.323,0.98],projection=projection)
ax_left.set_axis_off()
ax_left.set_extent(extent, crs=projection)
ax_left.background_patch.set_facecolor((0.88,0.88,0.88,1))
mg.background.add_grid(ax_left)
land_img_left=ax_left.background_img(name='GreyT', resolution='low')

# 20CRv3 data
prmsl=twcr.load('PRMSL',dte,version='3')

# 20CRv3 observations
obs_t=twcr.load_observations_fortime(dte,version='3')

# Plot the observations
mg.observations.plot(ax_left,obs_t,radius=0.2)

# PRMSL spaghetti plot
mg.pressure.plot(ax_left,prmsl,scale=0.01,type='spaghetti',
                   resolution=0.25,
                   levels=numpy.arange(875,1050,10),
                   colors='blue',
                   label=False,
                   linewidths=0.1)
Code Example #18
import datetime
import argparse
import os
import math
import pickle

import Meteorographica as mg

import matplotlib
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
import cartopy
import cartopy.crs as ccrs

import iris
import IRData.twcr as twcr
import tensorflow as tf

# Get the 20CR data
source = twcr.load('prmsl', datetime.datetime(2009, 3, 12, 18), version='2c')
source = source.extract(iris.Constraint(member=1))
target = twcr.load('prmsl',
                   datetime.datetime(2009, 3, 12, 18) +
                   datetime.timedelta(hours=6),
                   version='2c')
target = target.extract(iris.Constraint(member=1))
target = target - source


# Need to resize data so its dimensions are a multiple of 8 (3*2-fold pool)
class ResizeLayer(tf.keras.layers.Layer):
    def __init__(self, newsize=None, **kwargs):
        super(ResizeLayer, self).__init__(**kwargs)
        self.resize_newsize = newsize
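ResizeLayer is truncated here; Code Example #29 shows its tail (a call body ending in self.resize_newsize, align_corners=True, plus get_config). A hedged reconstruction of the whole layer, assuming the TF1-style tf.image.resize_images API that align_corners implies:

import tensorflow as tf

class ResizeLayer(tf.keras.layers.Layer):
    def __init__(self, newsize=None, **kwargs):
        super(ResizeLayer, self).__init__(**kwargs)
        self.resize_newsize = newsize

    def call(self, input):
        # Bilinear resize to the target (height, width)
        return tf.image.resize_images(input, self.resize_newsize,
                                      align_corners=True)

    def get_config(self):
        return {'newsize': self.resize_newsize}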
Code Example #19
def normalise_prmsl(p):
    res = p.copy()
    res.data -= 101325
    res.data /= 3000
    return res


def unnormalise_prmsl(p):
    res = p.copy()
    res.data *= 3000
    res.data += 101325
    return res


# Load the validation data
prmsl = twcr.load('prmsl',
                  datetime.datetime(args.year, args.month, args.day,
                                    args.hour),
                  version='2c')
prmsl = tensor_cube(prmsl.extract(iris.Constraint(member=1)))
t2m = twcr.load('air.2m',
                datetime.datetime(args.year, args.month, args.day, args.hour),
                version='2c')
t2m = tensor_cube(t2m.extract(iris.Constraint(member=1)))
u10m = twcr.load('uwnd.10m',
                 datetime.datetime(args.year, args.month, args.day, args.hour),
                 version='2c')
u10m = tensor_cube(u10m.extract(iris.Constraint(member=1)))
v10m = twcr.load('vwnd.10m',
                 datetime.datetime(args.year, args.month, args.day, args.hour),
                 version='2c')
v10m = tensor_cube(v10m.extract(iris.Constraint(member=1)))
Code Example #20
File: compare_tpuv.py  Project: philip-brohan/ML_GCM
    prmsl = prmsl.regrid(prmsl_pc, iris.analysis.Linear())
    lats = prmsl.coord('latitude').points
    lons = prmsl.coord('longitude').points
    lons, lats = numpy.meshgrid(lons, lats)
    CS = ax.contour(lons,
                    lats,
                    prmsl.data * 0.01,
                    colors='black',
                    linewidths=0.5,
                    alpha=1.0,
                    levels=numpy.arange(870, 1050, 10),
                    zorder=200)


# Load the validation data
prmsl = twcr.load('prmsl', datetime.datetime(2010, 3, 12, 18), version='2c')
prmsl = to_analysis_grid(prmsl.extract(iris.Constraint(member=1)))
t2m = twcr.load('air.2m', datetime.datetime(2010, 3, 12, 18), version='2c')
t2m = to_analysis_grid(t2m.extract(iris.Constraint(member=1)))
u10m = twcr.load('uwnd.10m', datetime.datetime(2010, 3, 12, 18), version='2c')
u10m = to_analysis_grid(u10m.extract(iris.Constraint(member=1)))
v10m = twcr.load('vwnd.10m', datetime.datetime(2010, 3, 12, 18), version='2c')
v10m = to_analysis_grid(v10m.extract(iris.Constraint(member=1)))
insol = to_analysis_grid(load_insolation(2010, 3, 12, 18))

# Convert the validation data into tensor format
t2m_t = tf.convert_to_tensor(normalise_t2m(t2m.data), numpy.float32)
t2m_t = tf.reshape(t2m_t, [79, 159, 1])
prmsl_t = tf.convert_to_tensor(normalise_prmsl(prmsl.data), numpy.float32)
prmsl_t = tf.reshape(prmsl_t, [79, 159, 1])
u10m_t = tf.convert_to_tensor(normalise_wind(u10m.data), numpy.float32)
Code Example #21
    # Longitudes cover -180 to 180 with 159 values
    lon_values = numpy.arange(-180, 181, 360 / 158)
    longitude = iris.coords.DimCoord(lon_values,
                                     standard_name='longitude',
                                     units='degrees_east',
                                     coord_system=cs)
    dummy_data = numpy.zeros((len(lat_values), len(lon_values)))
    dummy_cube = iris.cube.Cube(dummy_data,
                                dim_coords_and_dims=[(latitude, 0),
                                                     (longitude, 1)])
    n_cube = cbe.regrid(dummy_cube, iris.analysis.Linear())
    return (n_cube)


# Get the 20CR data
ic = twcr.load('prate', datetime.datetime(2010, 3, 12, 18), version='2c')
ic = rr_cube(ic.extract(iris.Constraint(member=1)))
ic.data = ic.data * 1000 + 1.001  # offset so the log is finite at zero precipitation
ic.data = numpy.log(ic.data)

# Get the autoencoder
model_save_file = ("%s/Machine-Learning-experiments/" +
                   "convolutional_autoencoder_perturbations/" +
                   "rotated+scaled_precip/saved_models/Epoch_%04d") % (
                       os.getenv('SCRATCH'), 50)
autoencoder = tf.keras.models.load_model(model_save_file)

fig = Figure(
    figsize=(9.6, 10.8),  # 1/2 HD
    dpi=100,
    facecolor=(0.88, 0.88, 0.88, 1),
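Code Example #21 maps precipitation rate into log space before encoding (multiply by 1000, add 1.001, take the log); the corresponding inverse transform, just that arithmetic reversed, would be:

ic.data = numpy.exp(ic.data)
ic.data = (ic.data - 1.001) / 1000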
Code Example #22
# China-centred projection
projection = ccrs.RotatedPole(pole_longitude=287.5, pole_latitude=55.5)
scale = 25
extent = [scale * -1 * aspect / 3.0, scale * aspect / 3.0, scale * -1, scale]

# On the left - spaghetti-contour plot of original 20CRv3
ax_left = fig.add_axes([0.005, 0.01, 0.323, 0.98], projection=projection)
ax_left.set_axis_off()
ax_left.set_extent(extent, crs=projection)
ax_left.background_patch.set_facecolor((0.88, 0.88, 0.88, 1))
mg.background.add_grid(ax_left)
land_img_left = ax_left.background_img(name='GreyT', resolution='low')

# Scout 4.6.1 data
prmsl = twcr.load('prmsl', dte, version='4.6.1')
obs_t = twcr.load_observations_fortime(dte, version='4.6.1')

# Plot the observations
mg.observations.plot(ax_left, obs_t, radius=0.2)

# PRMSL spaghetti plot
mg.pressure.plot(ax_left,
                 prmsl,
                 scale=0.01,
                 type='spaghetti',
                 resolution=0.25,
                 levels=numpy.arange(875, 1050, 10),
                 colors='blue',
                 label=False,
                 linewidths=0.1)
Code Example #23
parser.add_argument("--hour", help="Hour of day (0 to 23)",
                    type=int,required=True)
parser.add_argument("--member", help="Ensemble member",
                    default=1,type=int,required=False)
parser.add_argument("--version", help="20CR version",
                    default='2c',type=str,required=False)
parser.add_argument("--variable", help="20CR variable",
                    default='prmsl',type=str,required=False)
parser.add_argument("--epoch", help="Model at which epoch?",
                    type=int,required=True)

args = parser.parse_args()

# Get the 20CR data
ic=twcr.load(args.variable,datetime.datetime(args.year,args.month,
                                            args.day,args.hour),
                           version=args.version)
ic=ic.extract(iris.Constraint(member=args.member))

# Get the autoencoder
model_save_file=("%s/Machine-Learning-experiments/deep_autoencoder/"+
                 "saved_models/Epoch_%04d") % (
                 os.getenv('SCRATCH'),args.epoch)
autoencoder=tf.keras.models.load_model(model_save_file)

# Normalisation - Pa to mean=0, sd=1 - and back
def normalise(x):
    x -= 101325
    x /= 3000
    return x
Code Example #24
# South America centred projection
projection = ccrs.RotatedPole(pole_longitude=120, pole_latitude=125)
scale = 25
extent = [scale * -1 * aspect / 3.0, scale * aspect / 3.0, scale * -1, scale]

# On the left - spaghetti-contour plot of original 20CRv3
ax_left = fig.add_axes([0.005, 0.01, 0.323, 0.98], projection=projection)
ax_left.set_axis_off()
ax_left.set_extent(extent, crs=projection)
ax_left.background_patch.set_facecolor((0.88, 0.88, 0.88, 1))
mg.background.add_grid(ax_left)
land_img_left = ax_left.background_img(name='GreyT', resolution='low')

# 20CRv3 data
prmsl = twcr.load('prmsl', dte, version='4.5.1')
obs_t = twcr.load_observations_fortime(dte, version='4.5.1')

# Plot the observations
mg.observations.plot(ax_left, obs_t, radius=0.2)

# PRMSL spaghetti plot
mg.pressure.plot(ax_left,
                 prmsl,
                 scale=0.01,
                 type='spaghetti',
                 resolution=0.25,
                 levels=numpy.arange(875, 1050, 10),
                 colors='blue',
                 label=False,
                 linewidths=0.1)
Code Example #25
ax_one.background_patch.set_facecolor((0.88, 0.88, 0.88, 1))
ax_three.background_patch.set_facecolor((0.88, 0.88, 0.88, 1))
mg.background.add_grid(ax_one)
mg.background.add_grid(ax_three)
land_img_one = ax_one.background_img(name='GreyT', resolution='low')
land_img_three = ax_three.background_img(name='GreyT', resolution='low')

# Get the DWR observations for that afternoon
obs = DWR.load_observations('prmsl', dte - datetime.timedelta(hours=0.1),
                            dte + datetime.timedelta(hours=0.1))
# Reduce to Fort William only
obs_assimilate = obs[obs.name == 'FORTWILLIAM'].copy()
obs_assimilate.value = obs_assimilate.value * 100  # to Pa

# 20CRv3 data
prmsl = twcr.load('prmsl', dte, version='4.5.1')
# Get the observations used in 20CRv3
obs_t = twcr.load_observations_fortime(dte, version='4.5.1')
# Filter to those assimilated and near the UK
obs_s = obs_t.loc[((obs_t['Latitude'] > 0) & (obs_t['Latitude'] < 90)) & (
    (obs_t['Longitude'] > 240) | (obs_t['Longitude'] < 100))].copy()

# Update mslp by assimilating Fort William ob.
prmsl2 = DIYA.constrain_cube(
    prmsl,
    lambda dte: twcr.load('prmsl', dte, version='4.5.1'),
    obs=obs_assimilate,
    obs_error=100,
    random_state=RANDOM_SEED,
    model=sklearn.linear_model.LinearRegression(),
    lat_range=(20, 85),
Code Example #26
    longitude = iris.coords.DimCoord(lon_values,
                                     standard_name='longitude',
                                     units='degrees_east',
                                     coord_system=cs)
    dummy_data = numpy.zeros((len(lat_values), len(lon_values)))
    dummy_cube = iris.cube.Cube(dummy_data,
                                dim_coords_and_dims=[(latitude, 0),
                                                     (longitude, 1)])
    n_cube = cbe.regrid(dummy_cube, iris.analysis.Linear())
    return (n_cube)


if args.source == '20CR2c':
    ic = twcr.load(
        args.variable,
        datetime.datetime(args.year, args.month, args.day, args.hour) +
        datetime.timedelta(hours=6),
        version='2c')
    ic = ic.extract(iris.Constraint(member=args.member))
    ic = rr_cube(ic)
    # Normalise to range 0-1 (approx)
    if args.variable == 'uwnd.10m' or args.variable == 'vwnd.10m':
        ic = normalise_wind(ic)
    elif args.variable == 'air.2m':
        ic = normalise_t2m(ic)
    elif args.variable == 'prate':
        ic = normalise_precip(ic)
    elif args.variable == 'prmsl':
        ic = normalise_prmsl(ic)

else:
Code Example #27
File: 20CRv2c_4var.py  Project: philip-brohan/ML_GCM
                    type=float,
                    required=False)
parser.add_argument("--opdir", help="Directory for output files",
                    default="%s/images/20CRv2c_global_4var" % \
                                           os.getenv('SCRATCH'),
                    type=str,required=False)

args = parser.parse_args()
if not os.path.isdir(args.opdir):
    os.makedirs(args.opdir)

dte = datetime.datetime(args.year, args.month, args.day, int(args.hour),
                        int(args.hour % 1 * 60))

# Load the model data - dealing sensibly with missing fields
t2m = twcr.load('air.2m', dte, version='2c')
t2m = t2m.extract(iris.Constraint(member=1))
t2m = quantile_normalise_t2m(t2m)

u10m = twcr.load('uwnd.10m', dte, version='2c')
u10m = u10m.extract(iris.Constraint(member=1))
v10m = twcr.load('vwnd.10m', dte, version='2c')
v10m = v10m.extract(iris.Constraint(member=1))
prmsl = twcr.load('prmsl', dte, version='2c')
prmsl = prmsl.extract(iris.Constraint(member=1))

mask = iris.load_cube("%s/fixed_fields/land_mask/opfc_global_2019.nc" %
                      os.getenv('SCRATCH'))

# Define the figure (page size, background color, resolution, ...)
fig = Figure(
Code Example #28
# Assemble local copies of some data to be used in Meteorographica examples.
#
# Uses IRData (http://brohan.org/IRData/) to get the data.

import datetime
import iris
import IRData.twcr as twcr
import pickle
import gzip

dte = datetime.datetime(1987, 10, 16, 6)
for var in ('prmsl', 'uwnd.10m', 'vwnd.10m', 'prate'):
    twcr.fetch(var, dte, version='2c')
    cube = twcr.load(var, dte, version='2c')
    fname = "20CR2c.%04d%02d%02d%02d.%s.nc" % (dte.year, dte.month, dte.day,
                                               dte.hour, var)
    #iris.save(cube,fname,netcdf_format='NETCDF4',
    #          zlib=True,complevel=9)
twcr.fetch_observations(dte, version='2c')
obs = twcr.load_observations_fortime(dte, version='2c')
f = gzip.open(
    "20CR2c.%04d%02d%02d%02d.observations.pklz" %
    (dte.year, dte.month, dte.day, dte.hour), "wb")
pickle.dump(obs, f)
f.close()
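To read the archived observations back, a minimal sketch (the filename follows the pattern written above):

import gzip
import pickle

with gzip.open("20CR2c.1987101606.observations.pklz", "rb") as f:
    obs = pickle.load(f)  # pandas.DataFrame of the assimilated observations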
Code Example #29
                                      self.resize_newsize,
                                      align_corners=True)

    def get_config(self):
        return {'newsize': self.resize_newsize}


year = 1916
month = 3
day = 12
hour = 6

# Get the 20CR data
#ic=twcr.load('prmsl',datetime.datetime(2009,3,12,18),
ic = twcr.load('prmsl',
               datetime.datetime(year, month, day, hour),
               version='2c')
ic = ic.extract(iris.Constraint(member=1))
#ic=ic.collapsed('member', iris.analysis.MEAN)

# Make the fake observations
obs = twcr.load_observations_fortime(datetime.datetime(1916, 3, 12, 6),
                                     version='2c')
ensemble = []
for index, row in obs.iterrows():
    ensemble.append(
        ic.interpolate([('latitude', row['Latitude']),
                        ('longitude', row['Longitude'])],
                       iris.analysis.Linear()).data.item())
ensemble = numpy.array(ensemble, dtype=numpy.float32)
ensemble = normalise(ensemble)
Code Example #30
# South America centred projection
projection = ccrs.RotatedPole(pole_longitude=120, pole_latitude=125)
scale = 25
extent = [scale * -1 * aspect / 3.0, scale * aspect / 3.0, scale * -1, scale]

# On the left - spaghetti-contour plot of original 20CRv3
ax_left = fig.add_axes([0.005, 0.01, 0.323, 0.98], projection=projection)
ax_left.set_axis_off()
ax_left.set_extent(extent, crs=projection)
ax_left.background_patch.set_facecolor((0.88, 0.88, 0.88, 1))
mg.background.add_grid(ax_left)
land_img_left = ax_left.background_img(name='GreyT', resolution='low')

# 20CRv3 data
at = twcr.load('tmp', dte, level=925, version='4.5.1')
aat = at.copy()
at.data = at.data - 273.15
clim = pickle.load(
    open(
        "%s/simple_climatologies/20CRv3/June_1902/air.2m_4.5.1.pkl" %
        os.getenv('SCRATCH'), "rb"))
for m in range(80):
    aat.data[m, :, :] = aat.data[m, :, :] - clim.data
obs_t = twcr.load_observations_fortime(dte, version='4.5.1')

# Plot the observations
mg.observations.plot(ax_left, obs_t, radius=0.2)

# PRMSL spaghetti plot
mg.pressure.plot(ax_left,