Code example #1
def get_compressed(year,month,day,hour):
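    # Load the 20CRv2c fields for this timestep (ensemble member 1) and
    #  regrid them to the analysis grid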
    prmsl=twcr.load('prmsl',datetime.datetime(year,month,day,hour),
                               version='2c')
    prmsl=to_analysis_grid(prmsl.extract(iris.Constraint(member=1)))
    t2m=twcr.load('air.2m',datetime.datetime(year,month,day,hour),
                               version='2c')
    t2m=to_analysis_grid(t2m.extract(iris.Constraint(member=1)))
    u10m=twcr.load('uwnd.10m',datetime.datetime(year,month,day,hour),
                               version='2c')
    u10m=to_analysis_grid(u10m.extract(iris.Constraint(member=1)))
    v10m=twcr.load('vwnd.10m',datetime.datetime(year,month,day,hour),
                               version='2c')
    v10m=to_analysis_grid(v10m.extract(iris.Constraint(member=1)))
    insol=to_analysis_grid(load_insolation(year,month,day,hour))

    # Convert the validation data into tensor format
    t2m_t = tf.convert_to_tensor(normalise_t2m(t2m.data),numpy.float32)
    t2m_t = tf.reshape(t2m_t,[79,159,1])
    prmsl_t = tf.convert_to_tensor(normalise_prmsl(prmsl.data),numpy.float32)
    prmsl_t = tf.reshape(prmsl_t,[79,159,1])
    u10m_t = tf.convert_to_tensor(normalise_wind(u10m.data),numpy.float32)
    u10m_t = tf.reshape(u10m_t,[79,159,1])
    v10m_t = tf.convert_to_tensor(normalise_wind(v10m.data),numpy.float32)
    v10m_t = tf.reshape(v10m_t,[79,159,1])
    insol_t = tf.convert_to_tensor(normalise_insolation(insol.data),numpy.float32)
    insol_t = tf.reshape(insol_t,[79,159,1])

    ict = tf.concat([t2m_t,prmsl_t,u10m_t,v10m_t,insol_t],2) # Now [79,159,5]
    ict = tf.reshape(ict,[1,79,159,5])
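    # Run the combined fields through the autoencoder, and also get the
    #  latent-space representation from the encoder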
    result = autoencoder.predict_on_batch(ict)
    result = tf.reshape(result,[79,159,5])
    ls = encoder.predict_on_batch(ict)
    
    # Convert the encoded fields back to unnormalised cubes 
    t2m_r=t2m.copy()
    t2m_r.data = tf.reshape(result.numpy()[:,:,0],[79,159]).numpy()
    t2m_r.data = unnormalise_t2m(t2m_r.data)
    prmsl_r=prmsl.copy()
    prmsl_r.data = tf.reshape(result.numpy()[:,:,1],[79,159]).numpy()
    prmsl_r.data = unnormalise_prmsl(prmsl_r.data)
    u10m_r=u10m.copy()
    u10m_r.data = tf.reshape(result.numpy()[:,:,2],[79,159]).numpy()
    u10m_r.data = unnormalise_wind(u10m_r.data)
    v10m_r=v10m.copy()
    v10m_r.data = tf.reshape(result.numpy()[:,:,3],[79,159]).numpy()
    v10m_r.data = unnormalise_wind(v10m_r.data)
    return {'t2m':t2m_r,'prmsl':prmsl_r,'u10m':u10m_r,'v10m':v10m_r,'ls':ls}
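
For orientation, here is a minimal usage sketch (not part of the original file): it calls get_compressed for one timestep and compares the reconstructed 2m temperature against the original field. The date and the RMS metric are illustrative assumptions.

# Hypothetical usage: measure how well the autoencoder reconstructs t2m
fields = get_compressed(2010, 3, 12, 18)
t2m_orig = twcr.load('air.2m', datetime.datetime(2010, 3, 12, 18), version='2c')
t2m_orig = to_analysis_grid(t2m_orig.extract(iris.Constraint(member=1)))
rms = numpy.sqrt(numpy.mean((fields['t2m'].data - t2m_orig.data) ** 2))
print("t2m reconstruction RMS error: %.2f K" % rms)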
Code example #2
File: compare_tpuv.py  Project: philip-brohan/ML_GCM
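# Load validation fields (ensemble member 1) and regrid to the analysis grid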
u10m = twcr.load('uwnd.10m', datetime.datetime(2010, 3, 12, 18), version='2c')
u10m = to_analysis_grid(u10m.extract(iris.Constraint(member=1)))
v10m = twcr.load('vwnd.10m', datetime.datetime(2010, 3, 12, 18), version='2c')
v10m = to_analysis_grid(v10m.extract(iris.Constraint(member=1)))
insol = to_analysis_grid(load_insolation(2010, 3, 12, 18))

# Convert the validation data into tensor format
t2m_t = tf.convert_to_tensor(normalise_t2m(t2m.data), numpy.float32)
t2m_t = tf.reshape(t2m_t, [79, 159, 1])
prmsl_t = tf.convert_to_tensor(normalise_prmsl(prmsl.data), numpy.float32)
prmsl_t = tf.reshape(prmsl_t, [79, 159, 1])
u10m_t = tf.convert_to_tensor(normalise_wind(u10m.data), numpy.float32)
u10m_t = tf.reshape(u10m_t, [79, 159, 1])
v10m_t = tf.convert_to_tensor(normalise_wind(v10m.data), numpy.float32)
v10m_t = tf.reshape(v10m_t, [79, 159, 1])
insol_t = tf.convert_to_tensor(normalise_insolation(insol.data), numpy.float32)
insol_t = tf.reshape(insol_t, [79, 159, 1])

# Get autoencoded versions of the validation data
model_save_file = ("%s/ML_GCM/autoencoder.tst/" + "Epoch_%04d/autoencoder") % (
    os.getenv('SCRATCH'), args.epoch)
autoencoder = tf.keras.models.load_model(model_save_file, compile=False)
ict = tf.concat([t2m_t, prmsl_t, u10m_t, v10m_t, insol_t], 2)  # Now [79,159,5]
ict = tf.reshape(ict, [1, 79, 159, 5])
result = autoencoder.predict_on_batch(ict)
result = tf.reshape(result, [79, 159, 5])

# Convert the encoded fields back to unnormalised cubes
t2m_r = t2m.copy()
t2m_r.data = tf.reshape(result.numpy()[:, :, 0], [79, 159]).numpy()
t2m_r.data = unnormalise_t2m(t2m_r.data)
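
The excerpt stops here; the remaining channels can presumably be converted back in the same way as in the get_compressed helper of code example #1 (channel order: t2m, prmsl, u10m, v10m, insolation). A sketch of that continuation:

# Presumed continuation, mirroring code example #1
prmsl_r = prmsl.copy()
prmsl_r.data = unnormalise_prmsl(result.numpy()[:, :, 1])
u10m_r = u10m.copy()
u10m_r.data = unnormalise_wind(result.numpy()[:, :, 2])
v10m_r = v10m.copy()
v10m_r.data = unnormalise_wind(result.numpy()[:, :, 3])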
Code example #3
# Don't distinguish between training and test for insolation.
#  Make a 'test' directory that's a symlink to the 'training' directory.
tstdir = os.path.dirname(args.opfile).replace('training','test')
if not os.path.exists(tstdir):
    os.symlink(os.path.dirname(args.opfile),tstdir)

# Load the 20CR2c data as an iris cube
time_constraint=iris.Constraint(time=iris.time.PartialDateTime(
                                year=args.year,
                                month=args.month,
                                day=args.day,
                                hour=args.hour))
ic=iris.load_cube("%s/20CR/version_2c/ensmean/cduvb.1969.nc" % os.getenv('DATADIR'),
                  iris.Constraint(name='3-hourly Clear Sky UV-B Downward Solar Flux') &
                  time_constraint)
coord_s=iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
ic.coord('latitude').coord_system=coord_s
ic.coord('longitude').coord_system=coord_s

# Standardise
ic=to_analysis_grid(ic)
ic.data=normalise_insolation(ic.data)    

# Convert to Tensor
ict=tf.convert_to_tensor(ic.data, numpy.float32)

# Write to file
sict=tf.io.serialize_tensor(ict)
tf.io.write_file(args.opfile,sict)
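
For completeness, a hedged sketch of how such a serialised tensor could be read back later, using the standard tf.io API (not necessarily the project's own loading code):

# Hypothetical read-back of the serialised insolation field
sict = tf.io.read_file(args.opfile)
ict = tf.io.parse_tensor(sict, out_type=tf.float32)  # shape [79, 159] is preserved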