def open_data_with_lts_and_path():
    """Load training data for days 120-140, augmented with pressure,
    latitude, lower-tropospheric stability (LTS), and mid-tropospheric
    moisture path."""
    pressure = open_data('pressure')
    ds = open_data('training').sel(time=slice(120, 140))

    # attach derived fields
    ds['p'] = pressure
    ds['lat'] = ngaqua_y_to_lat(ds.y)
    ds['lts'] = lower_tropospheric_stability(ds.TABS, pressure, ds.SST, ds.Ps)
    ds['path'] = midtropospheric_moisture(ds.QV, pressure, bottom=850, top=600)
    return ds
# Exemple #2
def get_data():
    """Collect precipitable water (PW) from NG-Aqua, two neural-network
    runs, and the microphysics baseline, all interpolated to the final
    output time of the unstable run.

    Returns
    -------
    xr.Dataset concatenated along a new dimension whose coordinate values
    are the run names.
    """
    variables = ['PW']

    # open debiased NN run
    run = runs['debias']
    nn = run.data_2d.rename({'NPNN': 'net_precip'})

    # open unstable NN run; its last output time defines the snapshot time
    run = runs['unstable']
    unstable = run.data_2d.rename({'NPNN': 'net_precip'})
    time = float(unstable.time[-1])
    print(time)

    # open microphysics; net precip = precipitation minus surface evaporation
    run = runs['micro']
    micro = run.data_2d
    micro['net_precip'] = micro.Prec - lhf_to_evap(micro.LHF)

    # open NGAqua
    ng = open_data('ngaqua_2d')
    ng['net_precip'] = ng.Prec - lhf_to_evap(ng.LHF)
    # make sure the x and y values agree with the NN output grid
    ng = ng.assign(x=nn.x, y=nn.y)

    runs_at_time = {
        'NG-Aqua': ng[variables].interp(time=time),
        'NN-Lower': nn[variables].interp(time=time),
        'Base': micro[variables].interp(time=time),
        # plain string: the original f-string had no placeholders
        'NN-All': unstable[variables].interp(time=time),
    }

    return xr.concat(list(runs_at_time.values()),
                     dim=list(runs_at_time.keys()))
# Exemple #3
def get_ng_and_semiprog():
    """Return (ng, semiprognostic) datasets holding net precipitation and
    precipitable water for days 100-115 of the training data."""
    # open model
    model = common.get_model('NN-Lower')
    # get data
    ds = open_data('training').sel(time=slice(100,115))

    def integrate_moist(src):
        # mass-weighted vertical integral, divided by 1000
        # NOTE(review): closes over `ds`, which is rebound below — every call
        # happens while constructing the new Dataset, i.e. before the rebind
        # takes effect, so all integrals use the original training data.
        return (src * ds.layer_mass).sum('z')/1000

    # apparent moisture source Q2; 86400 converts FQT from per-second to per-day
    q2 = compute_apparent_source(ds.QT, 86400 * ds.FQT)


    ng = xr.Dataset({
        netprec_name: -integrate_moist(q2),
        pw_name: integrate_moist(ds.QT)
    })

    # semiprognostic: net precip implied by the NN-predicted moisture source
    predicted_srcs = model.predict(ds)
    ds = xr.Dataset({
        netprec_name: -integrate_moist(predicted_srcs['QT']),
        pw_name: integrate_moist(ds.QT)
    })
    
    return ng, ds
# Exemple #4
def plot(data):
    """Draw four filled-contour panels (time vs. pressure) of ``data``,
    one per ``step``, sharing axes and a horizontal colorbar.

    Assumes ``data`` has a ``step`` dimension of length >= 4 and a ``time``
    coordinate, with a vertical axis matching the 'pressure' data — TODO
    confirm against callers.
    """
    p = open_data('pressure')

    fig, axs = plt.subplots(1, 4, sharex=True, sharey=True,
                            constrained_layout=True,
                            figsize=(common.textwidth, 2))
    # force a (1, 4) 2-D axes array so axs[0, k] indexing works below
    axs.shape = (1, -1)
    abcd = 'abcd'

    # NOTE(review): `m` is computed but never used
    m = common.get_vmax(data)
    kw = dict(levels=common.diverging_levels(25, 5), cmap='RdBu_r')
    # pressure decreases with height, so flip the y axis once before drawing
    axs[0,0].invert_yaxis()
    for k in range(4):
        v = data.isel(step=k).squeeze()
        # import pdb; pdb.set_trace()
        im = axs[0,k].contourf(
            v.time, p, v.T, **kw)

        axs[0,k].set_title(f"{abcd[k]}) {get_title(k)}", loc='left')
        # v.plot(col='step', x='time')


    # one shared colorbar, keyed off the last contour set
    plt.colorbar(im, ax=axs, orientation='horizontal',
                 shrink=.3, aspect=2)

#     axs[0,0].yaxis.set_major_locator(plt.MaxNLocator(4))

    common.label_outer_axes(axs, "day", "p (mb)")
# Exemple #5
def get_data():
    """Zonal-mean PW and net precipitation over days 100-120 for NG-Aqua,
    the debiased NN run, and the microphysics baseline."""
    variables = ['PW', 'net_precip']
    times = slice(100, 120)

    # debiased NN run
    nn = runs['debias'].data_2d.rename({'NPNN': 'net_precip'})

    # microphysics baseline: net precip = precipitation minus evaporation
    micro = runs['micro'].data_2d
    micro['net_precip'] = micro.Prec - lhf_to_evap(micro.LHF)

    # NG-Aqua truth, with x/y replaced so the grids agree with the NN run
    ng = open_data('ngaqua_2d')
    ng['net_precip'] = ng.Prec - lhf_to_evap(ng.LHF)
    ng = ng.assign(x=nn.x, y=nn.y)

    plotme = xr.concat(
        [ng[variables].interp(time=nn.time), nn[variables], micro[variables]],
        dim=['NG-Aqua', 'NN-Lower', 'Base'])

    return plotme.sel(time=times).mean('x')
# Exemple #6
def get_data():
    """Net-precipitation snapshots two days after the start of the training
    data: truth, semi-prognostic NN, debiased NN run, and microphysics."""
    ds = open_data('training')
    time = ds.time[0] + 2

    # semi-prognostic estimate: apply the saved NN to the training state
    model = torch.load('../../nn/NNLower/5.pkl')
    snapshot = ds.sel(time=time).load()
    nn_srcs = model.call_with_xr(snapshot)
    semi_prognostic = -integrate_q2(nn_srcs['QT'], ds.layer_mass)

    # net precip simulated by the debiased NN run
    net_precip = runs['debias'].data_2d.NPNN

    # net precip from the microphysics run
    data_2d = runs['micro'].data_2d
    micro = data_2d.Prec - lhf_to_evap(data_2d.LHF)

    # truth from the training data
    net_precip_truth = ds.Prec - lhf_to_evap(ds.LHF)

    return xr.Dataset({
        'NG-Aqua': net_precip_truth.interp(time=time),
        'SemiProg': semi_prognostic.interp(time=time),
        'DEBIAS': net_precip.interp(time=time),
        'MICRO': micro.interp(time=time),
    }).compute()
# Exemple #7
def get_data(start_time=100, end_time=120):
    """Return a dataframe of PW and net precipitation indexed by
    (run, time, x, y) for NG-Aqua, the debiased NN run, and the
    semi-prognostic NN.

    Parameters
    ----------
    start_time, end_time : float
        Time window used to subset the debiased NN run.
    """
    # NOTE: the original also called open_data('ngaqua_2d') here, but the
    # result was immediately overwritten by get_ng_and_semiprog(); that
    # dead load has been removed.
    ng, semiprog = get_ng_and_semiprog()

    # select the requested time window of the debiased NN run
    debias = runs['debias'].data_2d.sel(time=slice(start_time, end_time))
    debias = xr.Dataset({
        pw_name: debias.PW,
        netprec_name: debias.NPNN
    })

    # merge the data into one dataframe, labelled by run name
    ng = ng.to_dataframe().assign(run='NG-Aqua')
    debias = debias.to_dataframe().assign(run='NN-Lower')
    semiprog = semiprog.to_dataframe().assign(run='NN-Lower-semi')
    df = pd.concat([ng, debias, semiprog])

    return df.reset_index().set_index(['run', 'time', 'x', 'y'])
# Exemple #8
def get_base_state_from_training_data():
    """Mean profile at y=32 over the first 10 times of the training data,
    returned as a dict of torch tensors for the listed fields."""
    mean = (open_data('training')
            .isel(y=32, time=slice(0, 10))
            .mean(['x', 'time']))
    return {key: xarray2torch(mean[key])
            for key in ['SLI', 'QT', 'SOLIN', 'SST']}
# Exemple #9
def get_error(model):
    """Residual (apparent source minus NN prediction) of Q1 and Q2 over the
    first 100 times of the training data, with NaN times dropped."""
    from src.data import open_data
    training = open_data("training").isel(time=slice(0, 100)).compute()

    predicted = model.predict(training)

    # apparent sources; 86400 converts forcings from per-second to per-day
    apparent_q2 = compute_apparent_source(training.QT, training.FQT * 86400)
    apparent_q1 = compute_apparent_source(training.SLI, training.FSLI * 86400)

    errors = xr.Dataset({
        'QT': apparent_q2 - predicted.QT,
        'SLI': apparent_q1 - predicted.SLI
    })
    return errors.dropna('time')
# Exemple #10
def get_ng():
    """Net precipitation and PW over days 100-115 in the tropical band of
    the training data, as a dataframe."""
    ds = open_data('training').sel(time=slice(100, 115), y=slice(4.5e6, 5.5e6))

    def column_integral(field):
        # mass-weighted vertical integral, divided by 1000
        return (field * ds.layer_mass).sum('z') / 1000

    # apparent moisture source Q2 (forcing converted to per-day)
    q2 = compute_apparent_source(ds.QT, 86400 * ds.FQT)

    return xr.Dataset({
        netprec_name: -column_integral(q2),
        pw_name: column_integral(ds.QT)
    }).to_dataframe()
# Exemple #11
def get_precipitation():
    """Return (truth, NN, microphysics) net-precipitation fields, with the
    truth interpolated onto the NN run's output times."""
    net_precip_nn = runs['debias'].data_2d.NPNN

    # truth: precipitation minus evaporation from the training data
    training = open_data('training')
    net_precip_truth = training.Prec - lhf_to_evap(training.LHF)

    # microphysics control run
    micro_2d = runs['micro'].data_2d
    net_precip_control = micro_2d.Prec - lhf_to_evap(micro_2d.LHF)

    truth_on_nn_times = net_precip_truth.interp(time=net_precip_nn.time)
    return truth_on_nn_times, net_precip_nn, net_precip_control
# Exemple #12
def get_data():
    """Averaged NN-run vs NG-Aqua fields (variables common to both), plus a
    meridional stream function, on a pressure vertical coordinate."""
    ds = open_data('training')
    nn = runs['debias'].data_3d

    # restrict to the variables present in both datasets
    shared = list(set(nn.data_vars) & set(ds.data_vars))
    plotme = xr.concat([avg(nn[shared]), avg(ds[shared])],
                       dim=['NN', 'NG-Aqua'])

    # stream function: cumulative mass flux scaled by the domain length
    length_domain = 160e3 * len(ds.x)
    plotme['stream_function'] = (plotme.V *
                                 ds.layer_mass[0]).cumsum('z') * length_domain

    # use pressure as the vertical coordinate
    return plotme.assign(p=plotme.p[0]).swap_dims({'z': 'p'})
# Exemple #13
def get_data(model="../../nn/277/1.pkl", **kwargs):
    # open model and training data
    model = torch.load(model)
    ds = open_data('training')

    # select x=0, y=32
    index = {'x': 0, 'y': 32}
    index = {key: slice(val, val + 1) for key, val in index.items()}
    location = ds.isel(**index)

    # run the single column models
    merged = predict_for_each_time(model, location, **kwargs)

    # compute Q2 for comparison
    location_subset = location.sel(time=merged.time)
    q2 = compute_apparent_source(location_subset.QT,
                                 location_subset.FQT * 86400)
    plotme = xr.concat(
        [q2.assign_coords(step='Truth'), merged.FQTNN], dim='step')

    return plotme
# Exemple #14
def get_data():
    """Semi-prognostic net precipitation and PW, as a dataframe, from
    applying the model at ``args.model`` to a tropical slice of the
    training data (days 100-115)."""
    model = torch.load(args.model)

    ds = open_data('training').sel(time=slice(100, 115), y=slice(4.5e6, 5.5e6))
    # surface wind speed from the lowest model level
    ds['surface_wind_speed'] = np.sqrt(ds.U.isel(z=0)**2 + ds.V.isel(z=0)**2)

    def column_integral(field):
        # mass-weighted vertical integral, divided by 1000
        return (field * ds.layer_mass).sum('z') / 1000

    # semiprognostic prediction of the moisture source
    predicted = model.predict(ds)
    result = xr.Dataset({
        netprec_name: -column_integral(predicted['QT']),
        pw_name: column_integral(ds.QT)
    })
    return result.to_dataframe()
# Exemple #15
def get_data():
    """Evaluate NN-Lower over the chunked training data and collect the NN
    and apparent heat/moisture sources plus their column integrals."""
    model = common.get_model('NN-Lower')
    ds = open_data('training')\
               .chunk({'time': 1})\
               .pipe(assign_apparent_sources)

    # evaluate the NN one time chunk at a time and store its outputs
    nn_srcs = map_dataset(ds, model.call_with_xr, 'time')
    for key in nn_srcs:
        ds['F' + key + 'NN'] = nn_srcs[key]

    # column integrals of the NN and apparent sources, plus the raw profiles
    return xr.Dataset({
        'net_moist_nn': integrate_q2(ds['FQTNN'], ds.layer_mass),
        'net_heat_nn': integrate_q1(ds['FSLINN'], ds.layer_mass),
        'net_moist': integrate_q2(ds['Q2'], ds.layer_mass),
        'net_heat': integrate_q1(ds['Q1'], ds.layer_mass),
        'Q1': ds['Q1'],
        'Q2': ds['Q2'],
        'Q1nn': ds['FSLINN'],
        'Q2nn': ds['FQTNN'],
    })
# Exemple #16
def get_data():
    """Autocorrelation functions of the prognostic training variables."""
    fields = ['QT', 'QP', 'SLI', 'U', 'V']
    return open_data('training')[fields].apply(compute_acf)
# Exemple #17
def get_wave_from_training_data(src=None):
    """Build a wave from the mean profile at y=32 over the first 10 times
    of the training data."""
    mean_profile = (open_data('training')
                    .isel(y=32, time=slice(0, 10))
                    .mean(['x', 'time']))
    return wave_from_xarray(mean_profile, src)
# Exemple #18
def get_plot_manager(model):
    """Construct a TrainingPlotManager bound to the training dataset."""
    from src.data import open_data
    return TrainingPlotManager(ex, model, open_data('training'))
# Exemple #19
def get_time_series_data():
    """Time series at the single point (x=0, y=32, z=20), as a dataframe."""
    point = open_data('training').isel(x=0, y=32, z=20)
    return point.to_dataframe()
# Exemple #20
def to_pressure_dim(ds):
    """Swap the z dimension of ``ds`` for the pressure coordinate loaded
    from the 'pressure' data."""
    from src.data import open_data
    pressure = open_data('pressure')
    return ds.assign_coords(p=pressure).swap_dims({'z': 'p'})
# Exemple #21
from jacobian import *
from src.data import open_data
import common
import pandas as pd

# bootstrap sample size
n = 20
hatch_threshold = 10

# compute jacobians
training = open_data('training')
training['region'] = common.get_regions(training.y)
tropics = training.isel(y=slice(30,34)).load()
tropics['time_of_day'] = tropics.time % 1
p = tropics.p[0].values

model = common.get_model('NN-All')
samples = list(bootstrap_samples(tropics, n))
jacobians = [get_jacobian(model, sample) for sample in samples]

# make plot
fig, axs = plt.subplots(
    4, 5, figsize=(common.textwidth, common.textwidth-2), sharex=True, sharey=True)
plt.rcParams['hatch.color'] = '0.5'

axs[0,0].invert_yaxis()
axs[0,0].invert_xaxis()
norm = SymLogNorm(1, 2, vmin=-1e5, vmax=1e5)

for ax, jac in zip(axs.flat, jacobians):
    qt_qt = jac['QT']['QT'].detach().numpy()