Example no. 1
0
import numpy as np
from cbrain.model_diagnostics import ModelDiagnostics

# Otherwise tensorflow will use ALL your GPU RAM for no reason
limit_mem()

# Base directories: local training/statistics data and raw SP-CAM output.
TRAINDIR = '/local/Tom.Beucler/SPCAM_PHYS/'
DATADIR = '/project/meteo/w2w/A6/S.Rasp/SP-CAM/fluxbypass_aqua/'

import os
os.chdir('/filer/z-sv-pool12c/t/Tom.Beucler/SPCAM/CBRAIN-CAM')

# Post-processing configuration and the custom Keras objects required to
# deserialize the saved models (conservation layers + weak-loss metrics).
config_fn = '/filer/z-sv-pool12c/t/Tom.Beucler/SPCAM/CBRAIN-CAM/pp_config/8col_rad_tbeucler_local_PostProc.yml'
dict_lay = {'SurRadLayer': SurRadLayer,
            'MassConsLayer': MassConsLayer,
            'EntConsLayer': EntConsLayer,
            'weak_loss_0': mse,
            'weak_loss_1': mse}
# Validation datasets and the climate tag appended to each pickle filename.
data_fn_array = ['/local/Tom.Beucler/SPCAM_PHYS/8col009_01_valid.nc',
                 '/local/Tom.Beucler/SPCAM_PHYS/8col009_14_valid.nc']
dataref = ['', '4K']
NNa = ['JUnotC']

for NNs in NNa:  # unused enumerate index removed
    print('Loading model')  # 1) Load model
    # Dead `NN = {}` / `md = {}` placeholders removed: both were
    # immediately overwritten by the assignments below.
    NN = load_model(TRAINDIR + 'HDF5_DATA/' + NNs + '.h5',
                    custom_objects=dict_lay)
    # Iterate datasets and tags in lockstep instead of range(len(...)).
    for data_fn, ref in zip(data_fn_array, dataref):
        print('Loading statistics')  # 2) Define model diagnostics object
        md = ModelDiagnostics(NN, config_fn, data_fn)
        # 3) Calculate statistics and save in pickle file.
        md.compute_stats()
        # Context manager fixes the original's leaked file handle
        # (pickle.dump into a bare open() that was never closed).
        with open(TRAINDIR + 'HDF5_DATA/' + NNs + 'md' + ref + '.pkl',
                  'wb') as f:
            pickle.dump(md.stats, f)
Example no. 2
0
    'EntConsLayer': EntConsLayer,
    'RH2QV': RH2QV,
    'dQVdt2dRHdt': dQVdt2dRHdt,
    'eliq': eliq,
    'eice': eice,
    'esat': esat,
    'qv': qv,
    'RH': RH
}

# Caches: NN maps model name -> loaded Keras model;
# md maps model name -> {dataset key -> ModelDiagnostics}.
NN = {}
md = {}
os.chdir('/local/Tom.Beucler/SPCAM_PHYS/HDF5_DATA')

for i, NNs in enumerate(NNarray):
    print('NN name is ', NNs)
    path = path_HDF5 + NNs
    NN[NNs] = load_model(path, custom_objects=dict_lay)
    md[NNs] = {}
    for j, data in enumerate(data_file):
        print('data name is ', data)
        # data[13:-3] strips a fixed 13-char prefix and the '.nc' suffix
        # to form a short dataset key — assumes all entries share that
        # prefix length; TODO confirm against `data_file`.
        key = data[13:-3]
        md[NNs][key] = ModelDiagnostics(
            NN[NNs],
            '/home/t/Tom.Beucler/SPCAM/CBRAIN-CAM/pp_config/' + config_file[i],
            '/local/Tom.Beucler/SPCAM_PHYS/' + data)
        md[NNs][key].compute_res()
        # BUG FIX: the original dumped `md.res`, but `md` is a plain dict
        # and has no `.res` attribute (AttributeError). Dump the residuals
        # of the diagnostics object just computed. Also close the file
        # handle via a context manager (the original leaked it).
        with open(TRAINDIR + 'HDF5_DATA/' + NNs + 'mdres' + dataref[j] + '.pkl',
                  'wb') as f:
            pickle.dump(md[NNs][key].res, f)
Example no. 3
0
    'EntConsLayer': EntConsLayer
}

# tgb - 1/7/2020 - Only keeping last alphas that did not have time to run
# alpha_array = [0,0.01,0.25,0.5,0.75,0.99,1] # Loop over weight given to MSE and conservation constraints
# alpha_array = [0.75,0.99,1]
alpha_array = [0.5]
for alpha in alpha_array:
    print('alpha = ', str(alpha))

    # 1) Load model
    # (Dead `NN = {}` / `md = {}` placeholders removed: both were
    # immediately overwritten by the assignments below.)
    path = TRAINDIR + 'HDF5_DATA/NNL' + str(alpha) + '.h5'
    NN = load_model(path, custom_objects=dict_lay)

    # 2) Define model diagnostics object
    md = ModelDiagnostics(NN, config_fn, data_fn)

    # 3) Calculate statistics and save in pickle file
    #     md.compute_stats()
    #     path = TRAINDIR+'HDF5_DATA/NNL'+str(alpha)+'md_test.pkl'
    #     pickle.dump(md.stats,open(path,'wb'))
    #     print('Stats are saved in ',path)

    # 4) Calculate budget residuals and save in pickle file
    md.compute_res()
    path = TRAINDIR + 'HDF5_DATA/NNL' + str(alpha) + 'res_test.pkl'
    # Context manager fixes the original's leaked file handle.
    with open(path, 'wb') as f:
        pickle.dump(md.res, f)
    print('Budget residuals are saved in ', path)
Example no. 4
0
import numpy as np
from cbrain.model_diagnostics import ModelDiagnostics

# Otherwise tensorflow will use ALL your GPU RAM for no reason
limit_mem()

# Base directories: local training/statistics data and raw SP-CAM output.
TRAINDIR = '/local/Tom.Beucler/SPCAM_PHYS/'
DATADIR = '/project/meteo/w2w/A6/S.Rasp/SP-CAM/fluxbypass_aqua/'

import os
os.chdir('/filer/z-sv-pool12c/t/Tom.Beucler/SPCAM/CBRAIN-CAM')

# Post-processing configuration and the custom Keras layers required to
# deserialize the saved model.
config_fn = '/filer/z-sv-pool12c/t/Tom.Beucler/SPCAM/CBRAIN-CAM/pp_config/8col_rad_tbeucler_local_PostProc.yml'
dict_lay = {'SurRadLayer': SurRadLayer,
            'MassConsLayer': MassConsLayer,
            'EntConsLayer': EntConsLayer}
# Validation sets and the matching climate tag for each pickle filename.
data_fn_array = ['/local/Tom.Beucler/SPCAM_PHYS/8col009_01_valid.nc',
                 '/local/Tom.Beucler/SPCAM_PHYS/8col009_11_valid.nc',
                 '/local/Tom.Beucler/SPCAM_PHYS/8col009_12_valid.nc',
                 '/local/Tom.Beucler/SPCAM_PHYS/8col009_13_valid.nc',
                 '/local/Tom.Beucler/SPCAM_PHYS/8col009_14_valid.nc']
dataref = ['', '1K', '2K', '3K', '4K']

MODEL_NAME = 'NNA0.01'  # hoisted: was repeated as a literal three times

print('Loading model')  # 1) Load model
# Dead `NN = {}` placeholder removed: it was immediately overwritten.
NN = load_model(TRAINDIR + 'HDF5_DATA/' + MODEL_NAME + '.h5',
                custom_objects=dict_lay)
# Iterate datasets and tags in lockstep instead of range(len(...)).
for data_fn, ref in zip(data_fn_array, dataref):
    print('Loading statistics')  # 2) Define model diagnostics object
    md = ModelDiagnostics(NN, config_fn, data_fn)
    # 3) Calculate the precipitation PDF and save it in a pickle file.
    md.compute_precipPDF()
    # Context manager fixes the original's leaked file handle.
    with open(TRAINDIR + 'HDF5_DATA/' + MODEL_NAME + 'mdprecip' + ref + '.pkl',
              'wb') as f:
        pickle.dump(md.precip, f)
Example no. 5
0
# Release the coordinate file handle opened earlier in the script
# (`coor` is defined outside this excerpt -- TODO confirm).
coor.close()

# Post-processing configuration and the reference validation dataset.
config_fn = '/filer/z-sv-pool12c/t/Tom.Beucler/SPCAM/CBRAIN-CAM/pp_config/8col_rad_tbeucler_local_PostProc.yml'
data_fn = '/local/Tom.Beucler/SPCAM_PHYS/8col009_01_valid.nc'
# Custom Keras layers required to deserialize the saved model.
dict_lay = {
    'SurRadLayer': SurRadLayer,
    'MassConsLayer': MassConsLayer,
    'EntConsLayer': EntConsLayer
}

# NOTE(review): this `NN = {}` is dead -- it is overwritten by
# load_model() a few lines below.
NN = {}
os.chdir(TRAINDIR + '/HDF5_DATA')
# `NNs` (the model name) is assumed to be set earlier in the script,
# outside this excerpt -- verify before running standalone.
path = TRAINDIR + 'HDF5_DATA/' + NNs + '.h5'
print('Loading neural network and model diagnostics object...')
NN = load_model(path, custom_objects=dict_lay)
md = ModelDiagnostics(NN, config_fn, data_fn)
# Print a layer-by-layer summary of the loaded Keras model.
NN.summary()

import h5py

print('Defining function...')


def get_RADCONjacobian(model, inp, md, ind):
    # model is the neural network model from inp to out
    # inp is the input x generator from the generator (object = gen_obj)
    # sample_index is the reference number of the sample for x
    # md is the model diagnostics object
    # ind is the indices over which the Jacobian is calculated
    # x.shape = (#sample,#inputs) so we are evaluating the gradient of
    # y(sample_index,:) with respect to x(sample_index,:)