# Steps per epoch for the ETOPO batches.
steps_etopo = L_etopo // batch_size

# Truncation lengths for the regular and ETOPO inputs.
cut = 100
cut_etopo = 300
map_sizes = [64, 96, 128]
c_size = 4                                 # number of input channels
labels = ['X', 'Y']                        # input/target keys in each batch
flag_c = [True for _ in range(c_size)]     # enable every channel
save_dir = '/glade/work/ksha/data/Keras/BACKUP/'
# Per-season elevation fine-tuning: build a validation generator over the
# SUB batches, then warm-start a partially frozen U-Net AE from the season's
# pretrained backbone checkpoint.
for sea in seasons:
    print(sea)
    # validation data
    valid_sub = glob(BATCH_dir +
                     '{}*BATCH*{}*SUB_{}*.npy'.format(VAR, kind, sea))
    gen_valid_sub = ku.grid_grid_gen(valid_sub,
                                     batch_size,
                                     c_size,
                                     m_size,
                                     labels,
                                     flag=flag_c)
    # --------------------------------------------------------------------------------------- #
    # ===== step 1: elev. tune ===== #
    record_t = 120  # presumably the best validation loss seen so far — TODO confirm
    max_tol = 5     # tuning rounds tolerated without improvement (patience)
    tol = 0         # current patience counter
    for n in range(20):
        print('====================')
        if n == 0:
            # First round only: load the season's pretrained backbone and copy
            # its weights into a freshly built, frozen-layer AE.
            model_name = 'UNET_{}_B4_{}.hdf'.format(VAR, sea)
            unet_backbone = keras.models.load_model(save_dir + model_name)
            unet_tune2 = au.UNET_AE(layer_N, c_size, dropout=False)
            # NOTE(review): freeze_unet2 with l=0 and lr=1e-6 — exact freezing
            # semantics defined elsewhere; verify against its implementation.
            unet_tune2 = freeze_unet2(unet_tune2, l=0, lr=1e-6)
            unet_tune2.set_weights(unet_backbone.get_weights())
# Experiment configuration for the TMIN fine-tuning run.
# (PEP 8: one statement per line — the original chained these with ';'.)
VAR = 'TMIN'              # target variable tag used in batch filenames
c_size = 4                # number of input channels
norm = 'STD'              # normalization scheme tag
kind = 'std_clim'         # batch kind tag used in batch filenames
batch_num = 200           # samples per .npy batch file
layer_N = [56, 112, 224, 448]  # U-Net filter counts per depth level
lr = [5e-5, 5e-6]         # learning rates (initial, reduced)

# data pipeline setup
L_train = 800             # number of training batch files
batch_size = 1            # batch files consumed per generator step
m_size = batch_num        # samples drawn from each file
steps = L_train // batch_size  # steps per epoch
labels = ['X', 'Y']       # input/target keys inside each .npy batch
flag_c = [True] * c_size  # enable every channel
# Seasons processed in order: winter, spring, summer, autumn.
seasons = ['djf', 'mam', 'jja', 'son']
# Per-season restart/fine-tune of the pretrained B4 U-Net checkpoints.
for sea in seasons:
    print('========== {} =========='.format(sea))
    # validation batches
    valid_files = glob(BATCH_dir+'{}*BATCH*{}*VORI_{}*.npy'.format(VAR, kind, sea))
    shuffle(valid_files)
    gen_valid = ku.grid_grid_gen(valid_files, batch_size, c_size, m_size, labels, flag=flag_c)
    # model import
    save_dir = '/glade/work/ksha/data/Keras/BACKUP/'
    model_name = 'UNET_{}_B4_{}'.format(VAR, sea)
    model_name_tune = 'UNET_{}_B4_{}_tune_train'.format(VAR, sea)
    model = keras.models.load_model(save_dir+model_name+'.hdf') # restart options
    ## opt
    # NOTE(review): variable is named opt_adam but instantiates SGD —
    # confirm which optimizer was actually intended.
    opt_adam = keras.optimizers.SGD(lr=5e-5, decay=0.01)
    ## callback
    # Early-stop on the HR temperature head's validation loss; checkpoint
    # only when that loss improves.
    callbacks = [keras.callbacks.EarlyStopping(monitor='val_HR_temp_loss', min_delta=0.0000001, patience=3, verbose=True),
                 keras.callbacks.ModelCheckpoint(filepath=save_dir+model_name_tune+'.hdf', verbose=True,
                                                 monitor='val_HR_temp_loss', save_best_only=True)]
    # MAE loss; the same quantity is also tracked as a metric for reporting.
    model.compile(loss=keras.losses.mean_absolute_error, optimizer=opt_adam, metrics=[keras.losses.mean_absolute_error])
    # training batches (TORI files), reshuffled for each season
    train_files = glob(BATCH_dir+'{}*BATCH*{}*TORI_{}*.npy'.format(VAR, kind, sea))
    shuffle(train_files)
    gen_train = ku.grid_grid_gen(train_files, batch_size, c_size, m_size, labels, flag=flag_c)
# ---- scrape-site separator (was: "Ejemplo n.º 3" / "0") ----
# Per-variable setup: load the pretrained A/B U-Net checkpoints and build
# training generators over the 128-size TORI batch files.
# NOTE(review): this loop is truncated in the visible chunk — the final
# grid_grid_gen call continues past the last line shown here.
for VAR in VARS:
    print(VAR)
    # Stage-A (std-normalized) checkpoint.
    model_unet_a = keras.models.load_model(save_dir +
                                           'UNET_{}_A_std'.format(VAR) +
                                           '.hdf')
    # NOTE(review): variable is named *_c but loads the B_tune checkpoint —
    # confirm the intended pairing.
    model_unet_c = keras.models.load_model(save_dir +
                                           'UNET_{}_B_tune'.format(VAR) +
                                           '.hdf')

    train_files = glob(BATCH_dir + '{}*BATCH*128*TORI[0-9]*.npy'.format(VAR))
    L_train, batch_size, m_size = len(train_files), 1, 200
    steps = L_train // batch_size

    # Baseline generator over (X, Y) grids; flag_c selects active channels.
    train_gen = ku.grid_grid_gen(train_files,
                                 batch_size,
                                 c_size,
                                 m_size,
                                 labels,
                                 flag=flag_c)
    # Variant generators pass an extra positional argument (0 here, 1 below);
    # its meaning is not visible in this chunk — verify against
    # grid_grid_gen's signature. Also note the bare grid_grid_gen vs the
    # ku.grid_grid_gen call above — confirm both resolve to the same helper.
    train_gen_LRT = grid_grid_gen(train_files,
                                  batch_size,
                                  c_size,
                                  m_size,
                                  labels,
                                  0,
                                  flag=flag_c)
    train_gen_HRZ = grid_grid_gen(train_files,
                                  batch_size,
                                  c_size,
                                  m_size,
                                  labels,
                                  1,