Example #1
    cnn.train(x_train,
              y_train,
              x_test=x_test,
              y_test=y_test,
              epochs=epochs,
              batch_size=64,
              verbose=1,
              patience=patience)
    t_used = timeit.default_timer() - t_start
    print('\nTime used for training CNN: {}'.format(t_used))
    # save CNN (here only the weights; full-model save options left commented out)
    #     cnn.model.save(os.path.join(savepath,f_name+'.h5'))
    #     cnn.save(savepath,f_name)
    cnn.model.save_weights(os.path.join(savepath, f_name + '.h5'))
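    # a minimal sketch of restoring the saved weights later (re-using the same
    # `cnn` architecture; this mirrors the commented-out load_weights line in
    # the try block above):
    cnn.model.load_weights(os.path.join(savepath, f_name + '.h5'))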

# select some gradients to evaluate and compare
logLik = lambda x: loglik(cnn.model(x))
import timeit
t_used = np.zeros(2)
import matplotlib.pyplot as plt
fig, axes = plt.subplots(nrows=1,
                         ncols=2,
                         sharex=True,
                         sharey=True,
                         figsize=(12, 6),
                         facecolor='white')
plt.ion()
n_dif = 100
dif = np.zeros((n_dif, 2))
loaded = np.load(file=os.path.join(
    folder, algs[alg_no] + '_ensbl' + str(ensbl_sz) + '_training_XimgY' +
    '.npz'))
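# A minimal sketch of the comparison loop this setup suggests (assumptions:
# tensorflow is imported as tf as in the other examples, the archive stores the
# image-shaped inputs under the key 'X', and get_grad_true is a hypothetical
# stand-in for the exact log-likelihood gradient computed with the forward
# model; the two columns of dif record the absolute and relative gradient
# error of the emulator).
X = loaded['X']
sel = np.random.choice(X.shape[0], size=n_dif, replace=False)
for n, idx in enumerate(sel):
    u = tf.convert_to_tensor(X[idx:idx + 1], dtype=tf.float32)
    # emulated gradient by automatic differentiation through the CNN surrogate
    t0 = timeit.default_timer()
    with tf.GradientTape() as tape:
        tape.watch(u)
        ll = logLik(u)
    g_emul = tape.gradient(ll, u).numpy().flatten()
    t_used[0] += timeit.default_timer() - t0
    # exact gradient from the forward model (hypothetical helper)
    t0 = timeit.default_timer()
    g_true = get_grad_true(X[idx]).flatten()
    t_used[1] += timeit.default_timer() - t0
    err = np.linalg.norm(g_true - g_emul)
    dif[n] = [err, err / np.linalg.norm(g_true)]
print('Time used by emulator vs forward model: {} vs {}'.format(*t_used))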
Example #2
    'latent': tf.keras.layers.PReLU(),
    'output': 'linear'
}
latent_dim = 128
droprate = .5
optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)
cnn = CNN(x_train.shape[1:],
          y_train.shape[1],
          num_filters=num_filters,
          latent_dim=latent_dim,
          droprate=droprate,
          activations=activations,
          optimizer=optimizer)
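# The constructor wraps a Keras model, exposed as cnn.model (the save/load
# calls below go through it); inspecting the built architecture is one line:
cnn.model.summary()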
f_name = 'cnn_' + algs[alg_no] + str(ensbl_sz)
try:
    cnn.model = load_model(os.path.join(folder, f_name + '.h5'),
                           custom_objects={'loss': None})
    #     cnn.model.load_weights(os.path.join(folder,f_name+'.h5'))
    print(f_name + ' has been loaded!')
except Exception as err:
    print(err)
    print('Train CNN...\n')
    epochs = 200
    patience = 0
    import timeit
    t_start = timeit.default_timer()
    cnn.train(x_train,
              y_train,
              x_test=x_test,
              y_test=y_test,
              epochs=epochs,
              batch_size=64,
              verbose=1,
              patience=patience)
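
# A quick sanity check of the trained (or loaded) emulator on the held-out set;
# a sketch assuming numpy is imported as np, as in the other examples.
y_pred = cnn.model.predict(x_test)
rel_err = np.linalg.norm(y_pred - y_test) / np.linalg.norm(y_test)
print('Relative test error of the CNN emulator: {:.4f}'.format(rel_err))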
Example #3
    'output': 'linear'
}
latent_dim = 128
droprate = .25
optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)
cnn = CNN(x_train,
          y_train,
          num_filters=num_filters,
          x_test=x_test,
          y_test=y_test,
          latent_dim=latent_dim,
          activations=activations,
          droprate=droprate,
          optimizer=optimizer)
try:
    cnn.model = load_model(os.path.join(folder, 'cnn_' + algs[alg_no] + '.h5'))
    print('cnn_' + algs[alg_no] + '.h5' + ' has been loaded!')
except Exception as err:
    print(err)
    print('Train CNN...\n')
    epochs = 100
    import timeit
    t_start = timeit.default_timer()
    cnn.train(epochs, batch_size=64, verbose=1)
    t_used = timeit.default_timer() - t_start
    print('\nTime used for training CNN: {}'.format(t_used))
    # save CNN
    #     cnn.model.save('./result/cnn_model.h5')
    cnn.save(folder, 'cnn_' + algs[alg_no])
    # how to load model
#     from tensorflow.keras.models import load_model
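
# A sketch of the reload step the comment above points to, assuming cnn.save
# wrote the full model to '<folder>/cnn_<alg>.h5' (consistent with the
# load_model call in the try block):
from tensorflow.keras.models import load_model
reloaded = load_model(os.path.join(folder, 'cnn_' + algs[alg_no] + '.h5'))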