Example #1
0
def training(hyperp, options, filepaths, data_dict, prior_dict):
    """Build and train the VAE-IAF network.

    Dispatches between single-device training (options.distributed_training
    == 0) and multi-GPU MirroredStrategy training (== 1). The network,
    optimizer, and batch construction are identical in both paths; only the
    device placement differs.

    Parameters (project objects — attributes used here only):
        hyperp     -- hyperparameters; reads hyperp.batch_size
        options    -- run options; reads distributed_training, which_gpu,
                      dist_which_gpus, random_seed, parameter_dimensions
        filepaths  -- project file paths, forwarded to the optimizer loop
        data_dict  -- dict with keys "state_obs_train", "parameter_train",
                      "state_obs_test", "parameter_test", "obs_dimensions",
                      "noise_regularization_matrix"
        prior_dict -- dict with keys "prior_mean",
                      "prior_covariance_cholesky_inverse"
    """
    #=== GPU Settings ===#
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    if options.distributed_training == 0:
        os.environ["CUDA_VISIBLE_DEVICES"] = options.which_gpu
    elif options.distributed_training == 1:
        os.environ["CUDA_VISIBLE_DEVICES"] = options.dist_which_gpus
        # Listing devices forces TF to enumerate GPUs before the
        # MirroredStrategy below is created; the return value is unused.
        gpus = tf.config.experimental.list_physical_devices('GPU')

    #=== Construct Validation Set and Batches ===#
    input_and_latent_train, input_and_latent_val, input_and_latent_test,\
    num_batches_train, num_batches_val, num_batches_test\
    = form_train_val_test_tf_batches(
            data_dict["state_obs_train"], data_dict["parameter_train"],
            data_dict["state_obs_test"], data_dict["parameter_test"],
            hyperp.batch_size, options.random_seed)

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = data_dict["obs_dimensions"]
    latent_dimensions = options.parameter_dimensions

    #=== Neural Network Initializers ===#
    kernel_initializer = tf.keras.initializers.RandomNormal(mean=0.0,
                                                            stddev=0.05)
    bias_initializer = 'zeros'
    kernel_initializer_iaf = tf.keras.initializers.RandomNormal(mean=0.0,
                                                                stddev=0.05)
    bias_initializer_iaf = 'zeros'

    #=== Non-distributed Training ===#
    if options.distributed_training == 0:
        #=== Neural Network ===#
        nn = VAEIAF(hyperp, options, input_dimensions, latent_dimensions,
                    kernel_initializer, bias_initializer,
                    kernel_initializer_iaf, bias_initializer_iaf,
                    positivity_constraint_log_exp)

        #=== Optimizer ===#
        optimizer = tf.keras.optimizers.Adam()

        #=== Training ===#
        optimize(hyperp, options, filepaths, nn, optimizer,
                 input_and_latent_train, input_and_latent_val,
                 input_and_latent_test, input_dimensions, latent_dimensions,
                 num_batches_train, data_dict["noise_regularization_matrix"],
                 prior_dict["prior_mean"],
                 prior_dict["prior_covariance_cholesky_inverse"])

    #=== Distributed Training ===#
    elif options.distributed_training == 1:
        dist_strategy = tf.distribute.MirroredStrategy()
        # Network and optimizer must be created inside the strategy scope so
        # that their variables are mirrored across the selected GPUs.
        with dist_strategy.scope():
            #=== Neural Network ===#
            # FIX: the IAF initializers were previously omitted here, making
            # this call inconsistent with the non-distributed branch above
            # and with the 9-argument VAEIAF constructor used elsewhere.
            nn = VAEIAF(hyperp, options, input_dimensions, latent_dimensions,
                        kernel_initializer, bias_initializer,
                        kernel_initializer_iaf, bias_initializer_iaf,
                        positivity_constraint_log_exp)

            #=== Optimizer ===#
            optimizer = tf.keras.optimizers.Adam()

        #=== Training ===#
        optimize_distributed(dist_strategy, hyperp, options, filepaths, nn,
                             optimizer, input_and_latent_train,
                             input_and_latent_val, input_and_latent_test,
                             input_dimensions, latent_dimensions,
                             num_batches_train,
                             data_dict["noise_regularization_matrix"],
                             prior_dict["prior_mean"],
                             prior_dict["prior_covariance_cholesky_inverse"])
Example #2
0
def predict_and_plot(hyperp, options, filepaths):
    """Restore the trained VAE-IAF network, run it on one test sample,
    and plot the true and predicted FEM functions.

    NOTE(review): assumes options.obs_type is either 'full' or 'obs';
    any other value leaves obs_dimensions unbound — confirm upstream
    validation.
    """
    #=== Observation Dimensions and Indices ===#
    if options.obs_type == 'full':
        obs_dimensions = options.parameter_dimensions
    elif options.obs_type == 'obs':
        obs_dimensions = options.num_obs_points
        print('Loading Boundary Indices')
        obs_indices = pd.read_csv(
                filepaths.project.obs_indices + '.csv').to_numpy()

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = obs_dimensions
    latent_dimensions = options.parameter_dimensions

    #=== Load and Prepare Test Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       options.parameter_dimensions, obs_dimensions)
    data.load_data_test()
    if options.add_noise == 1:
        data.add_noise_qoi_test()
    parameter_test = data.poi_test
    state_obs_test = data.qoi_test

    #=== Restore Trained Neural Network ===#
    # Initializers are irrelevant here since weights are loaded from disk.
    nn = VAEIAF(hyperp, options, input_dimensions, latent_dimensions, None,
                None, None, None, positivity_constraint_log_exp)
    nn.load_weights(filepaths.trained_nn)

    #=== Select a Single Test Sample ===#
    sample_number = 105
    parameter_sample = np.expand_dims(parameter_test[sample_number, :], 0)
    state_obs_sample = np.expand_dims(state_obs_test[sample_number, :], 0)

    #=== Forward Passes ===#
    parameter_pred, _ = nn.iaf_chain_posterior(
        nn.encoder(state_obs_sample))
    # NOTE(review): the decoder is fed the TRUE parameter sample rather
    # than the prediction above — presumably intentional; verify.
    state_obs_pred = nn.decoder(parameter_sample)
    parameter_pred = parameter_pred.numpy().flatten()
    state_obs_pred = state_obs_pred.numpy().flatten()

    #=== Plotting Prediction ===#
    print('================================')
    print('      Plotting Predictions      ')
    print('================================')
    #=== Load Mesh ===#
    nodes, elements, _, _, _, _, _, _ = load_mesh(filepaths.project)

    #=== Plot FEM Functions ===#
    plot_fem_function(filepaths.figures_savefile_name_parameter_test,
                      'True Parameter', 7.0, nodes, elements,
                      parameter_sample)
    plot_fem_function(filepaths.figures_savefile_name_parameter_pred,
                      'Parameter Prediction', 7.0, nodes, elements,
                      parameter_pred)
    if options.obs_type == 'full':
        plot_fem_function(filepaths.figures_savefile_name_state_test,
                          'True State', 2.6, nodes, elements,
                          state_obs_sample)
        plot_fem_function(filepaths.figures_savefile_name_state_pred,
                          'State Prediction', 2.6, nodes, elements,
                          state_obs_pred)

    print('Predictions plotted')