likelihood="bernoulli") #train mu network run_auto_lr_range(train_loader, bae_model) bae_model.fit(train_loader, num_epochs=10) #for each model, evaluate and plot: bae_models = [bae_model] id_data_test = test_loader ood_data_list = [ood_loader] train_set_name = "CIFAR" #run evaluation test of model on ood data set run_test_model(bae_models=bae_models, id_data_test=test_loader, ood_data_list=ood_data_list, id_data_name=train_set_name, output_reshape_size=(32, 32, 3)) #experimental here x_test = get_sample_dataloader(test_loader)[0].cuda() x_ood = get_sample_dataloader(ood_loader)[0].cuda() test_latent = bae_model.predict_latent(x_test, transform_pca=False) ood_latent = bae_model.predict_latent(x_ood, transform_pca=False) plt.figure() plt.boxplot([test_latent[0].mean(1), ood_latent[0].mean(1)]) plt.figure() plt.boxplot([test_latent[1].mean(1), ood_latent[1].mean(1)])
last_activation="sigmoid") #symmetrical to encoder #combine them into autoencoder autoencoder = Autoencoder(encoder, decoder_mu) #convert into BAE-MCDropout bae_mcdropout = BAE_MCDropout(autoencoder=autoencoder, dropout_p=0.2, num_train_samples=5, num_samples=50, use_cuda=use_cuda, denoising_factor=noise_factor) #train mu network run_auto_lr_range(train_loader, bae_mcdropout) bae_mcdropout.fit(train_loader, num_epochs=1) #for each model, evaluate and plot: bae_models = [bae_mcdropout] id_data_test = test_loader ood_data_list = [ood_loader, noisy_loader] train_set_name = "FashionMNIST" #run evaluation test of model on ood data set run_test_model(bae_models=bae_models, id_data_test=test_loader, ood_data_names=["OOD", "NOISY"], ood_data_list=ood_data_list, id_data_name=train_set_name, output_reshape_size=(28, 28))