Example #1
0
# Create the output folder; only ignore "directory already exists".
# (A bare `except:` here would also hide PermissionError, bad paths, etc.,
# and is inconsistent with the other examples, which catch FileExistsError.)
try:
    os.mkdir(folder_name)
except FileExistsError:
    pass
# Initial design points for the GP surrogate: evenly spaced 1-d grid.
nsamples = 5
samples = torch.linspace(-20, 20, nsamples).reshape(-1, 1)
#Just initiate with large mean and covariance
mu0 = torch.zeros(dim)
cov0 = (20.0/3)**2*torch.ones(dim)
#%% The main training class is initiated, it's gp model optimized,
#   and initial component set
vb = VariationalBoosting(dim, logjoint, samples, mu0, cov0,
                         kernel_function="PMat",
                         matern_coef=2.5,
                         degree_hermite=60)
vb.optimize_bmc_model(maxiter=100, verbose=False)
#%% Tracking devices
vb.save_distribution("%s/distrib%i"%(folder_name, 0))
# Dense 1-d grid used only for plotting/diagnostics below.
nplot = 201
delta_x = torch.linspace(-20, 20, nplot)
delta_x_np = delta_x.flatten().numpy()
tp_np = (logjoint(delta_x.reshape(-1, 1)).cpu()).flatten().numpy()
# GP mean prediction, de-standardized back to log-joint scale.
prediction_np = (vb.bmcmodel.prediction(delta_x.reshape(-1, 1), cov="none")*vb.evals_std + vb.evals_mean).\
                numpy().astype(float)
dmeans = [torch.norm(vb.currentq_mean.to("cpu")-true_mean).numpy()]
# NOTE(review): this compares currentq_mean against true_cov -- looks like a
# copy-paste slip (presumably vb.currentq_cov was intended); confirm against
# the library before changing.
dcovs = [torch.norm(vb.currentq_mean.to("cpu")-true_cov, 2).numpy()]
elbo_list = [vb.evidence_lower_bound(nsamples=10000).cpu().numpy()]
step_list = [0]
time_list = [0]
vbp_list = []
vbp = vb.current_logq(delta_x.reshape(-1, 1)).cpu().flatten().numpy().astype(float)
Example #2
0
 # Initial GP design: 10 samples per dimension, drawn by the project's
 # sampling helper on the target device.
 nsamples = 10 * dim
 samples = sampling.sampling1(nsamples, dim, scale=5.0, device=device)
 # Broad initial proposal: zero mean, (20/3)^2 diagonal covariance.
 mu0 = torch.zeros(dim).to(device)
 cov0 = (20.0 / 3)**2 * torch.ones(dim).to(device)
 #%%
 training_interval = 20
 acquisitions = ["prospective", "mmlt"]
 vb = VariationalBoosting(dim,
                          logjoint,
                          samples,
                          mu0,
                          cov0,
                          kernel_function="PMat",
                          matern_coef=2.5,
                          degree_hermite=60)
 vb.optimize_bmc_model(maxiter=200)
 vb.update_full()
 #%%
 # Tracking lists (index 0 = state before the boosting loop starts).
 dmeans = [torch.norm(vb.currentq_mean.to("cpu") - true_mean).numpy()]
 # NOTE(review): compares currentq_mean against true_cov -- possible
 # copy-paste slip (currentq_cov?); verify against the library.
 dcovs = [torch.norm(vb.currentq_mean.to("cpu") - true_cov, 2).numpy()]
 mmds = [mmd_vb_sampler(vb, sampler, 100000)]
 weights = [vb.weights.cpu().numpy()]
 elbo_list = [vb.evidence_lower_bound(nsamples=10000).cpu().numpy()]
 step_list = [0]
 time_list = [0.0]
 for i in range(100):
     tictoc.tic()
     _ = vb.update(maxiter_nc=300,
                   lr_nc=0.01,
                   n_samples_nc=500,
                   n_samples_sn=300,
Example #3
0
# Active-learning settings: retraining interval and candidate acquisition
# strategies (only "mmlt" is used in the loop below).
training_interval = 20
acquisitions = ["prospective", "mmlt"]
vb = VariationalBoosting(dim,
                         logjoint,
                         samples,
                         mu0,
                         cov0,
                         bmc_type="FM",
                         normalization_mode="normalize",
                         training_space="gspace",
                         noise=1e-4,
                         kernel_function="PMat",
                         matern_coef=1.5,
                         numstab=-50.0,
                         degree_hermite=50)
vb.optimize_bmc_model(maxiter=500, verbose=1, lr=0.05)

#%%
# Baseline diagnostics recorded before active sampling starts.
elbo_list = [vb.evidence_lower_bound(nsamples=10000).cpu().numpy()]
kl_list = [vb.kullback_proposal_bmc(10000).item()]
step_list = [0]
time_list = [0.0]
#%%
print("Active sampling...")
# Acquire 10*dim new log-joint evaluations with the "mmlt" acquisition,
# then jointly refit all components.
for i in range(10 * dim):
    vb.update_bmcmodel(acquisition="mmlt", mode="optimizing", vreg=1e-2)
vb.update_full()
folder_name = "testheat1c"
try:
    os.mkdir(folder_name)
except FileExistsError:
Example #4
0
# Fix the global torch RNG so the random draws below are reproducible.
torch.manual_seed(100) #For reproducibility

#Approximating unnormalized 2-d Cauchy
def logjoint(theta):
    """Unnormalized log-density of independent standard Cauchy marginals.

    Computes -log(1 + theta_j**2) per coordinate and sums over the last
    axis; the 1/pi normalization constant is deliberately dropped.
    """
    per_coordinate = -torch.log(1 + theta ** 2)
    return per_coordinate.sum(dim=-1)

#Set up parameters
dim=2 #Dimension of problem
samples = torch.randn(20,dim) #Initial samples
mu0 = torch.zeros(dim) #Initial mean
cov0 = 20.0*torch.ones(dim) #Initial (diagonal) covariance
acquisition = "prospective" #Acquisition function

#Initialize algorithm
vb = VariationalBoosting(dim,logjoint,samples,mu0,cov0)
vb.optimize_bmc_model() #Optimize GP model
vb.update_full() #Fit first component

#Training loop: alternate boosting updates, active evaluation, pruning.
for i in range(100):
    _ = vb.update() #Choose new boosting component
    vb.update_bmcmodel(acquisition=acquisition) #Choose new evaluation
    vb.cutweights(1e-3) #Weight pruning: drop components below 1e-3
    if ((i+1)%20) == 0:
        vb.update_full(cutoff=1e-3) #Joint parameter updating every 20 steps

vb.save_distribution("finaldistrib") #Save distribution
#%%
import math
# Reload the just-saved distribution for plotting below.
# NOTE(review): torch.load unpickles, which can execute code from untrusted
# files -- acceptable here only because we wrote the file ourselves above.
distrib = torch.load("finaldistrib")
nplot = 21