Code Example #1
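The snippet below starts after the model has been built; a minimal sketch of the setup it assumes follows (imports plus a Normal model with latent mean "mu", latent scale "nu" and observed "x"). The hyperparameter values and nu_real are illustrative assumptions, not taken from the original:

import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, LogNormalVariable
from brancher import inference

# Assumed generative model: x ~ Normal(mu, nu), with priors over mu and nu
nu = LogNormalVariable(0.3, 1., "nu")
mu = NormalVariable(0., 10., "mu")
x = NormalVariable(mu, nu, "x")
model = ProbabilisticModel([x])

# Generate data (mu_real is set below in the original; nu_real is assumed here)
nu_real = 1.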
mu_real = -2.
data = model.get_sample(number_samples=20,
                        input_values={
                            mu: mu_real,
                            nu: nu_real
                        })

# Observe data
x.observe(data)

# Variational model
Qnu = LogNormalVariable(0., 1., "nu", learnable=True)
Qmu = NormalVariable(0., 1., "mu", learnable=True)
model.set_posterior_model(ProbabilisticModel([Qmu, Qnu]))

# Inference
inference.perform_inference(model,
                            number_iterations=300,
                            number_samples=100,
                            optimizer='SGD',
                            lr=0.0001)
loss_list = model.diagnostics["loss curve"]

plt.plot(loss_list)
plt.title("Loss (negative ELBO)")
plt.show()

from brancher.visualizations import plot_posterior

plot_posterior(model, variables=["mu", "nu", "x"])
plt.show()
Code Example #2
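Assumed imports for this snippet (not shown in the original); the class names follow brancher.standard_variables, but the exact import paths can differ between Brancher versions:

import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import BetaVariable, BinomialVariable
from brancher import inference
from brancher.visualizations import plot_posterior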
# Beta-Binomial model
number_tosses = 1
p = BetaVariable(1., 1., "p")
k = BinomialVariable(number_tosses, probs=p, name="k")
model = ProbabilisticModel([k, p])

# Generate data
p_real = 0.8
data = model.get_sample(number_samples=30, input_values={p: p_real})

# Observe data
k.observe(data)

# Inference
inference.perform_inference(model,
                            number_iterations=1000,
                            number_samples=500,
                            lr=0.1,
                            optimizer='SGD')
loss_list = model.diagnostics["loss curve"]

# Plot loss
plt.plot(loss_list)
plt.title("Loss (negative ELBO)")
plt.show()

# Plot posterior
plot_posterior(model, variables=["p"])
plt.show()
Code Example #3
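This snippet picks up after a dynamic causal model has been built: dynamic_causal_model, the series x_series and y_series, and the prior sample all come from that earlier, not shown, step. The Brancher imports match those sketched in Code Example #1, plus plot_posterior from brancher.visualizations.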
# Observe
observable_data = sample[[x.name for x in x_series] +
                         [y.name for y in y_series]]
dynamic_causal_model.observe(observable_data)

# Variational model
Qa = LogNormalVariable(0., 0.5, name="a", learnable=True)
Qb = LogNormalVariable(0., 0.5, name="b", learnable=True)
Qc = NormalVariable(0., 0.1, name="c", learnable=True)
Qd = NormalVariable(0., 0.1, name="d", learnable=True)
Qe = NormalVariable(0., 5., name="e", learnable=True)
Qxi = LogNormalVariable(0.1, 0.1, name="xi", learnable=True)
Qchi = LogNormalVariable(0.1, 0.1, name="chi", learnable=True)
variational_posterior = ProbabilisticModel([Qa, Qb, Qc, Qd, Qe, Qxi, Qchi])
dynamic_causal_model.set_posterior_model(variational_posterior)

# Inference #
inference.perform_inference(dynamic_causal_model,
                            number_iterations=100,
                            number_samples=5,
                            optimizer='Adam',
                            lr=0.01)
loss_list = dynamic_causal_model.diagnostics["loss curve"]
plt.plot(loss_list)
plt.show()

# Plot posterior
plot_posterior(dynamic_causal_model,
               variables=["a", "b", "c", "d", "e", "xi", "chi"])
plt.show()
Code Example #4
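The variables a and b used below are not defined in the excerpt. The sketch that follows is an assumed setup consistent with the variational model (a truncated-normal prior on "a" and a Normal likelihood "b"); the numeric values are illustrative, and the import path of TruncatedNormalVariable may differ between Brancher versions. The optimizer passed later is a Chainer object, which points to an older, Chainer-based Brancher release, hence the chainer import:

import chainer
import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, TruncatedNormalVariable
from brancher import inference
from brancher.visualizations import plot_posterior

# Assumed generative model (illustrative values)
a = TruncatedNormalVariable(mu=1., sigma=1.,
                            truncation_rule=lambda x: x > 0., name="a")
b = NormalVariable(a, 0.5, name="b")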
model = ProbabilisticModel([a, b])

# Variational model
Qa = TruncatedNormalVariable(mu=1.,
                             sigma=0.25,
                             truncation_rule=lambda x: x > 0.1,
                             name="a",
                             learnable=True)
variational_model = ProbabilisticModel([Qa])
model.set_posterior_model(variational_model)

# Generate data
num_observations = 10
data = b.get_sample(number_samples=num_observations, input_values={a: 1.})

# Observe data
b.observe(data)

# Inference
inference.perform_inference(model,
                            number_iterations=500,
                            number_samples=50,
                            optimizer=chainer.optimizers.Adam(0.025))
loss_list = model.diagnostics["loss curve"]

plt.plot(loss_list)
plt.show()

plot_posterior(model, variables=["a", "b"])
plt.show()
Code Example #5
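The generative model behind this snippet is not shown: it assumes latent variables mu_x, mu_y, v and nu together with the variables x, y, w1, w2, b and response (apparently a regression-style model) defined in an earlier step. The Brancher imports match those sketched in Code Example #1, plus plot_posterior from brancher.visualizations.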
sample = model.get_sample(15,
                          input_values={
                              mu_x: 1.,
                              mu_y: 2.,
                              v: 0.3,
                              nu: 0.1
                          })[["x", "y", "w1", "w2", "b", "response"]]
model.observe(sample)

# Variational model
Qmu_x = NormalVariable(0., 1., name="mu_x", learnable=True)
Qmu_y = NormalVariable(0., 1., name="mu_y", learnable=True)
Qv = LogNormalVariable(0., 0.1, name="v", learnable=True)
Qnu = LogNormalVariable(-1, 0.01, name="nu", learnable=True)
variational_posterior = ProbabilisticModel([Qmu_x, Qmu_y, Qv, Qnu])
model.set_posterior_model(variational_posterior)

# Inference #
inference.perform_inference(model,
                            number_iterations=1500,
                            number_samples=50,
                            optimizer='Adam',
                            lr=0.01)
loss_list = model.diagnostics["loss curve"]
plt.plot(loss_list)
plt.show()

# Plot posterior
plot_posterior(model, variables=["mu_x", "mu_y", "v"])
plt.show()
Code Example #6
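This snippet assumes a Gaussian-process setup built earlier: an input variable x over a grid x_range of num_datapoints points, a GP mean mu, and latent kernel hyperparameters freq, length_scale and noise_var. The GP constructor, the Harmonic, SquaredExponential and WhiteNoise kernels, and the LogNormal/Normal shorthand variable classes come from Brancher, but their import paths vary across versions, so only the generic imports are sketched here:

import numpy as np
import matplotlib.pyplot as plt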
cov = (Harmonic(frequency=freq) * SquaredExponential(scale=length_scale)
       + WhiteNoise(magnitude=noise_var))
f = GP(mu, cov, name="f")
y = f(x)
model = ProbabilisticModel([y])

# Observe data
noise_level = 0.2
f1 = 1.
data = np.sin(2*np.pi*f1*x_range) + noise_level*np.random.normal(0., 1., (1, num_datapoints))
y.observe(data)

# Variational model
Qlength_scale = LogNormal(-1, 0.2, name="length_scale", learnable=True)
Qnoise_var = LogNormal(-1, 0.2, name="noise_var", learnable=True)
Qfreq = Normal(0.2, 0.2, name="freq", learnable=True)
variational_model = ProbabilisticModel([Qlength_scale, Qnoise_var, Qfreq])
model.set_posterior_model(variational_model)

# Inference
inference.perform_inference(model,
                            number_iterations=1500,
                            number_samples=10,
                            optimizer='SGD',
                            lr=0.0025)
loss_list = model.diagnostics["loss curve"]
plt.plot(loss_list)
plt.show()

# Posterior plot
plot_posterior(model, variables=["length_scale", "noise_var", "freq"])
plt.show()
Code Example #7
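This excerpt also starts mid-example: it assumes torch, an integer M (the dimensionality of the latent "y"), the probabilistic model model, and earlier definitions of Qy and Qy0. It runs one inference pass, stores its loss curve as loss_list1, and then redefines the variational distribution for a second pass.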
Qy1 = DeterministicVariable(Qy[1], "y1")

variational_model = ProbabilisticModel([Qy, Qy0])
model.set_posterior_model(variational_model)

# Inference #
inference.perform_inference(model,
                            number_iterations=1500,
                            number_samples=50,
                            optimizer="Adam",
                            lr=0.001)

loss_list1 = model.diagnostics["loss curve"]

# Plot posterior
plot_posterior(model, variables=["y0"])
plt.show()

# Variational distribution
Qy = NormalVariable(torch.zeros((M, )),
                    0.5 * torch.ones((M, )),
                    "y",
                    learnable=True)
Qy0 = DeterministicVariable(Qy[0], "y0")
Qy1 = DeterministicVariable(Qy[1], "y1")

variational_model = ProbabilisticModel([Qy, Qy0])
model.set_posterior_model(variational_model)

# Inference #
inference.perform_inference(model,