Example #1
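This snippet begins mid-script; a minimal setup consistent with the names used below might be (assumed, not from the source):

import chainer
import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, LogNormalVariable
from brancher import inference

nu = LogNormalVariable(0., 1., "nu")        # scale prior
mu = NormalVariable(0., 10., "mu")          # location prior
x = NormalVariable(mu, nu, "x")             # observed variable
x_real = NormalVariable(-2., 1., "x_real")  # ground-truth data generator
model = ProbabilisticModel([x, x_real])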
sample = model.get_sample(10)
print(sample)

# Print samples from a single variable
x_sample = x.get_sample(10)
print(x_sample)

# Print samples conditional on an input
in_sample = model.get_sample(10, input_values={mu: 100.})
print(in_sample)

# Generate ground-truth data (the private _get_sample returns a dict of raw tensors keyed by variable)
data = x_real._get_sample(number_samples=50)

# Observe data
x.observe(data[x_real][:, 0, :])

# Variational model
Qnu = LogNormalVariable(0., 1., "nu", learnable=True)
Qmu = NormalVariable(0., 1., "mu", learnable=True)
model.set_posterior_model(ProbabilisticModel([Qmu, Qnu]))

# Inference
inference.stochastic_variational_inference(model,
                                           number_iterations=100,
                                           number_samples=50,
                                           optimizer=chainer.optimizers.Adam(0.1))
loss_list = model.diagnostics["loss curve"]

# Print posterior samples
post_samples = model.get_posterior_sample(10)
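A quick convergence check, assuming matplotlib as in the later examples:

plt.plot(loss_list)
plt.show()
print(post_samples)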
Example #2
import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import BernulliVariable, NormalVariable
from brancher import inference
from brancher.inference import ReverseKL
from brancher.gradient_estimators import BlackBoxEstimator, Taylor1Estimator

# Model
z1 = BernulliVariable(logits=0., name="z1")
z2 = BernulliVariable(logits=0., name="z2")
y = NormalVariable(2 * z1 + z2, 1., name="y")
model = ProbabilisticModel([y])

# Generate data
data = y.get_sample(20, input_values={z1: 1, z2: 0})
data.hist(bins=20)
plt.show()

# Observe data
y.observe(data)

# Variational model
Qz1 = BernulliVariable(logits=0., name="z1", learnable=True)
Qz2 = BernulliVariable(logits=0., name="z2", learnable=True)
variational_model = ProbabilisticModel([Qz1, Qz2])
model.set_posterior_model(variational_model)

# Joint-contrastive inference
inference.perform_inference(
    model,
    inference_method=ReverseKL(gradient_estimator=Taylor1Estimator),
    number_iterations=600,
    number_samples=20,
    optimizer="SGD",
    lr=0.001)
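To see what the discrete posterior has learned, posterior samples can be drawn as in Example #1; averaging the sampled columns (assuming the usual pandas output with one column per variable) approximates the posterior probability that each unit is on:

post_samples = model.get_posterior_sample(1000)
print(post_samples[["z1", "z2"]].mean())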
Example #3
import torch
import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, DeterministicVariable, LogNormalVariable
import brancher.functions as BF
from brancher.transformations import PlanarFlow
from brancher.visualizations import plot_density, plot_posterior
from brancher import inference

# Model
M = 8
y = NormalVariable(torch.zeros((M, )), 1. * torch.ones((M, )), "y")
y0 = DeterministicVariable(y[1], "y0")
d = NormalVariable(y, torch.ones((M, )), "d")
model = ProbabilisticModel([d, y, y0])

# Generate data from the true model and observe it
d.observe(d.get_sample(55, input_values={y: 1. * torch.ones((M, ))}))

# Variational distribution
u1 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "u1", learnable=True)
w1 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "w1", learnable=True)
b1 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b1", learnable=True)
u2 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "u2", learnable=True)
w2 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "w2", learnable=True)
b2 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b2", learnable=True)
z = NormalVariable(torch.zeros((M, 1)),
                   torch.ones((M, 1)),
                   "z",
                   learnable=True)
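# Compose two planar flows on the base Gaussian z; renaming the result "y" (below) makes it the posterior for y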
Qy = PlanarFlow(w2, u2, b2)(PlanarFlow(w1, u1, b1)(z))
Qy.name = "y"
Qy0 = DeterministicVariable(Qy[1], "y0")
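The source snippet stops after defining the flow; the remaining steps presumably mirror the other examples (this completion is an assumption):

variational_model = ProbabilisticModel([Qy, Qy0])
model.set_posterior_model(variational_model)

inference.perform_inference(model,
                            number_iterations=300,
                            number_samples=50,
                            optimizer="Adam",
                            lr=0.01)
loss_list = model.diagnostics["loss curve"]
plt.plot(loss_list)
plt.show()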
Example #4
nu = LogNormalVariable(0., 1., "nu")  # scale prior; assumed, as this snippet starts mid-script
mu = NormalVariable(0., 10., "mu")
x = NormalVariable(mu, nu, "x")
model = ProbabilisticModel([x])  # TODO: fix plot_posterior (flatten automatically?)

# Generate data
nu_real = 1.
mu_real = -2.
data = model.get_sample(number_samples=20,
                        input_values={mu: mu_real, nu: nu_real})

# Observe data
x.observe(data)

# Variational model
Qnu = LogNormalVariable(0., 1., "nu", learnable=True)
Qmu = NormalVariable(0., 1., "mu", learnable=True)
model.set_posterior_model(ProbabilisticModel([Qmu, Qnu]))

# Inference
inference.perform_inference(model,
                            number_iterations=300,
                            number_samples=100,
                            optimizer='SGD',
                            lr=0.0001)
loss_list = model.diagnostics["loss curve"]

plt.plot(loss_list)
plt.show()
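The fitted posterior itself can be visualized as well, assuming plot_posterior is imported as in the neighboring examples:

plot_posterior(model, variables=["mu", "nu"])
plt.show()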
Example #5
# Assumed setup, not shown in the source snippet: a truncated-normal prior "a"
# with a Gaussian likelihood "b", e.g.
# a = TruncatedNormalVariable(mu=1., sigma=1., truncation_rule=lambda x: x > 0., name="a")
# b = NormalVariable(a, 1., name="b")
model = ProbabilisticModel([a, b])

# Variational model
Qa = TruncatedNormalVariable(mu=1.,
                             sigma=0.25,
                             truncation_rule=lambda x: x > 0.1,
                             name="a",
                             learnable=True)
variational_model = ProbabilisticModel([Qa])
model.set_posterior_model(variational_model)

# Generate data
num_observations = 10
data = b.get_sample(number_samples=num_observations, input_values={a: 1.})

# Observe data
b.observe(data)

# Inference
inference.perform_inference(model,
                            number_iterations=500,
                            number_samples=50,
                            optimizer=chainer.optimizers.Adam(0.025))
loss_list = model.diagnostics["loss curve"]

plt.plot(loss_list)
plt.show()

plot_posterior(model, variables=["a", "b"])
plt.show()
Example #6
# Assumed imports, not shown in this snippet:
# from brancher.standard_variables import NormalVariable as Normal
# from brancher.stochastic_processes import MarkovProcess
# from brancher.inference import perform_inference

## Create time series model ##
x0 = Normal(0, 0.5, "x_0")
X = MarkovProcess(x0, lambda t, x: Normal(x, 0.2, "x_{}".format(t)))

## Create observation model ##
Y = Normal(X, 1., "y")

## Sample ##
num_timepoints = 30
temporal_sample = Y.get_timeseries_sample(1, query_points=num_timepoints)
temporal_sample.plot()
plt.show()

## Observe data ##
data = temporal_sample
query_points = range(num_timepoints)
Y.observe(data, query_points)

## Variational model ##
Qx0 = Normal(0, 0.5, "x_0")
QX = [Qx0]
for idx in range(1, num_timepoints):
    QX.append(
        Normal(QX[idx - 1],
               0.25,
               "x_{}".format(idx),
               has_bias=True,
               learnable=True))
QX = ProbabilisticModel(QX)

## Perform ML inference ##
perform_inference(Y,
                  posterior_model=QX,
                  number_iterations=300,
                  number_samples=20,
                  optimizer="SGD",
                  lr=0.001)  # the call is truncated in the source; these arguments are assumed
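As elsewhere, the loss curve can be inspected afterwards, assuming the diagnostics dict is populated on the observed model in the same way:

loss_list = Y.diagnostics["loss curve"]
plt.plot(loss_list)
plt.show()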
Example #7
import numpy as np
import torch

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, DeterministicVariable
import brancher.functions as BF
from brancher.transformations import Exp, Scaling, TriangularLinear, Sigmoid, Bias
from brancher.visualizations import plot_density, plot_posterior
from brancher import inference

# Model
M = 2
y = NormalVariable(torch.zeros((M, )), 1. * torch.ones((M, )), "y")
y0 = DeterministicVariable(y[0], "y0")
y1 = DeterministicVariable(y[1], "y1")
d = NormalVariable(y**2, torch.ones((M, )), "d")
model = ProbabilisticModel([d, y, y0])

# get samples
d.observe(d.get_sample(25, input_values={y: 0.3 * torch.ones((M, ))}))

# Variational distribution
N = int(M * (M + 1) / 2)
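# N = M*(M+1)/2 is the number of free entries of an M-by-M triangular matrix (used by TriangularLinear)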
v1 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "v1",
                           learnable=True)
v2 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "v2",
                           learnable=True)
b1 = DeterministicVariable(np.random.normal(0., 0.1, (M, 1)),
                           "b1",
                           learnable=True)
w1 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "w1",
                           learnable=True)