Example No. 1
from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, LogNormalVariable

# Real model (the snippet starts mid-script; the ground-truth parameters
# mu_real and nu_real below are assumed values for illustration)
mu_real = 1.
nu_real = 0.5
x_real = NormalVariable(mu_real, nu_real, "x_real")

# Normal model
nu = LogNormalVariable(0., 1., "nu")
mu = NormalVariable(0., 10., "mu")
x = NormalVariable(mu, nu, "x")
model = ProbabilisticModel([x])

print(model)

# Print samples (get_sample returns a pandas DataFrame, one row per sample)
sample = model.get_sample(10)
print(sample)

# Print samples from a single variable
x_sample = x.get_sample(10)
print(x_sample)

# Print samples conditional on an input
in_sample = model.get_sample(10, input_values={mu: 100.})
print(in_sample)

# Generate data (the private _get_sample returns a dict of raw tensors keyed by variable)
data = x_real._get_sample(number_samples=50)

# Observe data
x.observe(data[x_real][:, 0, :])

# Variational model
Qnu = LogNormalVariable(0., 1., "nu", learnable=True)
Qmu = NormalVariable(0., 1., "mu", learnable=True)
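
# A plausible continuation of this truncated example, following the pattern of
# the snippets below: wrap the variational variables in a ProbabilisticModel
# and attach it to the generative model as its posterior model.
variational_model = ProbabilisticModel([Qnu, Qmu])
model.set_posterior_model(variational_model)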
Example No. 2
import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import BernulliVariable, NormalVariable
import brancher.functions as BF
from brancher import inference
from brancher.inference import ReverseKL
from brancher.gradient_estimators import BlackBoxEstimator, Taylor1Estimator

# Model
z1 = BernulliVariable(logits=0., name="z1")
z2 = BernulliVariable(logits=0., name="z2")
y = NormalVariable(2 * z1 + z2, 1., name="y")
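# Since 2*z1 + z2 takes values in {0, 1, 2, 3}, y is a mixture of four
# unit-variance normals centered at those points.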
model = ProbabilisticModel([y])

# Generate data
data = y.get_sample(20, input_values={z1: 1, z2: 0})
data.hist(bins=20)
plt.show()

# Observe data
y.observe(data)

# Variational model
Qz1 = BernulliVariable(logits=0., name="z1", learnable=True)
Qz2 = BernulliVariable(logits=0., name="z2", learnable=True)
variational_model = ProbabilisticModel([Qz1, Qz2])
model.set_posterior_model(variational_model)

# Joint-contrastive inference
inference.perform_inference(model,
                            inference_method=ReverseKL(gradient_estimator=Taylor1Estimator),
                            number_iterations=2000,
                            number_samples=50,
                            optimizer="Adam",
                            lr=0.01)  # the original call was truncated; these argument values are illustrative
Example No. 3
import numpy as np

from brancher import functions as BF

from brancher.standard_variables import NormalVariable as Normal
from brancher.standard_variables import DeterministicVariable

in_channels = 4
out_channels = 5
a = Normal(loc=np.zeros((in_channels, 28, 28)),
           scale=1.,
           name="a")
W = Normal(loc=np.zeros((out_channels, in_channels, 3, 3)),
           scale=np.ones((out_channels, in_channels, 3, 3)),
           name="W")
y = Normal(BF.conv2d(a, W), 0.1, name="y")
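# Here the convolution itself is stochastic: both the input a and the
# 3x3 kernels W are random variables.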

samples = y.get_sample(9)["y"]
print(samples[0].shape)  # (out_channels, 26, 26): a 3x3 kernel with no padding shrinks 28 to 26
print(len(samples))      # 9, one entry per requested sample
Example No. 4
import torch

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, DeterministicVariable, LogNormalVariable
import brancher.functions as BF
from brancher.visualizations import plot_density
from brancher.transformations import PlanarFlow
from brancher import inference
from brancher.visualizations import plot_posterior

# Model
M = 8
y = NormalVariable(torch.zeros((M, )), 1. * torch.ones((M, )), "y")
y0 = DeterministicVariable(y[1], "y0")
d = NormalVariable(y, torch.ones((M, )), "d")
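# d is a noisy (unit-variance) observation of the latent vector y.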
model = ProbabilisticModel([d, y, y0])

# Generate data and observe it
d.observe(d.get_sample(55, input_values={y: 1. * torch.ones((M, ))}))

# Variational distribution
u1 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "u1", learnable=True)
w1 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "w1", learnable=True)
b1 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b1", learnable=True)
u2 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "u2", learnable=True)
w2 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "w2", learnable=True)
b2 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b2", learnable=True)
z = NormalVariable(torch.zeros((M, 1)),
                   torch.ones((M, 1)),
                   "z",
                   learnable=True)
Qy = PlanarFlow(w2, u2, b2)(PlanarFlow(w1, u1, b1)(z))
Qy.name = "y"
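# The two planar flows are composed, z -> flow1 -> flow2, and renaming the
# output to "y" makes it act as the variational posterior over y.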
Qy0 = DeterministicVariable(Qy[1], "y0")
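
# A plausible continuation of this truncated example: wrap the flow output in
# a ProbabilisticModel and attach it as the posterior model.
variational_model = ProbabilisticModel([Qy, Qy0])
model.set_posterior_model(variational_model)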
                            name="a")
b = NormalVariable(mu=a, sigma=a**2, name="b")
model = ProbabilisticModel([a, b])

# Variational model
Qa = TruncatedNormalVariable(mu=1.,
                             sigma=0.25,
                             truncation_rule=lambda x: x > 0.1,
                             name="a",
                             learnable=True)
variational_model = ProbabilisticModel([Qa])
model.set_posterior_model(variational_model)

# Generate data
num_observations = 10
data = b.get_sample(number_samples=num_observations, input_values={a: 1.})

# Observe data
b.observe(data)

# Inference
inference.perform_inference(model,
                            number_iterations=500,
                            number_samples=50,
                            optimizer=chainer.optimizers.Adam(0.025))
loss_list = model.diagnostics["loss curve"]

plt.plot(loss_list)
plt.show()

plot_posterior(model, variables=["a", "b"])
Example No. 6
import numpy as np

import brancher.functions as BF
from brancher.standard_variables import NormalVariable as Normal
from brancher.standard_variables import CategoricalVariable as Categorical

in_channels = 1
out_channels = 5
image_size = 28
# x must be defined here, since z below convolves it
x = Normal(loc=np.zeros((in_channels, image_size, image_size)),
           scale=1.,
           name="x")
Wk = Normal(loc=np.zeros((out_channels, in_channels, 3, 3)),
            scale=1. * np.ones((out_channels, in_channels, 3, 3)),
            name="Wk")
z = Normal(BF.conv2d(x, Wk, padding=1), 1., name="z")
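# padding=1 with a 3x3 kernel preserves the 28x28 spatial size of x.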

num_samples = 6
z_samples = z.get_sample(num_samples)["z"]  # draw samples to sanity-check the convolutional layer

num_classes = 10
Wl = Normal(loc=np.zeros(
    (num_classes, image_size * image_size * out_channels)),
            scale=1. * np.ones(
                (num_classes, image_size * image_size * out_channels)),
            name="Wl")
b = Normal(loc=np.zeros((num_classes, 1)),
           scale=1. * np.ones((num_classes, 1)),
           name="b")
# Flatten the feature maps and map them linearly to class logits
reshaped_z = BF.reshape(z, shape=(image_size * image_size * out_channels, 1))
k = Categorical(logits=BF.linear(reshaped_z, Wl, b), name="k")

# Observe the class labels ("labels" is assumed to be defined elsewhere,
# e.g. an integer array with one label per observation)
k.observe(labels)
Example No. 7
import numpy as np
import torch

import brancher.functions as BF
from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, DeterministicVariable
from brancher.visualizations import plot_density
from brancher.transformations import Exp, Scaling, TriangularLinear, Sigmoid, Bias
from brancher import inference
from brancher.visualizations import plot_posterior

# Model
M = 2
y = NormalVariable(torch.zeros((M, )), 1. * torch.ones((M, )), "y")
y0 = DeterministicVariable(y[0], "y0")
y1 = DeterministicVariable(y[1], "y1")
d = NormalVariable(y**2, torch.ones((M, )), "d")
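# The likelihood depends on y only through y**2, so y and -y explain the data
# equally well: the posterior is multimodal, which motivates the flexible
# flow-based variational family built below.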
model = ProbabilisticModel([d, y, y0, y1])  # include y1, which is otherwise unused

# Generate data and observe it
d.observe(d.get_sample(25, input_values={y: 0.3 * torch.ones((M, ))}))

# Variational distribution
N = int(M * (M + 1) / 2)
v1 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "v1",
                           learnable=True)
v2 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "v2",
                           learnable=True)
b1 = DeterministicVariable(np.random.normal(0., 0.1, (M, 1)),
                           "b1",
                           learnable=True)
w1 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "w1",
                           learnable=True)