Code Example #1
 def __call__(self, var):
     dot_output = BF.dot(self.w, var, reduce=False) + self.b
     output = var + self.u*BF.sigmoid(dot_output)
     d_sigmoid = lambda x: BF.sigmoid(x)*(1. - BF.sigmoid(x))
     psy = d_sigmoid(dot_output)*self.w
     log_det = -BF.log(BF.abs(1. + BF.dot(self.u, psy)) + self.shift)
     return DeterministicVariable(output,
                                  log_determinant=log_det,
                                  name="PlanarFlow {}".format(var.name))
Code Example #2
 def __call__(self, query_points): #This creates a finite-dimensional instance of the process
     instance = self.get_joint_instance(query_points)
     if isinstance(instance, ProbabilisticModel) and self.has_posterior_instance:
         instance.set_posterior_model(self.active_submodel.posterior_model)
         if self.active_submodel.observed_submodel is not None:
             observed_variables = [DeterministicVariable(value=var._observed_value[:, 0, :],
                                                         name=var.name, is_observed=True)
                                   for var in self.active_submodel.observed_submodel.variables] #TODO: Work in progress with observed variables
             instance.posterior_model.add_variables(observed_variables)
     return instance
Code Example #3
 def construct_posterior_model(self, joint_model):
     test_sample = joint_model._get_sample(1, observed=False)
     posterior_model = ProbabilisticModel([
         DeterministicVariable(value[0, 0, :],
                               variable.name,
                               learnable=True)
         for variable, value in test_sample.items()
         if (not variable.is_observed)
         and not isinstance(variable, (DeterministicVariable, RootVariable))
     ])
     return posterior_model
Code Example #4
# Observe data #
[yt.observe(data[yt][:, 0, :]) for yt in y]

# Variational distribution
# N = int(T*(T+1)/2)
# v1 = DeterministicVariable(torch.normal(0., 0.1, (N,)), "v1", learnable=True)
# v2 = DeterministicVariable(torch.normal(0., 0.1, (N,)), "v2", learnable=True)
# b1 = DeterministicVariable(torch.normal(0., 0.1, (T,1)), "b1", learnable=True)
# w1 = DeterministicVariable(torch.normal(0., 0.1, (N,)), "w1", learnable=True)
# w2 = DeterministicVariable(torch.normal(0., 0.1, (N,)), "w2", learnable=True)
# b2 = DeterministicVariable(torch.normal(0., 0.1, (T,1)), "b2", learnable=True)
# Qz = NormalVariable(torch.zeros((T, 1)), torch.ones((T, 1)), "z")
# Qtrz = Bias(b1)(TriangularLinear(w1, T)(TriangularLinear(w2, T, upper=True)(Sigmoid()(Bias(b2)(TriangularLinear(v1, T)(TriangularLinear(v2, T, upper=True)(Qz)))))))

# Variational distribution
u1 = DeterministicVariable(torch.normal(0., 1., (T, 1)), "u1", learnable=True)
w1 = DeterministicVariable(torch.normal(0., 1., (T, 1)), "w1", learnable=True)
b1 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b1", learnable=True)
u2 = DeterministicVariable(torch.normal(0., 1., (T, 1)), "u2", learnable=True)
w2 = DeterministicVariable(torch.normal(0., 1., (T, 1)), "w2", learnable=True)
b2 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b2", learnable=True)
z = NormalVariable(torch.zeros((T, 1)),
                   torch.ones((T, 1)),
                   "z",
                   learnable=True)
Qtrz = PlanarFlow(w2, u2, b2)(PlanarFlow(w1, u1, b1)(z))

Qx = []
for t in range(0, T):
    Qx.append(DeterministicVariable(Qtrz[t], name=x_names[t]))
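Stacking the two flows adds their log-determinant corrections, since det(J2 J1) = det(J2) * det(J1) by the chain rule. A minimal check in plain torch (illustrative only, not Brancher API):

import torch

torch.manual_seed(0)
u1, u2 = torch.randn(2), torch.randn(2)
f1 = lambda x: x + u1 * torch.sigmoid(x.sum() + 1.0)
f2 = lambda x: x + u2 * torch.sigmoid(x.sum() - 0.5)

x = torch.randn(2)
J1 = torch.autograd.functional.jacobian(f1, x)
J2 = torch.autograd.functional.jacobian(f2, f1(x))
J = torch.autograd.functional.jacobian(lambda v: f2(f1(v)), x)
assert torch.allclose(torch.det(J), torch.det(J2) * torch.det(J1))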
Code Example #5
encoder1 = BF.BrancherFunction(
    EncoderArchitecture1(image_size=image_size, latent_size2=latent_size2))
encoder2 = BF.BrancherFunction(
    EncoderArchitecture2(latent_size1=latent_size1, latent_size2=latent_size2))
decoder1 = BF.BrancherFunction(
    DecoderArchitecture1(latent_size1=latent_size1, latent_size2=latent_size2))
decoder2 = BF.BrancherFunction(
    DecoderArchitecture2(latent_size2=latent_size2, image_size=image_size))

# Generative model
z1sd = 1.
z2sd = 0.5  #0.01
z1 = NormalVariable(np.zeros((latent_size1, )),
                    z1sd * np.ones((latent_size1, )),
                    name="z1")
decoder_output1 = DeterministicVariable(decoder1(z1), name="decoder_output1")
z2 = NormalVariable(BF.relu(decoder_output1["mean"]),
                    z2sd * np.ones((latent_size2, )),
                    name="z2")
decoder_output2 = DeterministicVariable(decoder2(z2), name="decoder_output2")
x = BinomialVariable(total_count=1, logits=decoder_output2["mean"], name="x")
model = ProbabilisticModel([x, z1, z2])

# Amortized variational distribution
b_size = 10
Qx = EmpiricalVariable(dataset, batch_size=b_size, name="x", is_observed=True)
encoder_output1 = DeterministicVariable(encoder1(Qx), name="encoder_output1")
Qz2 = NormalVariable(encoder_output1["mean"], encoder_output1["sd"], name="z2")
encoder_output2 = DeterministicVariable(encoder2(encoder_output1["mean"]),
                                        name="encoder_output2")
Qz1 = NormalVariable(encoder_output2["mean"], encoder_output2["sd"], name="z1")
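The excerpt ends before the variational distribution is attached to the model. Mirroring the single-layer VAE in Code Example #20 below, a plausible continuation (assumed, not shown in the source) would be:

model.set_posterior_model(ProbabilisticModel([Qx, Qz1, Qz2]))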
Code Example #6
n_itr = 2000  #2000
image_size = 64
N_rep = 5
loss_list1 = []
loss_list2 = []
loss_list3 = []
imagesGT = []
imagesNoise = []
images1 = []
images2 = []
images3 = []
for rep in range(N_rep):
    h_size = 120
    W1 = DeterministicVariable(np.random.normal(0., 0.2, (h_size, h_size)),
                               "W1",
                               learnable=False)
    W2 = DeterministicVariable(np.random.normal(0., 0.2, (h_size, h_size)),
                               "W2",
                               learnable=False)
    #V = DeterministicVariable(np.random.normal(0., 1., (100, h_size)), "V", learnable=False)

    f = lambda z, W: z + BF.tanh(BF.matmul(W, z))
    F = lambda z, W1, W2: f(f(z, W1), W2)

    measurement_noise = 2.  #1.5
    z = [
        NormalVariable(np.zeros((h_size, 1)),
                       np.ones((h_size, 1)),
                       "z0",
                       learnable=False)
Code Example #7
        DecoderArchitecture2(latent_size2=latent_size2,
                             latent_size3=latent_size3))
    decoder3 = BF.BrancherFunction(
        DecoderArchitecture3(latent_size3=latent_size3, image_size=image_size))
    decoderLabel = BF.BrancherFunction(
        DecoderArchitectureLabel(latent_size2=latent_size2,
                                 num_classes=num_classes))

    # Generative model
    z1sd = 1.5  # 1
    z2sd = 0.25  # 0.25
    z3sd = 0.15
    z1 = NormalVariable(np.zeros((latent_size1, )),
                        z1sd * np.ones((latent_size1, )),
                        name="z1")
    decoder_output1 = DeterministicVariable(decoder1(z1),
                                            name="decoder_output1")
    z2 = NormalVariable(BF.relu(decoder_output1["mean"]),
                        z2sd * np.ones((latent_size2, )),
                        name="z2")
    label_logits = DeterministicVariable(decoderLabel(z2), "label_logits")
    labels = CategoricalVariable(logits=label_logits, name="labels")
    decoder_output2 = DeterministicVariable(decoder2(z2),
                                            name="decoder_output2")
    z3 = NormalVariable(BF.relu(decoder_output2["mean"]),
                        z3sd * np.ones((latent_size3, )),
                        name="z3")
    decoder_output3 = DeterministicVariable(decoder3(z3),
                                            name="decoder_output3")
    x = BinomialVariable(total_count=1,
                         logits=decoder_output3["mean"],
                         name="x")
Code Example #8
import matplotlib.pyplot as plt
import numpy as np
import torch

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, DeterministicVariable, LogNormalVariable
import brancher.functions as BF
from brancher.visualizations import plot_density
from brancher.transformations import PlanarFlow
from brancher import inference
from brancher.visualizations import plot_posterior

# Model
M = 8
y = NormalVariable(torch.zeros((M, )), 1. * torch.ones((M, )), "y")
y0 = DeterministicVariable(y[0], "y0")
d = NormalVariable(y, torch.ones((M, )), "d")
model = ProbabilisticModel([d, y, y0])

# get samples
d.observe(d.get_sample(55, input_values={y: 1. * torch.ones((M, ))}))

# Variational distribution
u1 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "u1", learnable=True)
w1 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "w1", learnable=True)
b1 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b1", learnable=True)
u2 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "u2", learnable=True)
w2 = DeterministicVariable(torch.normal(0., 1., (M, 1)), "w2", learnable=True)
b2 = DeterministicVariable(torch.normal(0., 1., (1, 1)), "b2", learnable=True)
z = NormalVariable(torch.zeros((M, 1)),
                   torch.ones((M, 1)),
Code Example #9
 def __call__(self, var):
     return DeterministicVariable(BF.sigmoid(var),
                                  log_determinant=-BF.log(BF.sigmoid(var)) - BF.log(1 - BF.sigmoid(var)),
                                  name="Sigmoid({})".format(var.name))
Code Example #10
 def __call__(self, var):
     return DeterministicVariable(BF.exp(var), log_determinant=-var, name="Exp({})".format(var.name))
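Both element-wise transforms use the same convention as the flows above: log_determinant stores -log|f'(x)|. For the sigmoid, f'(x) = s(x)(1 - s(x)), giving -log(s) - log(1 - s); for the exponential, f'(x) = exp(x), giving simply -x. A quick sanity check in plain torch (illustrative only):

import torch

x = torch.linspace(-3.0, 3.0, 7)
s = torch.sigmoid(x)
assert torch.allclose(-torch.log(s) - torch.log(1 - s),
                      -torch.log(s * (1 - s)))    # sigmoid case
assert torch.allclose(-torch.log(torch.exp(x)), -x)  # exp case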
Code Example #11
import matplotlib.pyplot as plt
import numpy as np

from brancher.standard_variables import NormalVariable, DeterministicVariable, DirichletVariable
from brancher.variables import ProbabilisticModel
import brancher.functions as BF
from brancher import inference
from brancher import geometric_ranges

x = DeterministicVariable(1., "x", is_observed=True)
y = NormalVariable(-1., 0.1, "y", is_observed=True)
z = NormalVariable(0., 0.1, "z", is_observed=True)
w = DirichletVariable(np.ones((3, 1)), "w", is_policy=True, learnable=True)
r = DeterministicVariable((w[0] * x + w[1] * y + w[2] * z),
                          "r",
                          is_reward=True,
                          is_observed=True)

model = ProbabilisticModel([w, x, y, z, r])

print(model.get_average_reward(10))

# Train control
num_itr = 3000
inference.perform_inference(model,
                            inference_method=inference.MaximumLikelihood(),
                            number_iterations=num_itr,
                            number_samples=9,
                            optimizer="Adam",
                            lr=0.01)
reward_list = model.diagnostics[
Code Example #12
                                   is_observed=True)
        labels_test = EmpiricalVariable(output_labels_test,
                                        indices=minibatch_indices,
                                        name="labels",
                                        is_observed=True)

        # Forward pass
        in_channels = 1
        out_channels = 10
        image_size = 28
        Wk = NormalVariable(loc=np.zeros((out_channels, in_channels, 2, 2)),
                            scale=10 * np.ones(
                                (out_channels, in_channels, 2, 2)),
                            name="Wk")
        z = DeterministicVariable(BF.mean(BF.relu(BF.conv2d(x, Wk, stride=1)),
                                          (2, 3)),
                                  name="z")
        Wl = NormalVariable(loc=np.zeros((num_classes, out_channels)),
                            scale=10 * np.ones((num_classes, out_channels)),
                            name="Wl")
        b = NormalVariable(loc=np.zeros((num_classes, 1)),
                           scale=10 * np.ones((num_classes, 1)),
                           name="b")
        reshaped_z = BF.reshape(z, shape=(out_channels, 1))
        k = CategoricalVariable(logits=BF.linear(reshaped_z, Wl, b), name="k")

        # Probabilistic model
        model = ProbabilisticModel([k])

        # Observations
        k.observe(labels)
Code Example #13
                           is_observed=True)

# Forward pass
in_channels = 1
out_channels1 = 10
out_channels2 = 20
image_size = 28
Wk1 = NormalVariable(loc=np.zeros((out_channels1, in_channels, 3, 3)),
                     scale=np.ones((out_channels1, in_channels, 3, 3)),
                     name="Wk1")
Wk2 = NormalVariable(loc=np.zeros((out_channels2, out_channels1, 3, 3)),
                     scale=np.ones((out_channels2, out_channels1, 3, 3)),
                     name="Wk2")
z = DeterministicVariable(BF.mean(
    BF.conv2d(BF.relu(BF.conv2d(x, Wk1, stride=2, padding=0)),
              Wk2,
              stride=2,
              padding=0), (2, 3)),
                          name="z")
Wl = NormalVariable(loc=np.zeros((num_classes, out_channels2)),
                    scale=np.ones((num_classes, out_channels2)),
                    name="Wl")
b = NormalVariable(loc=np.zeros((num_classes, 1)),
                   scale=np.ones((num_classes, 1)),
                   name="b")
reshaped_z = BF.reshape(z, shape=(out_channels2, 1))
k = CategoricalVariable(logits=BF.linear(reshaped_z, Wl, b), name="k")

# Probabilistic model
model = ProbabilisticModel([k])
samples = model.get_sample(10)
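The reshape to (out_channels2, 1) works because the two stride-2, 3x3, unpadded convolutions shrink the 28 x 28 input to 13 x 13 and then 6 x 6, after which BF.mean over axes (2, 3) collapses the spatial dimensions. A shape check in plain torch (illustrative only, not Brancher API):

import torch

x = torch.randn(1, 1, 28, 28)
h = torch.nn.functional.conv2d(x, torch.randn(10, 1, 3, 3), stride=2)
h = torch.nn.functional.conv2d(h, torch.randn(20, 10, 3, 3), stride=2)
print(h.shape)  # torch.Size([1, 20, 6, 6]); mean over (2, 3) leaves 20 channels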
Code Example #14
from brancher.standard_variables import NormalVariable, DeterministicVariable
import brancher.functions as BF

##
a = DeterministicVariable(1.5, 'a')
b = DeterministicVariable(0.3, 'b')
c = DeterministicVariable(0.3, 'c')
d = BF.sin(a + b**2) / (3 * c)

##
print(d)
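Since a, b, and c are all deterministic, the graph above evaluates to a single number. A quick check of the expected value in plain numpy (illustrative only):

import numpy as np

print(np.sin(1.5 + 0.3 ** 2) / (3 * 0.3))  # ≈ 1.1109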
Code Example #15
        MSE = np.mean((np.array(ground_truth) - np.array(x_mean2)) ** 2)
        var = 0.5 * (np.array(upper_bound2) - np.array(lower_bound2)) ** 2
        Lk = np.mean(0.5 * (np.array(ground_truth) - np.array(x_mean2)) ** 2 / var
                     + 0.5 * np.log(var) + 0.5 * np.log(2 * np.pi))
        print("MF MSE {}".format(MSE))
        print("MF lk {}".format(Lk))
        MSE2.append(MSE)
        Lk2.append(Lk)

        # Multivariate normal variational distribution #
        Qomega = NormalVariable(2 * np.pi * f, 1., "omega", learnable=True)
        QV = NormalVariable(loc=np.zeros((T,)),
                            scale_tril=0.1 * np.identity(T),
                            name="V",  # assumption: the name argument is missing from the excerpt
                            learnable=True)
        Qx = [DeterministicVariable(QV[0], 'x0')]

        for t in range(1, T):
            Qx.append(DeterministicVariable(QV[t], x_names[t]))
        variational_posterior = ProbabilisticModel(Qx + [Qomega])
        AR_model.set_posterior_model(variational_posterior)

        # Inference #
        inference.perform_inference(AR_model,
                                    number_iterations=N_itr,
                                    number_samples=N_smpl,
                                    optimizer=optimizer,
                                    lr=lr)

        loss_list3 = AR_model.diagnostics["loss curve"]
Code Example #16
net = InferenceNet()

# Variational bayesian update
brancher_net = BF.BrancherFunction(net)


def bayesian_update(r, w0, w1, mx, my, sx, sy):
    out = brancher_net(r, w0, w1, mx, my, sx, sy)
    return [out[0], out[1], out[2], out[3]]


# Model
T = 20

sigma = 0.1
mx = [DeterministicVariable(0., "mx_0")]
my = [DeterministicVariable(0., "my_0")]
sx = [DeterministicVariable(0., "sx_0")]
sy = [DeterministicVariable(0., "sy_0")]
mux = [NormalVariable(mx[0].value, sx[0].value, "mux_0")]
muy = [NormalVariable(my[0].value, sy[0].value, "muy_0")]
Qmux = [NormalVariable(mx[0], sx[0], "mux_0")]
Qmuy = [NormalVariable(my[0], sy[0], "muy_0")]
w = []
r = []
for t in range(T):

    # Reward
    w.append(
        DirichletVariable(np.ones((2, 1)),
                          "w_{}".format(t),
Code Example #17
 def __call__(self, var):
     return DeterministicVariable(self.s*var,
                                  log_determinant=-BF.log(BF.abs(DeterministicVariable(self.s,
                                                                                       learnable=self.learnable,
                                                                                       name="{}_scale".format(var.name)))),
                                  name="{}*{}".format(self.s, var.name))
Code Example #18
    print("Repetition: {}".format(rep))
    # Initialize encoder and decoders
    encoder1 = BF.BrancherFunction(EncoderArchitecture1(image_size=image_size, latent_size3=latent_size3))
    encoder2 = BF.BrancherFunction(EncoderArchitecture2(latent_size2=latent_size2, latent_size3=latent_size3))
    encoder3 = BF.BrancherFunction(EncoderArchitecture3(latent_size1=latent_size1, latent_size2=latent_size2))

    decoder1 = BF.BrancherFunction(DecoderArchitecture1(latent_size1=latent_size1, latent_size2=latent_size2))
    decoder2 = BF.BrancherFunction(DecoderArchitecture2(latent_size2=latent_size2, latent_size3=latent_size3))
    decoder3 = BF.BrancherFunction(DecoderArchitecture3(latent_size3=latent_size3, image_size=image_size))

    # Generative model
    z1sd = 1.5 #1
    z2sd = 0.25 #0.25
    z3sd = 0.15
    z1 = NormalVariable(np.zeros((latent_size1,)), z1sd*np.ones((latent_size1,)), name="z1")
    decoder_output1 = DeterministicVariable(decoder1(z1), name="decoder_output1")
    z2 = NormalVariable(BF.relu(decoder_output1["mean"]), z2sd*np.ones((latent_size2,)), name="z2")
    decoder_output2 = DeterministicVariable(decoder2(z2), name="decoder_output2")
    z3 = NormalVariable(BF.relu(decoder_output2["mean"]), z3sd*np.ones((latent_size3,)), name="z3")
    decoder_output3 = DeterministicVariable(decoder3(z3), name="decoder_output3")
    x = BinomialVariable(total_count=1, logits=decoder_output3["mean"], name="x")
    model = ProbabilisticModel([x, z1, z2, z3])

    # Amortized variational distribution
    b_size = 200
    Qx = EmpiricalVariable(dataset, batch_size=b_size, name="x", is_observed=True)
    encoder_output1 = DeterministicVariable(encoder1(Qx), name="encoder_output1")
    Qz3 = NormalVariable(encoder_output1["mean"], encoder_output1["sd"], name="z3")
    encoder_output2 = DeterministicVariable(encoder2(encoder_output1["mean"]), name="encoder_output2")
    Qz2 = NormalVariable(encoder_output2["mean"], encoder_output2["sd"], name="z2")
    encoder_output3 = DeterministicVariable(encoder3(encoder_output2["mean"]), name="encoder_output3")
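The excerpt is cut off here; following the pattern of Qz3 and Qz2 above, the missing final line would plausibly be:

    Qz1 = NormalVariable(encoder_output3["mean"], encoder_output3["sd"], name="z1")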
Code Example #19
 def __call__(self, var):
     return DeterministicVariable(var + self.b,
                                  name="{}+{}".format(var.name, self.b))
Code Example #20
        h1 = self.f2(self.l2(h0))
        output_mean = self.l3(h1)
        return {"mean": output_mean}


# Initialize encoder and decoders
encoder = BF.BrancherFunction(
    EncoderArchitecture(image_size=image_size, latent_size=latent_size))
decoder = BF.BrancherFunction(
    DecoderArchitecture(latent_size=latent_size, image_size=image_size))

# Generative model
z = NormalVariable(np.zeros((latent_size, )),
                   np.ones((latent_size, )),
                   name="z")
decoder_output = DeterministicVariable(decoder(z), name="decoder_output")
x = BinomialVariable(total_count=1, logits=decoder_output["mean"], name="x")
model = ProbabilisticModel([x, z])

# Amortized variational distribution
Qx = EmpiricalVariable(dataset, batch_size=100, name="x", is_observed=True)
encoder_output = DeterministicVariable(encoder(Qx), name="encoder_output")
Qz = NormalVariable(encoder_output["mean"], encoder_output["sd"], name="z")
model.set_posterior_model(ProbabilisticModel([Qx, Qz]))

# Joint-contrastive inference
inference.perform_inference(
    model,
    inference_method=ReverseKL(gradient_estimator=PathwiseDerivativeEstimator),
    number_iterations=1000,
    number_samples=1,
Code Example #21
 def __call__(self, var):
     output = BF.matmul(self.tri_matrix, var)
     log_det = -BF.sum(BF.log(BF.abs(self.tri_matrix[:, self.diag_indices[0], self.diag_indices[1]]) + self.shift), axis=1)
     return DeterministicVariable(output,
                                  log_determinant=log_det,
                                  name="L {}".format(var.name))
Code Example #22
            if t in y_range:
                y_name = "y{}".format(t)
                y_names.append(y_name)
                y.append(NormalVariable(x[t], measure_noise, y_name))
        AR_model = ProbabilisticModel(x + y + z + h)

        # Generate data #
        data = AR_model._get_sample(number_samples=1)
        time_series = [float(data[yt].data) for yt in y]
        ground_truth = [float(data[xt].data) for xt in x]

        # Observe data #
        [yt.observe(data[yt][:, 0, :]) for yt in y]

        # Structured variational distribution #
        mx0 = DeterministicVariable(value=0., name="mx0", learnable=True)
        Qx = [NormalVariable(mx0, 5 * driving_noise, 'x0', learnable=True)]
        Qx_mean = [RootVariable(0., 'x0_mean', learnable=True)]
        Qxlambda = [RootVariable(-1., 'x0_lambda', learnable=True)]

        mh0 = DeterministicVariable(value=0., name="mh0", learnable=True)
        Qh = [NormalVariable(mh0, 5 * driving_noise, 'h0', learnable=True)]
        Qh_mean = [RootVariable(0., 'h0_mean', learnable=True)]
        Qhlambda = [RootVariable(-1., 'h0_lambda', learnable=True)]

        mz0 = DeterministicVariable(value=0., name="mz0", learnable=True)
        Qz = [NormalVariable(mz0, 5 * driving_noise, 'z0', learnable=True)]
        Qz_mean = [RootVariable(0., 'z0_mean', learnable=True)]
        Qzlambda = [RootVariable(-1., 'z0_lambda', learnable=True)]

        for t in range(1, T):
Code Example #23
import matplotlib.pyplot as plt
import numpy as np
import torch

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, DeterministicVariable, LogNormalVariable
import brancher.functions as BF
from brancher.visualizations import plot_density
from brancher.transformations import Exp, Scaling, TriangularLinear, Sigmoid, Bias
from brancher import inference
from brancher.visualizations import plot_posterior

# Model
M = 2
y = NormalVariable(torch.zeros((M, )), 1. * torch.ones((M, )), "y")
y0 = DeterministicVariable(y[0], "y0")
y1 = DeterministicVariable(y[1], "y1")
d = NormalVariable(y**2, torch.ones((M, )), "d")
model = ProbabilisticModel([d, y, y0, y1])

# get samples
d.observe(d.get_sample(25, input_values={y: 0.3 * torch.ones((M, ))}))

# Variational distribution
N = int(M * (M + 1) / 2)
v1 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "v1",
                           learnable=True)
v2 = DeterministicVariable(np.random.normal(0., 0.1, (N, )),
                           "v2",
                           learnable=True)
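v1 and v2 each hold N = M(M+1)/2 values, exactly the number of free entries in an M x M triangular matrix, i.e. one parameter vector for each TriangularLinear factor of the flow. A quick count check in plain numpy (illustrative only):

import numpy as np

M = 2
# entries on and below the diagonal of an M x M matrix
assert len(np.tril_indices(M)[0]) == int(M * (M + 1) / 2)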