# ----- Example 1 (fragment; enclosing function begins outside this excerpt) -----
    # NOTE(review): fragment — the enclosing function's `def` and the earlier
    # variables (z1, z2, decoder2/3, z3sd, latent_size3, labels, b_size,
    # dataset, output_labels, encoder1) are defined outside this excerpt.
    # Generative side: z2 -> decoder2 -> z3 -> decoder3 -> x.
    decoder_output2 = DeterministicVariable(decoder2(z2),
                                            name="decoder_output2")
    # z3 is Gaussian with mean ReLU(decoder2 output); the scalar std z3sd is
    # broadcast over the latent dimension via np.ones.
    z3 = NormalVariable(BF.relu(decoder_output2["mean"]),
                        z3sd * np.ones((latent_size3, )),
                        name="z3")
    decoder_output3 = DeterministicVariable(decoder3(z3),
                                            name="decoder_output3")
    # total_count=1 makes the Binomial a Bernoulli likelihood over the
    # decoder logits (i.e. binarized observations).
    x = BinomialVariable(total_count=1,
                         logits=decoder_output3["mean"],
                         name="x")
    model = ProbabilisticModel([x, z1, z2, z3, labels])

    # Amortized variational distribution

    # One RandomIndices variable is shared by data and labels so each
    # minibatch keeps inputs and targets aligned.
    minibatch_indices = RandomIndices(dataset_size=dataset_size,
                                      batch_size=b_size,
                                      name="indices",
                                      is_observed=True)

    # Observed data; name "x" matches the model variable it observes.
    Qx = EmpiricalVariable(dataset,
                           indices=minibatch_indices,
                           name="x",
                           is_observed=True)

    # Observed labels; name "labels" matches the model variable.
    Qlabels = EmpiricalVariable(output_labels,
                                indices=minibatch_indices,
                                name="labels",
                                is_observed=True)

    # Recognition network: encoder output parameterizes the variational
    # posterior over z3.
    encoder_output1 = DeterministicVariable(encoder1(Qx),
                                            name="encoder_output1")
    # NOTE(review): the statement below continues past this excerpt
    # (the call is unterminated here).
    Qz3 = NormalVariable(encoder_output1["mean"],
# ----- Example 2 (fragment; the first statement begins outside this excerpt) -----
                                   train=True,
                                   download=True,
                                   transform=None)
# Test split of MNIST; not used further within this excerpt.
test = torchvision.datasets.MNIST(root='./data',
                                  train=False,
                                  download=True,
                                  transform=None)
dataset_size = len(train)
# Flatten each image into a (number_pixels, 1) column vector.
# NOTE(review): assumes number_pixels is defined earlier in the file.
input_variable = np.reshape(train.train_data.numpy(),
                            newshape=(dataset_size, number_pixels, 1))
output_labels = train.train_labels.numpy()

# Data sampling model
minibatch_size = 30
# Shared random indices keep images and labels aligned within a minibatch.
minibatch_indices = RandomIndices(dataset_size=dataset_size,
                                  batch_size=minibatch_size,
                                  name="indices",
                                  is_observed=True)
x = EmpiricalVariable(input_variable,
                      indices=minibatch_indices,
                      name="x",
                      is_observed=True)
labels = EmpiricalVariable(output_labels,
                           indices=minibatch_indices,
                           name="labels",
                           is_observed=True)

# Architecture parameters
number_hidden_units = 20
# Wide zero-mean Gaussian priors (std 10) on the layer bias parameters.
b1 = NormalVariable(np.zeros((number_hidden_units, 1)), 10 * np.ones(
    (number_hidden_units, 1)), "b1")
# NOTE(review): the statement below continues past this excerpt
# (the call is unterminated here).
b2 = NormalVariable(np.zeros((number_output_classes, 1)), 10 * np.ones(
import numpy as np

import chainer
import chainer.links as L
import chainer.functions as F

from brancher.variables import DeterministicVariable, RandomVariable, ProbabilisticModel
from brancher.standard_variables import NormalVariable, EmpiricalVariable, RandomIndices
from brancher.functions import BrancherFunction
import brancher.functions as BF

## Data ##
# Toy dataset: 100 one-dimensional samples from a standard normal.
dataset_size = 100
number_dimensions = 1
dataset1 = np.random.normal(0, 1, (dataset_size, number_dimensions))

## Variables ##
# A single RandomIndices variable shared by both empirical variables, so
# "a" and "b" draw the same rows of the dataset on every sample.
indices = RandomIndices(dataset_size=dataset_size, batch_size=5, name="indices")
a = EmpiricalVariable(dataset1, indices=indices, name='a', is_observed=True)
# FIX: this variable was also named 'a', colliding with the variable above;
# the name now matches its Python identifier.
b = EmpiricalVariable(dataset1, indices=indices, name='b', is_observed=True)

model = ProbabilisticModel([a, b])


## Sample ##
# NOTE(review): _get_sample is a private API; kept as in the original example.
samples = model._get_sample(1)

# Because both variables share `indices`, the two printed samples coincide.
print(samples[a])
print(samples[b])