Example #1
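# Bayesian two-layer classifier: Normal priors over the weights and biases,
# a tanh hidden layer, and a Categorical likelihood over class labels; the
# variational model mirrors the priors with learnable Normal distributions.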
import numpy as np

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, CategoricalVariable
import brancher.functions as BF

# Architecture parameters (number_pixels, number_output_classes, and the
# input/label variables x and labels come from earlier in the source file)
number_hidden_units = 20
b1 = NormalVariable(np.zeros((number_hidden_units, 1)), 10 * np.ones(
    (number_hidden_units, 1)), "b1")
b2 = NormalVariable(np.zeros((number_output_classes, 1)), 10 * np.ones(
    (number_output_classes, 1)), "b2")
weights1 = NormalVariable(np.zeros(
    (number_hidden_units, number_pixels)), 10 * np.ones(
        (number_hidden_units, number_pixels)), "weights1")
weights2 = NormalVariable(
    np.zeros((number_output_classes, number_hidden_units)), 10 * np.ones(
        (number_output_classes, number_hidden_units)), "weights2")

# Forward pass
hidden_units = BF.tanh(BF.matmul(weights1, x) + b1)
final_activations = BF.matmul(weights2, hidden_units) + b2
k = CategoricalVariable(softmax_p=final_activations, name="k")

# Probabilistic model
model = ProbabilisticModel([k])

# Observations
k.observe(labels)

# Variational Model
Qb1 = NormalVariable(np.zeros((number_hidden_units, 1)),
                     0.2 * np.ones((number_hidden_units, 1)),
                     "b1",
                     learnable=True)
Qb2 = NormalVariable(np.zeros((number_output_classes, 1)),
                     0.2 * np.ones((number_output_classes, 1)),
                     "b2",
                     learnable=True)
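
The snippet breaks off here: the variational distributions over the two weight
matrices and the inference call are missing. A minimal completion sketch,
assuming the same learnable-Normal pattern as Qb1 and Qb2 and brancher's
set_posterior_model / inference.perform_inference API; the iteration count,
sample count, and learning rate are placeholders, not from the source:

from brancher import inference

Qweights1 = NormalVariable(
    np.zeros((number_hidden_units, number_pixels)),
    0.2 * np.ones((number_hidden_units, number_pixels)),
    "weights1",
    learnable=True)
Qweights2 = NormalVariable(
    np.zeros((number_output_classes, number_hidden_units)),
    0.2 * np.ones((number_output_classes, number_hidden_units)),
    "weights2",
    learnable=True)
variational_model = ProbabilisticModel([Qb1, Qb2, Qweights1, Qweights2])
model.set_posterior_model(variational_model)

inference.perform_inference(model,
                            number_iterations=1000,  # placeholder
                            number_samples=50,       # placeholder
                            optimizer="Adam",
                            lr=0.005)                # placeholder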

Example #2
import numpy as np

from brancher.standard_variables import NormalVariable, DeterministicVariable
import brancher.functions as BF

# Deep state-space image model; N_rep and decoder (a neural image decoder)
# come from earlier in the source file
imagesGT = []
imagesNoise = []
images1 = []
images2 = []
images3 = []
for rep in range(N_rep):
    h_size = 120
    W1 = DeterministicVariable(np.random.normal(0., 0.2, (h_size, h_size)),
                               "W1",
                               learnable=False)
    W2 = DeterministicVariable(np.random.normal(0., 0.2, (h_size, h_size)),
                               "W2",
                               learnable=False)
    #V = DeterministicVariable(np.random.normal(0., 1., (100, h_size)), "V", learnable=False)

    f = lambda z, W: z + BF.tanh(BF.matmul(W, z))
    F = lambda z, W1, W2: f(f(z, W1), W2)
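    # f is a single residual layer, z -> z + BF.tanh(Wz); F composes two of
    # them to form the nonlinear latent transition function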

    measurement_noise = 2.  #1.5
    z = [
        NormalVariable(np.zeros((h_size, 1)),
                       np.ones((h_size, 1)),
                       "z0",
                       learnable=False)
    ]
    img = [
        DeterministicVariable(decoder(BF.reshape(z[0], (h_size, 1, 1))),
                              "img0",
                              learnable=False)
    ]
    x = [
        NormalVariable(img[0],
                       measurement_noise,
                       "x0",
                       learnable=False)
    ]
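
Example #2 also stops short, just as the observed variables are being
collected. A sketch of how such brancher state-space constructions typically
continue, mirroring the per-time-step pattern of Example #4 below; T and
driving_noise are assumptions, not values from the source:

    T = 20               # assumed number of time steps
    driving_noise = 0.5  # assumed latent transition noise
    for t in range(1, T):
        z.append(NormalVariable(F(z[t - 1], W1, W2),
                                driving_noise,
                                "z{}".format(t),
                                learnable=False))
        img.append(DeterministicVariable(
            decoder(BF.reshape(z[t], (h_size, 1, 1))),
            "img{}".format(t),
            learnable=False))
        x.append(NormalVariable(img[t],
                                measurement_noise,
                                "x{}".format(t),
                                learnable=False))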

Example #3
import numpy as np

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import (NormalVariable, CategoricalVariable,
                                         EmpiricalVariable)
import brancher.functions as BF

# Bayesian two-layer classifier with a particle-based variational model;
# output_labels, minibatch_indices, x, N, and the architecture sizes come
# from earlier in the source file
labels = EmpiricalVariable(output_labels,
                           indices=minibatch_indices,
                           name="labels",
                           is_observed=True)

# Architecture parameters
weights1 = NormalVariable(
    np.zeros((number_hidden_nodes, number_regressors)), 10 * np.ones(
        (number_hidden_nodes, number_regressors)), "weights1")
weights2 = NormalVariable(
    np.zeros((number_output_classes, number_hidden_nodes)),
    10 * np.ones(
        (number_output_classes, number_hidden_nodes)), "weights2")

# Forward pass
final_activations = BF.matmul(weights2, BF.tanh(BF.matmul(weights1, x)))
k = CategoricalVariable(softmax_p=final_activations, name="k")

# Probabilistic model
model = ProbabilisticModel([k])

# Observations
k.observe(labels)

# Variational model: one random initial location per particle
num_particles = N
initial_locations1 = [
    np.random.normal(0., 1., (number_hidden_nodes, number_regressors))
    for _ in range(num_particles)
]
initial_locations2 = [
    np.random.normal(0., 1., (number_output_classes, number_hidden_nodes))
    for _ in range(num_particles)
]
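
The snippet ends while the per-particle initial locations are being built. A
sketch of the likely next step, assuming each particle is a deterministic,
learnable copy of the weight variables, which is the form brancher's
particle-based inference methods (e.g. Wasserstein variational gradient
descent) operate on; the actual inference call is not shown in the source:

from brancher.standard_variables import DeterministicVariable

particles = [
    ProbabilisticModel([
        DeterministicVariable(loc1, "weights1", learnable=True),
        DeterministicVariable(loc2, "weights2", learnable=True)
    ])
    for loc1, loc2 in zip(initial_locations1, initial_locations2)
]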
Example #4
from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, LogNormalVariable, LogitNormalVariable
from brancher import inference
import brancher.functions as BF

# Probabilistic model #
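# Nonlinear autoregressive process: x_t ~ Normal(tanh(b * x_{t-1}), nu)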
T = 100

nu = LogNormalVariable(0.3, 1., 'nu')
x0 = NormalVariable(0., 1., 'x0')
b = LogitNormalVariable(0.5, 1.5, 'b')

x = [x0]
names = ["x0"]
for t in range(1, T):
    names.append("x{}".format(t))
    x.append(NormalVariable(BF.tanh(b * x[t - 1]), nu, names[t]))
AR_model = ProbabilisticModel(x)

# Generate data #
data = AR_model._get_sample(number_samples=1)
time_series = [float(data[xt].data) for xt in x]
true_b = data[b].data
true_nu = data[nu].data
print("The true coefficient is: {}".format(float(true_b)))

# Observe data #
for xt in x:
    xt.observe(data[xt][:, 0, :])

# Variational distribution #
Qnu = LogNormalVariable(0.5, 1., "nu", learnable=True)
Qb = LogitNormalVariable(0.5, 0.5, "b", learnable=True)
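
The example stops before the inference step. A minimal completion sketch using
the same set_posterior_model / perform_inference pattern sketched under
Example #1; iteration count, sample count, and learning rate are placeholders:

variational_model = ProbabilisticModel([Qb, Qnu])
AR_model.set_posterior_model(variational_model)

inference.perform_inference(AR_model,
                            number_iterations=200,  # placeholder
                            number_samples=300,     # placeholder
                            optimizer="Adam",
                            lr=0.05)                # placeholder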