Exemplo n.º 1
0
                             is_observed=True)
# Wrap the held-out data (presumably image tensors — confirm upstream) as
# observed empirical variables and bundle them into a model for evaluation.
flat_test_data = np.reshape(
    test.test_data.numpy(),
    newshape=(test.test_data.shape[0], number_pixels, 1),
)
test_images = EmpiricalVariable(
    flat_test_data,
    indices=test_indices,
    name="x_test",
    is_observed=True,
)
test_labels = EmpiricalVariable(
    test.test_labels.numpy(),
    indices=test_indices,
    name="labels",
    is_observed=True,
)
test_model = ProbabilisticModel([test_images, test_labels])

# Evaluate classification accuracy: for each sampled test image, average 10
# posterior-predictive score vectors and compare their argmax to the label.
correct = 0
for _ in range(num_images):
    test_sample = test_model._get_sample(1)
    test_image, test_label = test_sample[test_images], test_sample[test_labels]
    # Posterior scores for this image: (10 posterior samples, 10 classes).
    posterior_scores = model._get_posterior_sample(
        10, input_values={x: test_image})[k].cpu().detach().numpy()
    model_output = np.reshape(np.mean(posterior_scores, axis=0),
                              newshape=(10,))
    # int(...) of the comparison replaces the `1 if ... else 0` expression.
    correct += int(int(np.argmax(model_output)) ==
                   int(test_label.cpu().detach().numpy()))
print("Accuracy: {} %".format(100 * correct / float(num_images)))

# NOTE(review): the commented-out block below visualized a learned weight map
# as a 28x28 image; either re-enable it or delete the dead code.
#weight_map = variational_model._get_sample(1)[Qweights1].data[0, 0, 0, :]
#plt.imshow(np.reshape(weight_map, (28, 28)))
#plt.show()

# Plot the training loss curve recorded during inference.
plt.plot(model.diagnostics["loss curve"])
plt.show()
Exemplo n.º 2
0
                NormalVariable(new_x,
                               np.sqrt(dt) * driving_noise, x_names[t]))
            # Gaussian transition steps for the latent processes; the noise
            # scales with sqrt(dt), consistent with an Euler-Maruyama-style
            # SDE discretization — TODO confirm against the loop header above.
            h.append(
                NormalVariable(new_h,
                               np.sqrt(dt) * driving_noise, h_names[t]))
            z.append(
                NormalVariable(new_z,
                               np.sqrt(dt) * driving_noise, z_names[t]))
            # Noisy observations of x are emitted only at selected time steps.
            if t in y_range:
                y_name = "y{}".format(t)
                y_names.append(y_name)
                y.append(NormalVariable(x[t], measure_noise, y_name))
        AR_model = ProbabilisticModel(x + y + z + h)

        # Generate data #
        # One joint draw: y gives the observed series, x the latent ground
        # truth kept for later comparison.
        data = AR_model._get_sample(number_samples=1)
        time_series = [float(data[yt].data) for yt in y]
        ground_truth = [float(data[xt].data) for xt in x]

        # Observe data #
        # Condition each observation variable on its own sampled value; the
        # list comprehension is used purely for the observe() side effect.
        [yt.observe(data[yt][:, 0, :]) for yt in y]

        # Structured variational distribution #
        # Learnable initial-state factors for each latent process. The
        # 5 * driving_noise scale starts the variational initial states with a
        # deliberately broad spread.
        mx0 = DeterministicVariable(value=0., name="mx0", learnable=True)
        Qx = [NormalVariable(mx0, 5 * driving_noise, 'x0', learnable=True)]
        # Per-timestep mean / lambda parameter lists — presumably extended for
        # t > 0 in code below the visible cut; confirm.
        Qx_mean = [RootVariable(0., 'x0_mean', learnable=True)]
        Qxlambda = [RootVariable(-1., 'x0_lambda', learnable=True)]

        mh0 = DeterministicVariable(value=0., name="mh0", learnable=True)
        Qh = [NormalVariable(mh0, 5 * driving_noise, 'h0', learnable=True)]
        Qh_mean = [RootVariable(0., 'h0_mean', learnable=True)]
                           "z{}".format(t),
                           learnable=False))
        # Deterministic decoded frame from the latent z; reshaped to
        # (h_size, 1, 1) before decoding — decoder semantics live elsewhere.
        img.append(
            DeterministicVariable(decoder(BF.reshape(z[-1], (h_size, 1, 1))),
                                  "img{}".format(t),
                                  learnable=False))
        # Only at time steps selected by t_cond does the model emit a noisy
        # RGB observation of the decoded frame.
        if t_cond(t):
            x.append(
                NormalVariable(img[-1],
                               measurement_noise * np.ones(
                                   (3, image_size, image_size)),
                               "x{}".format(t),
                               learnable=False))
    model = ProbabilisticModel(x + z + img)

    # One joint draw of all variables; used both as ground truth and as the
    # values the observation variables are conditioned on below.
    samples = model._get_sample(1)

    imagesGT.append([
        np.reshape(samples[img[t]].detach().numpy(),
                   (3, image_size, image_size)) for t in range(T)
    ])
    imagesNoise.append([
        np.reshape(samples[x[t]].detach().numpy(), (3, image_size, image_size))
        for t in range(T)
    ])

    # Observe model
    # Comprehension used purely for the observe() side effect; the indexing
    # drops the leading sample axis — assumes a 5-D sample tensor, confirm.
    [xt.observe(samples[xt].detach().numpy()[0, :, :, :, :]) for xt in x]

    #### 1 ASDI ####
Exemplo n.º 4
0
# Generative model: linear regression with an interaction term and a
# log-normal noise scale.
nu = LogNorm(0.2, 0.5, name="nu")
regression_mean = b + w1 * x1 + w2 * x2 + w12 * x1 * x2
y = Norm(regression_mean, nu, name="y")
model = ProbabilisticModel([y])

# Variational distributions: one learnable factor per latent variable.
Qb = Norm(0., 1., name="b", learnable=True)
Qw1 = Norm(0., 1., name="w1", learnable=True)
Qw2 = Norm(0., 1., name="w2", learnable=True)
Qw12 = Norm(0., 1., name="w12", learnable=True)
Qnu = LogNorm(0.2, 0.5, name="nu", learnable=True)
variational_model = ProbabilisticModel([Qb, Qw1, Qw2, Qw12, Qnu])
model.set_posterior_model(variational_model)

# Generate data: a single ground-truth draw from the generative model.
ground_samples = model._get_sample(1)

# Observe data: condition y on its own sampled values.
data = np.reshape(ground_samples[y].cpu().detach().numpy(),
                  newshape=(n, 1, 1))
y.observe(data)

# Inference: fit the variational posterior.
inference.perform_inference(
    model,
    number_iterations=5000,
    number_samples=100,
    optimizer='Adam',
)

# Plot (left disabled by the original author)
#plt.plot(model.diagnostics["loss curve"])
#plt.show()

# Synthetic two-class dataset: Gaussian clusters centered at +1.5 / -1.5.
number_samples = 50
half = int(number_samples / 2)
x1_input_variable = np.random.normal(1.5, 1.5, (half, number_regressors, 1))
# np.zeros / np.ones replace the equivalent 0*np.ones / 1*np.ones forms.
x1_labels = np.zeros((half, 1))
x2_input_variable = np.random.normal(-1.5, 1.5, (half, number_regressors, 1))
x2_labels = np.ones((half, 1))
input_variable = np.concatenate((x1_input_variable, x2_input_variable), axis=0)
labels = np.concatenate((x1_labels, x2_labels), axis=0)

# Probabilistic model: Bernoulli (Binomial, 1 trial) logistic regression
# with a Gaussian prior over the weights.
weights = NormalVariable(np.zeros((1, number_regressors)),
                         0.5 * np.ones((1, number_regressors)), "weights")
x = DeterministicVariable(input_variable, "x", is_observed=True)
logit_p = BF.matmul(weights, x)
k = BinomialVariable(1, logit_p=logit_p, name="k")
model = ProbabilisticModel([k])

samples = model._get_sample(300)

# Observations: condition the outcome variable on the synthetic labels.
k.observe(labels)

# Variational model: full-covariance Gaussian over the weights (the
# mean-field alternative is kept commented for reference).
#Qweights = NormalVariable(np.zeros((1, number_regressors)),
#                          np.ones((1, number_regressors)), "weights", learnable=True)
Qweights = MultivariateNormalVariable(
    loc=np.zeros((1, number_regressors)),
    covariance_matrix=np.identity(number_regressors),
    name="weights", learnable=True)
variational_model = ProbabilisticModel([Qweights])
model.set_posterior_model(variational_model)

# Inference
inference.perform_inference(model,
Exemplo n.º 6
0
# Hierarchy of Cauchy variables: b feeds c, and (b, c) feed d; only d is
# passed to the model, which pulls in its ancestors.
# NOTE: the two np.random.normal calls are kept in the original order so the
# RNG stream (and therefore the sampled parameters) is unchanged.
b_loc = np.random.normal(0, 1, (dim, dim))
b_scale = 2 + np.random.normal(0, 1, (dim, dim)) ** 2
b = CauchyVariable(loc=b_loc, scale=b_scale, name="b", learnable=True)

c = CauchyVariable(loc=a + b, scale=1 + a ** 2, name="c", learnable=True)

d = CauchyVariable(loc=c, scale=1 + b ** 2, name="d", learnable=True)

model = ProbabilisticModel([d])

#print(model.get_mean())
#print(model.get_variance())
#print(model.get_entropy())

# Compare the model's semi-analytic entropy estimate against a plain
# Monte Carlo estimate (negative mean log-probability) over 50 repetitions.
n_samples = 5
ent_list = []
samp_ent_list = []
for _ in range(50):  # loop index was unused; renamed itr -> _
    q_sample = model._get_sample(n_samples)
    # Generator expression instead of a throwaway list inside sum(); each
    # term is one variable's entropy averaged over the sample batch.
    entropy = sum(e.sum() / float(n_samples)
                  for e in model._get_entropy(q_sample).values())
    ent_list.append(float(entropy))
    sampled_entropy = -model.calculate_log_probability(q_sample).mean()
    samp_ent_list.append(float(sampled_entropy))

print("Semi-analytic: {} +- {}".format(np.mean(ent_list), np.std(ent_list)))
print("Stochastic: {} +- {}".format(np.mean(samp_ent_list), np.std(samp_ent_list)))