Code example #1
import torch
import torch.nn as nn
import torchvision

import matplotlib.pyplot as plt
import numpy as np

import brancher.config as cfg
cfg.set_device('gpu')
print(cfg.device)

from brancher.variables import RootVariable, ProbabilisticModel
from brancher.standard_variables import NormalVariable, EmpiricalVariable, BinomialVariable, DeterministicVariable, LogNormalVariable
from brancher import inference
from brancher.inference import ReverseKL
from brancher.gradient_estimators import Taylor1Estimator, PathwiseDerivativeEstimator, BlackBoxEstimator
import brancher.functions as BF

from brancher.config import device

# Data
image_size = 28*28
latent_size = 2

train = torchvision.datasets.MNIST(root='./data', train=True, download=True, transform=None)
test = torchvision.datasets.MNIST(root='./data', train=False, download=True, transform=None)
dataset_size = len(train)
#dataset = torch.Tensor(np.reshape(train.data.numpy(), newshape=(dataset_size, image_size, 1))).double().to(device)
dataset = np.reshape(train.data.numpy(), newshape=(dataset_size, image_size, 1))  # train.train_data is deprecated in recent torchvision; use train.data
data_mean = np.mean(dataset)
# Binarize the pixels around the global mean intensity so the images become {0, 1} valued
dataset = (dataset > data_mean).astype("int32")
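The imports at the top already pull in EmpiricalVariable, which Brancher's MNIST examples use to expose a dataset to the model as random minibatches. The sketch below shows that step under the assumption that the RandomIndices helper and the keyword arguments match the installed Brancher version; the minibatch size and variable names are placeholders, not part of the original snippet.

from brancher.standard_variables import RandomIndices

minibatch_size = 50  # placeholder value
minibatch_indices = RandomIndices(dataset_size=dataset_size,
                                  batch_size=minibatch_size,
                                  name="indices",
                                  is_observed=True)
# Each evaluation then draws a fresh random minibatch of the binarized images.
# Depending on the Brancher version, the numpy array may first need to be
# converted to a torch tensor (see the commented-out line above).
x_data = EmpiricalVariable(dataset,
                           indices=minibatch_indices,
                           name="x_data",
                           is_observed=True)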
Code example #2
import brancher.config as cfg
cfg.set_device("cpu")

import matplotlib.pyplot as plt
import numpy as np

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import NormalVariable, LogNormalVariable
from brancher import inference

# Normal model
nu = LogNormalVariable(0., 1., "nu")
mu = NormalVariable(0., 10., "mu")
x = NormalVariable(mu, nu, "x")
model = ProbabilisticModel([x])  # to fix plot_posterior (flatten automatically?)

# Generate data
nu_real = 1.
mu_real = -2.
data = model.get_sample(number_samples=20,
                        input_values={
                            mu: mu_real,
                            nu: nu_real
                        })

# Observe data
x.observe(data)

# Variational model
Qnu = LogNormalVariable(0., 1., "nu", learnable=True)
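The example stops after the first learnable variational variable. The Brancher README completes this same Normal model roughly as follows; the sketch below assumes that pattern, and the iteration count, sample count, optimizer settings, and the "loss curve" diagnostics key are placeholders to check against your installed Brancher version.

Qmu = NormalVariable(0., 1., "mu", learnable=True)
variational_model = ProbabilisticModel([Qmu, Qnu])
model.set_posterior_model(variational_model)

# Stochastic variational inference (hyperparameters are placeholders)
inference.perform_inference(model,
                            number_iterations=3000,
                            number_samples=50,
                            optimizer='Adam',
                            lr=0.01)

# Plot the training loss curve
loss_list = model.diagnostics["loss curve"]
plt.plot(loss_list)
plt.show()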