# %% Import packages

import torch

from torch.distributions import MultivariateNormal
from torch.utils.data import DataLoader

# NOTE: the import paths for EmptyXYDataset and HMCDATuner are assumed by
# analogy with the eeyore.models and eeyore.samplers paths used in Example #2.
from eeyore.datasets import EmptyXYDataset
from eeyore.models import DistributionModel
from eeyore.samplers import HMC
from eeyore.tuners import HMCDATuner

# %% Set up unnormalized target density

# Alternative log_pdf that sums log-probabilities, e.g. over a batch of states:
# def log_pdf(theta, x, y):
#     return torch.sum(pdf.log_prob(theta))

# Define log_pdf via the MultivariateNormal distribution from torch.distributions

pdf_dtype = torch.float64

pdf = MultivariateNormal(torch.zeros(2, dtype=pdf_dtype),
                         covariance_matrix=torch.eye(2, dtype=pdf_dtype))


def log_pdf(theta, x, y):
    # x and y are unused: the target is a standard bivariate normal density
    return pdf.log_prob(theta)


model = DistributionModel(log_pdf, 2, dtype=pdf.loc.dtype)
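
# %% Optional sanity check (added; not part of the original example): log_pdf
# should equal the analytic standard bivariate normal log density,
# -0.5 * ||theta||^2 - log(2 * pi), at any point.

theta_check = torch.tensor([-1., 1.], dtype=pdf_dtype)
analytic_log_density = (-0.5 * theta_check.pow(2).sum()
                        - torch.log(torch.tensor(2 * torch.pi, dtype=pdf_dtype)))
assert torch.isclose(log_pdf(theta_check, None, None), analytic_log_density)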

# %% Set up HMC sampler

sampler = HMC(model,
              theta0=torch.tensor([-1, 1], dtype=model.dtype),
              dataloader=DataLoader(EmptyXYDataset()),
              tuner=HMCDATuner(1., e0=2.))
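
# Notes on the arguments above: theta0 is the initial state of the chain, the
# DataLoader wraps EmptyXYDataset because this target does not condition on
# data, and HMCDATuner presumably adapts the leapfrog step size via dual
# averaging (hence "DA"), with e0 as its initial value.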

# %% Run HMC sampler

sampler.run(num_epochs=11000,
            num_burnin_epochs=1000,
            verbose=True,
            verbose_step=1000)
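
# %% Optionally extract the simulated chain (added for illustration;
# get_chain() is the same accessor used in Example #2 below)

hmc_chain = sampler.get_chain()
# Summary methods such as mean() or acceptance_rate() are assumptions about
# the eeyore chain API, hence left commented out:
# print(hmc_chain.mean())
# print(hmc_chain.acceptance_rate())
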
Example #2

import torch

from torch.distributions import Gamma
from torch.utils.data import DataLoader

# NOTE: the eeyore.datasets import path for EmptyXYDataset is assumed.
from eeyore.datasets import EmptyXYDataset
from eeyore.models import DistributionModel
from eeyore.samplers import MALA

# %% Set up unnormalized target density

# Shape and scale of the Gamma target (before the log transformation)
v = torch.tensor([2., 1.], dtype=torch.float64)

# Equivalent analytic form of log_pdf, up to an additive constant;
# the trailing "+ theta" is the log-Jacobian of the exp transformation:
# def log_pdf(theta, x, y):
#     return (v[0] - 1) * theta - torch.exp(theta) / v[1] + theta # Jacobian


def log_pdf(theta, x, y):
    # Log density of theta = log(z), where z ~ Gamma(shape=v[0], scale=v[1])
    return Gamma(v[0], 1 / v[1]).log_prob(torch.exp(theta)) + theta


model = DistributionModel(log_pdf, 1, dtype=torch.float64)
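
# %% Optional sanity check (added; not part of the original example): the
# Gamma-based log_pdf matches the commented-out analytic form up to an
# additive constant, so log-density differences between two points must agree.

theta_a = torch.tensor([-1.], dtype=torch.float64)
theta_b = torch.tensor([0.5], dtype=torch.float64)

def analytic_log_pdf(theta):
    return (v[0] - 1) * theta - torch.exp(theta) / v[1] + theta

assert torch.isclose(
    log_pdf(theta_a, None, None) - log_pdf(theta_b, None, None),
    analytic_log_pdf(theta_a) - analytic_log_pdf(theta_b))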

# %% Set up MALA sampler

sampler = MALA(model,
               theta0=torch.tensor([-1], dtype=model.dtype),
               dataloader=DataLoader(EmptyXYDataset()),
               step=0.25)

# %% Run MALA sampler

sampler.run(num_epochs=11000, num_burnin_epochs=1000)

# %% For convenience, name the chain list

chain_list = sampler.get_chain()
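
# %% Summarize the chain (a sketch: the mean() and acceptance_rate() methods
# are assumptions about the chain object returned by eeyore, so the calls are
# left commented out)

# The sample mean of theta should be close to E[log z] for z ~ Gamma(shape=2,
# scale=1), i.e. digamma(2) = 1 - Euler-Mascheroni constant ~ 0.423.
# print(chain_list.mean())
# print(chain_list.acceptance_rate())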