Example #1
import torch

# MixtureModel, Normal, and the Distribution base class come from the dpm
# library (see the dpm.distributions import in Example #14); Data, train,
# and cross_entropy are assumed to be dpm training utilities already in scope.
class GaussianMixtureModel(Distribution):
    def __init__(self, n_components=2, n_dims=1):
        super().__init__()
        self.n_components = n_components
        self.n_dims = n_dims
        self.model = MixtureModel([
            Normal(torch.randn(n_dims), torch.eye(n_dims))
            for _ in range(n_components)
        ], [1.0 / n_components for _ in range(n_components)])

    def log_prob(self, value):
        return self.model.log_prob(value)

    def sample(self, batch_size):
        return self.model.sample(batch_size)

    def fit(self, x, **kwargs):
        data = Data(x)
        stats = train(data, self.model, cross_entropy, **kwargs)
        return stats

    def predict(self, x):
        log_probs = torch.stack(
            [sub_model.log_prob(x) for sub_model in self.model.models])
        _, labels = log_probs.max(dim=0)
        return labels
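A minimal usage sketch (hypothetical data; it assumes the dpm names above are in scope):

import torch

gmm = GaussianMixtureModel(n_components=2, n_dims=2)
X = torch.cat([torch.randn(200, 2) - 3.0, torch.randn(200, 2) + 3.0])
stats = gmm.fit(X, epochs=50)   # fit() delegates to dpm's train loop
labels = gmm.predict(X)         # hard assignment to the likeliest component
print(labels.shape)             # torch.Size([400])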
Example #2
def test_forward(gan):
    if gan.n_dims == 1:
        q_model = MixtureModel(
            [Normal([-0.5], [[1.0]]), Normal([0.5], [[1.0]])], [0.5, 0.5])
        p_model = MixtureModel(
            [Normal([2.3], [[2.2]]), Normal([-2.3], [[2.2]])], [0.5, 0.5])
    else:
        q_model = MixtureModel(
            [Normal([0., 0.], [1., 0., 0., 1.]),
             Normal([0., 0.], [1., 0., 0., 1.])], [0.25, 0.75])
        p_model = MixtureModel(
            [Normal([0., 0.], [1., 0., 0., 1.]),
             Normal([0., 0.], [1., 0., 0., 1.])], [0.25, 0.75])

    gan(p_model, q_model)
Example #3
def test_gans(model):
    # Training data: draws from a fixed (non-learnable) two-component mixture.
    X = MixtureModel(
        [Normal(-4., 2.3, learnable=False),
         Normal(4., 2.3, learnable=False)], [0.5, 0.5]).sample(10000)
    X = X.numpy()
    stats = model.fit(X, epochs=5, lr=1e-4)
    preds = model.sample(10000)
    model.predict(model.sample(100))
    model.num_parameters  # smoke-check that the property is accessible
    # Implicit models such as GANs can sample but generally cannot evaluate
    # densities, so an unimplemented log_prob is acceptable here.
    try:
        model.log_prob(model.sample(100))
    except NotImplementedError:
        pass
Example #4
def test_gmm_clustering():
    model = MixtureModel([
        Normal([3.3, 3.3], [2.3, 0.1, 0.1, 7.]),
        Normal([-5.3, -6.3], [7, 4.2, 3.1, 3])
    ], [0.75, 0.25])

    X = model.sample(100).detach()
    m = GaussianMixtureModel(n_dims=2)
    m.fit(X, epochs=100, track_parameters=False)
    assert m.sample(5).shape == (5, 2)
    assert m.log_prob(m.sample(5)).shape == (5, )
    assert m.predict(X).shape == (100, )
    model.num_parameters  # smoke-check that the property is accessible
Example #5
def test_mcmc_2d():
    true = MixtureModel([
        Normal([5.2, 5.2], [[3.0, 0.0], [0.0, 3.0]]),
        Normal([0.0, 0.0], [[2.0, 0.0], [0.0, 2.0]]),
        Normal([-5.2, -5.2], [[1.5, 0.0], [0.0, 1.5]])
    ], [0.25, 0.5, 0.25])

    # Smoke-test several burn-in / thinning configurations; each call
    # overwrites samples with a fresh chain.
    samples = metropolis(true, epochs=100, burn_in=10)
    samples = metropolis(true, epochs=100, burn_in=10, keep_every=5)
    samples = metropolis(true, epochs=10, burn_in=1, keep_every=5, init=None)
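The metropolis sampler is used as a black box above. For reference, a minimal random-walk Metropolis sketch (a hypothetical stand-in, not dpm's implementation) shows what epochs, burn_in, and keep_every typically control:

import torch

def metropolis_sketch(target, epochs=100, burn_in=10, keep_every=1, init=None):
    # Random-walk Metropolis: propose x' = x + noise and accept with
    # probability min(1, p(x') / p(x)), computed in log space.
    x = init if init is not None else torch.zeros(1, 2)
    kept = []
    for step in range(epochs):
        proposal = x + 0.5 * torch.randn_like(x)
        log_alpha = target.log_prob(proposal) - target.log_prob(x)
        if torch.rand(1).log() <= log_alpha:
            x = proposal
        # Discard warm-up draws, then keep every keep_every-th iteration.
        if step >= burn_in and (step - burn_in) % keep_every == 0:
            kept.append(x.clone())
    return torch.cat(kept)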
Example #6
def test_gan_train(gan):
    if gan.n_dims == 1:
        q_model = MixtureModel(
            [Normal([-0.5], [[1.0]]), Normal([0.5], [[1.0]])], [0.5, 0.5])
        p_model = MixtureModel(
            [Normal([2.3], [[2.2]]), Normal([-2.3], [[2.2]])], [0.5, 0.5])
    else:
        q_model = MixtureModel(
            [Normal([0., 0.], [1., 0., 0., 1.]),
             Normal([0., 0.], [1., 0., 0., 1.])], [0.25, 0.75])
        p_model = MixtureModel(
            [Normal([0., 0.], [1., 0., 0., 1.]),
             Normal([0., 0.], [1., 0., 0., 1.])], [0.25, 0.75])

    train(p_model, q_model, gan, optimizer="RMSprop", epochs=3, lr=1e-3, batch_size=512)
    X = p_model.sample(100)
    gan.classify(X)
Example #7
test_dists = [
    (Normal(0., 1.), 1),
    (Exponential(0.5), 1),
    (Cauchy(0., 1.), 1),
    (Beta(0.5, 1.), 1),
    (LogNormal(0., 1.), 1),
    (Gamma(0.5, 1.), 1),
    (RelaxedBernoulli([0.5]), 1),
    (Uniform(-1., 3.), 1),
    (StudentT(30., 1., 3.), 1),
    (Dirichlet(0.5), 1),
    (FisherSnedecor(10., 10.), 1),
    (HalfCauchy(1.), 1),
    (HalfNormal(1.), 1),
    (Laplace(0., 1.), 1),
    (MixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75]), 1),
    (GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75]), 1),
    (GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75],
                        hard=False), 1),
    (ChiSquare(4.), 1),
    (Logistic(0., 1.), 1),
    (Rayleigh(), 1),
    (LogLaplace(), 1),
    (LogCauchy(), 1),
    (Categorical(), 1),
    (HyperbolicSecant(), 1),
    (Arcsine(), 1),
    (Bernoulli(), 1),
    (Gumbel(), 1),
    (Rayleigh(), 1),
    (Arcsine(), 1),
]
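A (distribution, n_dims) list like this is typically consumed with pytest.mark.parametrize, as the mixture-model test in Example #14 below does. A hypothetical sketch:

import pytest

@pytest.mark.parametrize("dist,n_dims", test_dists)
def test_sample_shape(dist, n_dims):
    # Every distribution should produce batched samples of
    # shape (batch_size, n_dims).
    assert dist.sample(7).shape == (7, n_dims)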
Example #8
import pytest

models = [
    (Normal(0., 1.), Normal(0., 1.)),
    (Exponential(0.5), Exponential(0.5)),
    (Cauchy(0., 1.), Cauchy(0., 1.)),
    (Beta(0.5, 1.), Beta(0.5, 1.)),
    (LogNormal(0., 1.), LogNormal(0., 1.)),
    (Gamma(0.5, 1.), Gamma(0.5, 1.)),
    (Uniform(-1.0, 3.0), Uniform(-1.0, 3.0)),
    (StudentT(30.0, 1.0, 3.0), StudentT(30.0, 1.0, 3.0)),
    (FisherSnedecor(10.0, 10.0), FisherSnedecor(10.0, 10.0)),
    (HalfCauchy(1.0), HalfCauchy(1.0)),
    (HalfNormal(1.0), HalfNormal(1.0)),
    (Laplace(0., 1.), Laplace(0., 1.)),
    (MixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75]),
     MixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75])),
    (GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75]),
     GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75])),
    (GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75],
                        hard=False),
     GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75],
                        hard=False)),
    (Logistic(0., 1.), Logistic(0., 1.)),
    (ChiSquare(4.), ChiSquare(4.)),
    (Normal([0., 0.], [1., 1.]), Normal([0., 0.], [1., 1.])),
    (Exponential([0.5, 0.5]), Exponential([0.5, 0.5])),
    (Cauchy([0., 0.], [1., 1.]), Cauchy([0., 0.], [1., 1.])),
    (Beta([0.5, 0.5], [1., 1.]), Beta([0.5, 0.5], [1., 1.])),
    (LogNormal([0., 0.], [1., 1.]), LogNormal([0., 0.], [1., 1.])),
    (Gamma([0.5, 0.5], [1., 1.]), Gamma([0.5, 0.5], [1., 1.])),
]
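Pairs of identically parameterized models make natural fixtures for divergence tests, since any divergence between a model and an identical copy should be zero up to Monte Carlo noise. A hypothetical sketch using js_divergence from Example #12 below:

@pytest.mark.parametrize("p_model,q_model", models)
def test_js_divergence_near_zero(p_model, q_model):
    # JS(p || q) is 0 for identical models and bounded above by log(2).
    value = js_divergence(p_model, q_model, batch_size=512)
    assert value.abs() < 0.5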
Example #9
def js_divergence_2(p_model, q_model, batch_size=64):
    # M is the midpoint mixture (p + q) / 2 used by the JS divergence.
    M = MixtureModel([p_model, q_model], [0.5, 0.5])
    return 0.5 * (_other_term(p_model, M, batch_size) +
                  _forward_kl(q_model, M, batch_size))
Example #10
def pearson(p_model, q_model, batch_size=64):
    mixture_model = MixtureModel([p_model, q_model], [0.5, 0.5])
    samples = mixture_model.sample(batch_size)
    ratio = ((p_model.log_prob(samples) - mixture_model.log_prob(samples)).exp()
             - (q_model.log_prob(samples) - mixture_model.log_prob(samples)).exp()).pow(2)
    return ratio.mean()
Example #11
def total_variation(p_model, q_model, batch_size=64):
    mixture_model = MixtureModel([p_model, q_model], [0.5, 0.5])
    samples = mixture_model.sample(batch_size)
    ratio = 0.5 * ((p_model.log_prob(samples) - mixture_model.log_prob(samples)).exp()
                   - (q_model.log_prob(samples) - mixture_model.log_prob(samples)).exp()).abs()
    return ratio.mean()
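Both pearson and total_variation use the same density-ratio trick: sampling x from the mixture m = (p + q) / 2 and exponentiating log-density differences recovers the ratios p(x)/m(x) and q(x)/m(x). pearson therefore estimates E_m[(p/m - q/m)^2], while total_variation estimates (1/2) E_m[|p/m - q/m|] = (1/2) ∫ |p(x) - q(x)| dx, the standard total variation distance.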
Example #12
def js_divergence(p_model, q_model, batch_size=64):
    M = MixtureModel([p_model, q_model], [0.5, 0.5])
    return 0.5 * (forward_kl(p_model, M, batch_size) +
                  forward_kl(q_model, M, batch_size))
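Both JS implementations above compute JS(p || q) = (1/2) KL(p || M) + (1/2) KL(q || M) with M = (p + q) / 2. A plausible Monte Carlo sketch of the forward_kl helper they rely on (hypothetical; dpm's actual estimator may differ):

def forward_kl_sketch(p_model, q_model, batch_size=64):
    # KL(p || q) is approximated by E_{x ~ p}[log p(x) - log q(x)].
    samples = p_model.sample(batch_size)
    return (p_model.log_prob(samples) - q_model.log_prob(samples)).mean()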
Example #13
def test_mixture_cdf():
    model = MixtureModel([Exponential(0.5), Exponential(2.0)], [0.5, 0.5])
    model.cdf(model.sample(100))
    model = GumbelMixtureModel([Exponential(0.5), Exponential(2.0)], [0.5, 0.5])
    model.cdf(model.sample(100))
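For a mixture with weights w_i and component CDFs F_i, the CDF is the weighted sum F(x) = sum_i w_i F_i(x), which is what cdf evaluates at the sampled points here.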
Example #14
from dpm.distributions import (
    MixtureModel, GumbelMixtureModel, InfiniteMixtureModel
)
from dpm.distributions import Normal, Exponential
import numpy as np
import pytest

mm_models = [
    (MixtureModel([Normal(0.0, 1.0), Normal(0.0, 1.0)], [0.5, 0.5]), 1),
    (MixtureModel([Normal([0.0, 0.0], [1.0, 1.0]),
                   Normal([0.0, 0.0], [1.0, 0.0, 0.0, 1.0])], [0.5, 0.5]), 2),
    (GumbelMixtureModel([Normal(0.0, 1.0), Normal(0.0, 1.0)], [0.5, 0.5]), 1),
    (GumbelMixtureModel([Normal([0.0, 0.0], [1.0, 1.0]),
                         Normal([0.0, 0.0], [1.0, 0.0, 0.0, 1.0])], [0.5, 0.5]), 2),
    (GumbelMixtureModel([Normal(0.0, 1.0), Normal(0.0, 1.0)], [0.5, 0.5], hard=False), 1),
    (GumbelMixtureModel([Normal([0.0, 0.0], [1.0, 1.0]),
                         Normal([0.0, 0.0], [1.0, 0.0, 0.0, 1.0])], [0.5, 0.5], hard=False), 2),
]
@pytest.mark.parametrize("model,n_dims", mm_models)
def test_mixture_model(model, n_dims):
    assert model.sample(1).shape == (1, n_dims)
    assert model.sample(64).shape == (64, n_dims)

    assert model.log_prob(model.sample(1)).shape == (1, )
    assert model.log_prob(model.sample(64)).shape == (64, )

    assert (model.get_parameters()['probs'] == np.array([0.5, 0.5])).all()


def test_mixture_cdf():
    model = MixtureModel([Exponential(0.5), Exponential(2.0)], [0.5, 0.5])
    model.cdf(model.sample(100))
    model = GumbelMixtureModel([Exponential(0.5), Exponential(2.0)], [0.5, 0.5])
    model.cdf(model.sample(100))