Example #1
def __init__(self, n_classes=2, n_features=10, n_states=4):
    super().__init__(
        Categorical(probs=[1.0 / n_classes for _ in range(n_classes)]),
        [Categorical(probs=[[0.5 for _ in range(n_states)]
                            for _ in range(n_features)])
         for _ in range(n_classes)])
    self.n_classes = n_classes
    self.n_features = n_features
    self.n_states = n_states
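The nested comprehension above amounts to a categorical Naive Bayes initializer: a uniform prior over classes plus one per-feature state table per class. A shape-only sketch of what it builds (plain Python, names illustrative; note the constant 0.5 sums to 1 only when n_states == 2, so the Categorical presumably normalizes its probs):

n_classes, n_features, n_states = 2, 10, 4

# Uniform prior over the class labels.
class_prior = [1.0 / n_classes for _ in range(n_classes)]

# One (n_features x n_states) table of unnormalized state probabilities per class.
feature_tables = [[[0.5 for _ in range(n_states)]
                   for _ in range(n_features)]
                  for _ in range(n_classes)]

assert len(feature_tables) == n_classes
assert len(feature_tables[0]) == n_features
assert len(feature_tables[0][0]) == n_states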
Example #2
# Assumed context: `import torch`, `import numpy as np`,
# `from torch.nn import Parameter`; Distribution, Normal, Categorical,
# Data, train and cross_entropy come from the host library.
class LinearDiscriminantAnalysis(Distribution):

    def __init__(self, n_classes=2, n_features=10):
        super().__init__()
        self.y_dist = Categorical(probs=[1.0/n_classes for _ in range(n_classes)])
        self.x_means = Parameter(torch.randn(n_classes, n_features).float())
        self.scale = Parameter(torch.eye(n_features).float().cholesky())
        self.n_dims = n_features
        self.n_classes = n_classes
        self.n_features = n_features

    def create_dist(self, class_num):
        # Class-conditional Gaussian: per-class mean, shared covariance (LDA).
        return Normal(self.x_means[class_num], self.covariance, learnable=False)

    def log_prob(self, x, y):
        ids = y.long()
        # Class-conditional log-density of x under every class, shape (N, n_classes).
        log_probs = torch.cat([self.create_dist(i).log_prob(x).view(-1, 1)
                               for i in range(self.n_classes)], dim=1)
        y_probs = self.y_dist.log_prob(y).view(-1, 1)
        # Joint log p(x, y) = log p(y) + log p(x | y) for the observed class.
        return (y_probs + log_probs.gather(1, ids.view(-1, 1))).sum(-1)

    def sample(self, batch_size, return_y=False):
        indices = self.y_dist.sample(batch_size).view(-1).long()
        # Draw a batch from every class, then keep the row matching each sampled label.
        samples = torch.stack([self.create_dist(i).sample(batch_size)
                               for i in range(self.n_classes)])
        # If the class labels are wanted, return the indices as well.
        if return_y:
            return samples[indices, np.arange(batch_size)], indices.view(-1, 1)
        return samples[indices, np.arange(batch_size)]

    def fit(self, x, y, **kwargs):
        data = Data(x, y)
        stats = train(data, self, cross_entropy, **kwargs)
        return stats

    def predict(self, x):
        log_probs = torch.cat([self.create_dist(i).log_prob(x).view(-1, 1)
                               for i in range(self.n_classes)], dim=1)
        y_probs = self.y_dist.logits.expand_as(log_probs)
        # MAP estimate: argmax over classes of log p(y) + log p(x | y).
        probs = y_probs + log_probs
        return probs.max(dim=1)[1].view(-1, 1)

    @property
    def covariance(self):
        # Cholesky-factor parameterization keeps the covariance positive semi-definite.
        return torch.mm(self.scale, self.scale.t())
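A minimal usage sketch built only on the methods the class itself defines (fit, predict, sample); the synthetic data and the epochs keyword are illustrative assumptions, since fit simply forwards **kwargs to train:

import torch

# Two well-separated 10-feature Gaussian blobs.
x = torch.cat([torch.randn(100, 10) - 2.0, torch.randn(100, 10) + 2.0])
y = torch.cat([torch.zeros(100), torch.ones(100)])

model = LinearDiscriminantAnalysis(n_classes=2, n_features=10)
model.fit(x, y, epochs=100)      # `epochs` is assumed to be accepted via **kwargs
preds = model.predict(x)         # (200, 1) tensor of class indices
accuracy = (preds.view(-1) == y.long()).float().mean()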
Example #3
# Assumed context: `import torch`, `import numpy as np`,
# `from torch.nn import ModuleList`; Distribution, Normal, Categorical,
# VariationalCategorical, ELBO, Data and train come from the host library.
class VariationalGaussianMixtureModel(Distribution):

    has_latents = True

    def __init__(self,
                 n_components=2,
                 n_dims=1,
                 variational_kwargs=None,
                 elbo_kwargs=None):
        super().__init__()
        self.n_components = n_components
        self.n_dims = n_dims
        self.normals = ModuleList([
            Normal(torch.randn(n_dims), torch.eye(n_dims))
            for _ in range(n_components)
        ])
        # Copy to avoid mutating a caller-supplied (or shared default) dict.
        variational_kwargs = dict(variational_kwargs or {})
        variational_kwargs.update({
            'input_dim': n_dims,
            'output_shapes': [n_components]
        })
        elbo_kwargs = dict(elbo_kwargs or {})
        self.variational_kwargs = variational_kwargs
        self.elbo_kwargs = elbo_kwargs
        self.categorical = VariationalCategorical(variational_kwargs)
        self.criterion = ELBO(self.categorical, **elbo_kwargs)
        self.prior = Categorical(
            [1.0 / n_components for _ in range(n_components)], learnable=False)

    def log_prob(self, X, Z=None, n_iter=10):
        if Z is None:
            # Monte Carlo estimate of the responsibilities: average n_iter
            # draws from the variational posterior q(Z | X).
            Z = self.categorical.sample(X)
            for _ in range(n_iter - 1):
                Z = Z + self.categorical.sample(X)
            Z = Z / n_iter
        latent_probs = self.prior.log_prob(Z)
        # Per-component log-density of X, shape (N, n_components).
        log_probs = torch.stack(
            [sub_model.log_prob(X) for sub_model in self.normals], dim=1)
        # Responsibility-weighted component log-likelihood plus the prior term.
        return (log_probs * Z).sum(dim=-1) + latent_probs

    def sample(self, batch_size):
        indices = self.prior.sample(batch_size).view(-1).long()
        # Draw from every component, then select per-sample by the drawn index.
        samples = torch.stack(
            [sub_model.sample(batch_size) for sub_model in self.normals])
        return samples[indices, np.arange(batch_size)]

    def fit(self, x, **kwargs):
        data = Data(x)
        return train(data, self, self.criterion, **kwargs)

    def predict(self, x):
        log_probs = torch.stack(
            [sub_model.log_prob(x) for sub_model in self.normals])
        # Hard assignment: index of the highest-likelihood component.
        _, labels = log_probs.max(dim=0)
        return labels

    def parameters(self):
        # Expose only the mixture's own parameters; anything belonging to the
        # variational categorical is filtered out of the optimizer's view.
        for name, param in self.named_parameters(recurse=True):
            if 'categorical' in name:
                continue
            yield param
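A usage sketch in the same spirit, again relying only on the methods defined above; epochs is an assumed pass-through keyword:

import torch

# Two 1-D clusters around -3 and +3.
x = torch.cat([torch.randn(200, 1) - 3.0, torch.randn(200, 1) + 3.0])

model = VariationalGaussianMixtureModel(n_components=2, n_dims=1)
model.fit(x, epochs=200)       # `epochs` is assumed to be accepted via **kwargs
labels = model.predict(x)      # most likely component per point
draws = model.sample(32)       # 32 samples from the fitted mixture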
Example #4
 (StudentT(30., 1., 3.), 1),
 (Dirichlet(0.5), 1),
 (FisherSnedecor(10., 10.), 1),
 (HalfCauchy(1.), 1),
 (HalfNormal(1.), 1),
 (Laplace(0., 1.), 1),
 (MixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75]), 1),
 (GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75]), 1),
 (GumbelMixtureModel([Normal(0., 1.), Normal(1., 3.)], [0.25, 0.75],
                     hard=False), 1),
 (ChiSquare(4.), 1),
 (Logistic(0., 1.), 1),
 (Rayleigh(), 1),
 (LogLaplace(), 1),
 (LogCauchy(), 1),
 (Categorical(), 1),
 (HyperbolicSecant(), 1),
 (Arcsine(), 1),
 (Bernoulli(), 1),
 (Gumbel(), 1),
 (Rayleigh(), 1),
 (Arcsine(), 1),
 (Categorical(), 1),
 (LogitNormal(), 1),
 (AsymmetricLaplace(), 1),
 (AsymmetricLaplace(asymmetry=2.), 1),
 (Normal([0., 0.], [1., 0., 0., 1.0]), 2),
 (Exponential([0.5, 1.0]), 2),
 (Cauchy([0., 0.], [1., 1.]), 2),
 (Beta([0.5, 0.5], [1., 1.]), 2),
 (LogNormal([0., 0.], [1., 1.]), 2),
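These (distribution, n_dims) pairs read like a test parametrization matching each distribution to its event dimension; a hedged sketch of how such a list is typically consumed (the test body and the TEST_DISTS name are illustrative, not from the source):

import pytest

@pytest.mark.parametrize("dist,n_dims", TEST_DISTS)  # TEST_DISTS = the list above
def test_sample_dims(dist, n_dims):
    samples = dist.sample(64)            # batch of 64 draws, as in the examples above
    assert samples.shape[-1] == n_dims   # trailing dimension matches the declared size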
Example #5
def __init__(self, n_classes=2, n_features=10):
    super().__init__(
        Categorical(probs=[1.0 / n_classes for _ in range(n_classes)]),
        [Normal(loc=torch.randn(n_features), scale=torch.ones(n_features))
         for _ in range(n_classes)])
    self.n_classes = n_classes
    self.n_features = n_features
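This is the Gaussian counterpart of Example #1: the same uniform class prior, with one diagonal Normal per class (random means, unit scales) over the n_features inputs, i.e. a Gaussian Naive Bayes initializer. A hedged instantiation (the enclosing class name is assumed, not taken from the source):

model = GaussianNaiveBayes(n_classes=3, n_features=20)  # hypothetical class name
# Builds a 3-way uniform prior plus three 20-dimensional Normals with
# random means and unit scales.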