def fit(self, X, variational_dist=None, elbo_kwargs=None, **kwargs):
    # Fall back to the default variational family for PPCA.
    if variational_dist is None:
        variational_dist = PPCA_Variational_V2(self)
    # Avoid a mutable default argument for the ELBO keyword arguments.
    if elbo_kwargs is None:
        elbo_kwargs = {}
    data = Data(X)
    stats = train(data, self, ELBO(variational_dist, **elbo_kwargs), **kwargs)
    return stats
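# Hedged usage sketch for the ELBO-based fit above. `model` stands for an
# already-constructed PPCA model instance (its class name is assumed, not
# shown here); PPCA_Variational_V2 and fit() come from the snippet above,
# the data shape is an assumption.
import torch

X = torch.randn(500, 10)  # toy data: 500 observations, 10 features
stats = model.fit(X)      # default variational family
stats = model.fit(X, variational_dist=PPCA_Variational_V2(model))  # explicit variational choice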
def fit(self, x, **kwargs):
    data = Data(x)
    # Optimizer is fixed to RMSprop; remaining keyword arguments are forwarded to train().
    stats = train(data, self.model, self.criterion, optimizer='RMSprop',
                  track_parameters=False, **kwargs)
    return stats
def test_data_dist(n_dims):
    # Wrap a torch tensor and check sampling shapes and reported parameters.
    data = torch.randn(1000, n_dims)
    dist = Data(data)
    assert dist.sample(1).shape == (1, n_dims)
    assert dist.sample(64).shape == (64, n_dims)
    # log_prob may be unimplemented for an empirical data distribution;
    # any other exception should still fail the test.
    try:
        dist.log_prob(dist.sample(64))
    except NotImplementedError:
        pass
    assert dist.get_parameters()['n_dims'] == n_dims
    assert dist.get_parameters()['n_samples'] == 1000

    # The same checks with a numpy array as input.
    data = np.random.randn(100, n_dims)
    dist = Data(data)
    assert dist.sample(1).shape == (1, n_dims)
    assert dist.sample(64).shape == (64, n_dims)
    assert dist.get_parameters()['n_dims'] == n_dims
    assert dist.get_parameters()['n_samples'] == 100
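# Minimal sketch of the interface the test above exercises, assuming Data is
# an empirical distribution over a fixed (n_samples, n_dims) matrix. The name
# DataSketch and all implementation details are assumptions; only sample(),
# log_prob() possibly raising NotImplementedError, and get_parameters() are
# implied by the test.
import numpy as np
import torch


class DataSketch:
    def __init__(self, data):
        # Accept a torch tensor or a numpy array of shape (n_samples, n_dims).
        self.data = torch.as_tensor(data)
        self.n_samples, self.n_dims = self.data.shape

    def sample(self, batch_size):
        # Draw rows uniformly at random with replacement.
        idx = torch.randint(0, self.n_samples, (batch_size,))
        return self.data[idx]

    def log_prob(self, value):
        # No tractable density for the raw empirical data.
        raise NotImplementedError

    def get_parameters(self):
        return {'n_dims': self.n_dims, 'n_samples': self.n_samples}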
def fit(self, x, **kwargs):
    data = Data(x)
    stats = train(data, self.model, cross_entropy, **kwargs)
    return stats
def fit(self, x, **kwargs):
    data = Data(x)
    return train(data, self, self.criterion, **kwargs)
def fit(self, R, **kwargs):
    # Flatten each N x M matrix into a vector of length N * M before training.
    data = Data(R.view(-1, self.N * self.M))
    stats = train(data, self, cross_entropy, **kwargs)
    return stats
def fit(self, x, use_elbo=True, **kwargs):
    data = Data(x)
    if use_elbo:
        # Default: train against the model's own criterion (the ELBO path).
        return train(data, self, self.criterion, **kwargs)
    # Otherwise fall back to plain cross-entropy.
    return train(data, self, cross_entropy, **kwargs)
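# Hedged usage sketch for the use_elbo switch above. `model` stands for an
# already-constructed instance of the class that owns this fit(); its class
# name and the data shape are assumptions.
import torch

x = torch.randn(256, 784)                # toy batch of flattened inputs
stats_elbo = model.fit(x)                # trains with self.criterion
stats_ce = model.fit(x, use_elbo=False)  # falls back to the cross_entropy criterion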