import numpy as np

# import paths follow the other examples (mimo.distributions);
# the mimo.mixtures path for the mixture class is assumed
from mimo.distributions import NormalGamma
from mimo.distributions import MatrixNormalWishart
from mimo.distributions import Gamma
from mimo.distributions import Dirichlet
from mimo.distributions import TruncatedStickBreaking
from mimo.distributions import GaussianWithNormalGamma
from mimo.distributions import CategoricalWithDirichlet
from mimo.distributions import CategoricalWithStickBreaking
from mimo.distributions import LinearGaussianWithMatrixNormalWishartAndAutomaticRelevance

from mimo.mixtures import BayesianMixtureOfLinearGaussians


def _job(kwargs):
    """Fit a Bayesian mixture of linear Gaussians on one training split."""
    args = kwargs.pop('arguments')
    seed = kwargs.pop('seed')

    input = kwargs.pop('train_input')
    target = kwargs.pop('train_target')

    input_dim = input.shape[-1]
    target_dim = target.shape[-1]

    # set random seed
    np.random.seed(seed)

    nb_params = input_dim
    if args.affine:
        nb_params += 1

    basis_prior = []
    models_prior = []
    models_hypprior = []

    # initialize Normal
    alpha_ng = 1.
    beta_ng = 1. / (2. * 1e2)
    kappas = 1e-2

    # initialize Matrix-Normal
    psi_mnw = 1e0
    K = 1e0

    # initialize ard-Gamma
    alphas_ard = 1.
    betas_ard = 1. / (2. * 1e2)

    for n in range(args.nb_models):
        basis_hypparams = dict(mu=np.zeros((input_dim, )),
                               alphas=np.ones(input_dim) * alpha_ng,
                               betas=np.ones(input_dim) * beta_ng,
                               kappas=np.ones(input_dim) * kappas)

        aux = NormalGamma(**basis_hypparams)
        basis_prior.append(aux)

        models_hypparams = dict(M=np.zeros((target_dim, nb_params)),
                                K=np.eye(nb_params) * K,
                                nu=target_dim + 1,
                                psi=np.eye(target_dim) * psi_mnw)

        aux = MatrixNormalWishart(**models_hypparams)
        models_prior.append(aux)

        models_hyphypparams = dict(alphas=alphas_ard * np.ones(nb_params),
                                   betas=betas_ard * np.ones(nb_params))

        aux = Gamma(**models_hyphypparams)
        models_hypprior.append(aux)

    # define gating
    if args.prior == 'stick-breaking':
        gating_hypparams = dict(K=args.nb_models,
                                gammas=np.ones((args.nb_models, )),
                                deltas=np.ones((args.nb_models, )) * args.alpha)
        gating_prior = TruncatedStickBreaking(**gating_hypparams)

        ilr = BayesianMixtureOfLinearGaussians(
            gating=CategoricalWithStickBreaking(gating_prior),
            basis=[GaussianWithNormalGamma(basis_prior[i])
                   for i in range(args.nb_models)],
            models=[LinearGaussianWithMatrixNormalWishartAndAutomaticRelevance(
                models_prior[i], models_hypprior[i], affine=args.affine)
                for i in range(args.nb_models)])
    else:
        gating_hypparams = dict(K=args.nb_models,
                                alphas=np.ones((args.nb_models, )) * args.alpha)
        gating_prior = Dirichlet(**gating_hypparams)

        ilr = BayesianMixtureOfLinearGaussians(
            gating=CategoricalWithDirichlet(gating_prior),
            basis=[GaussianWithNormalGamma(basis_prior[i])
                   for i in range(args.nb_models)],
            models=[LinearGaussianWithMatrixNormalWishartAndAutomaticRelevance(
                models_prior[i], models_hypprior[i], affine=args.affine)
                for i in range(args.nb_models)])

    ilr.add_data(target, input, whiten=False, labels_from_prior=True)

    # Gibbs sampling
    ilr.resample(maxiter=args.gibbs_iters, progprint=args.verbose)

    for _ in range(args.super_iters):
        if args.stochastic:
            # Stochastic meanfield VI
            ilr.meanfield_stochastic_descent(maxiter=args.svi_iters,
                                             stepsize=args.svi_stepsize,
                                             batchsize=args.svi_batchsize)
        if args.deterministic:
            # Meanfield VI
            ilr.meanfield_coordinate_descent(tol=args.earlystop,
                                             maxiter=args.meanfield_iters,
                                             progprint=args.verbose)

        # set the priors to the current posteriors before the next super-iteration
        ilr.gating.prior = ilr.gating.posterior
        for i in range(ilr.likelihood.size):
            ilr.basis[i].prior = ilr.basis[i].posterior
            ilr.models[i].prior = ilr.models[i].posterior

    return ilr
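# A minimal sketch of how _job might be invoked (not part of the library).
# The dict keys ('arguments', 'seed', 'train_input', 'train_target') and the
# attribute names on `args` are the ones _job reads above; the concrete values,
# the synthetic data and the use of argparse.Namespace are assumptions made
# here purely for illustration.
from argparse import Namespace

args = Namespace(affine=True, nb_models=10, prior='stick-breaking', alpha=1.,
                 gibbs_iters=100, super_iters=1, verbose=False,
                 stochastic=False, deterministic=True,
                 svi_iters=500, svi_stepsize=5e-4, svi_batchsize=128,
                 earlystop=1e-4, meanfield_iters=250)

train_input = np.random.randn(500, 1)                               # hypothetical inputs
train_target = np.sin(train_input) + 0.1 * np.random.randn(500, 1)  # hypothetical targets

ilr = _job(dict(arguments=args, seed=1337,
                train_input=train_input, train_target=train_target))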
import numpy as np
import numpy.random as npr

from mimo.distributions import GaussianWithDiagonalCovariance
from mimo.distributions import NormalGamma
from mimo.distributions import GaussianWithNormalGamma

npr.seed(1337)

dim, nb_samples, nb_datasets = 3, 500, 5

dist = GaussianWithDiagonalCovariance(mu=npr.randn(dim), sigmas=1. * npr.rand(dim))
data = [dist.rvs(size=nb_samples) for _ in range(nb_datasets)]

print("True mean" + "\n", dist.mu.T, "\n" + "True sigma" + "\n", dist.sigma)

hypparams = dict(mu=np.zeros((dim, )),
                 kappas=1e-2 * np.ones((dim, )),
                 alphas=1. * np.ones((dim, )),
                 betas=1. / 2. * np.ones((dim, )))
prior = NormalGamma(**hypparams)

model = GaussianWithNormalGamma(prior=prior)
model.meanfield_update(data)
print("Meanfield mean" + "\n", model.likelihood.mu.T,
      "\n" + "Meanfield sigma" + "\n", model.likelihood.sigma)
import numpy as np
import matplotlib.pyplot as plt

# import paths follow the other examples (mimo.distributions);
# the mimo.mixtures path for the mixture classes is assumed
from mimo.distributions import Categorical
from mimo.distributions import GaussianWithDiagonalCovariance
from mimo.distributions import Dirichlet
from mimo.distributions import NormalGamma
from mimo.distributions import CategoricalWithDirichlet
from mimo.distributions import GaussianWithNormalGamma

from mimo.mixtures import MixtureOfGaussians
from mimo.mixtures import BayesianMixtureOfGaussians

gating = Categorical(K=2)

components = [GaussianWithDiagonalCovariance(mu=np.array([1., 1.]),
                                              sigmas=np.array([0.25, 0.5])),
              GaussianWithDiagonalCovariance(mu=np.array([-1., -1.]),
                                             sigmas=np.array([0.5, 0.25]))]

gmm = MixtureOfGaussians(gating=gating, components=components)

obs, z = gmm.rvs(500)
gmm.plot(obs)

gating_hypparams = dict(K=2, alphas=np.ones((2, )))
gating_prior = Dirichlet(**gating_hypparams)

components_hypparams = dict(mu=np.zeros((2, )),
                            kappas=1e-2 * np.ones((2, )),
                            alphas=1. * np.ones((2, )),
                            betas=1. / (2. * 1e4) * np.ones((2, )))
components_prior = NormalGamma(**components_hypparams)

model = BayesianMixtureOfGaussians(gating=CategoricalWithDirichlet(gating_prior),
                                   components=[GaussianWithNormalGamma(components_prior)
                                               for _ in range(2)])

model.add_data(obs)
model.max_aposteriori(maxiter=1000)

plt.figure()
model.plot(obs)
import numpy as np
import numpy.random as npr

from mimo.distributions import GaussianWithDiagonalCovariance
from mimo.distributions import NormalGamma
from mimo.distributions import GaussianWithNormalGamma

npr.seed(1337)

dim, nb_samples, nb_datasets = 3, 500, 5

dist = GaussianWithDiagonalCovariance(mu=npr.randn(dim), sigmas=1. * npr.rand(dim))
data = [dist.rvs(size=nb_samples) for _ in range(nb_datasets)]

print("True mean" + "\n", dist.mu.T, "\n" + "True sigma" + "\n", dist.sigma)

model = GaussianWithDiagonalCovariance(mu=np.zeros((dim, )))
model.max_likelihood(data)
print("ML mean" + "\n", model.mu.T, "\n" + "ML sigma" + "\n", model.sigma)

hypparams = dict(mu=np.zeros((dim, )),
                 kappas=1e-2 * np.ones((dim, )),
                 alphas=1. * np.ones((dim, )),
                 betas=1. / 2. * np.ones((dim, )))
prior = NormalGamma(**hypparams)

model = GaussianWithNormalGamma(prior=prior)
model.max_aposteriori(data)
print("MAP mean" + "\n", model.likelihood.mu.T,
      "\n" + "MAP sigma" + "\n", model.likelihood.sigma)
import numpy as np
import matplotlib.pyplot as plt

# import paths follow the other examples (mimo.distributions);
# the mimo.mixtures path for the mixture classes is assumed
from mimo.distributions import Categorical
from mimo.distributions import GaussianWithCovariance
from mimo.distributions import Dirichlet
from mimo.distributions import NormalGamma
from mimo.distributions import CategoricalWithDirichlet
from mimo.distributions import GaussianWithNormalGamma

from mimo.mixtures import MixtureOfGaussians
from mimo.mixtures import BayesianMixtureOfGaussians

# gating definition was missing in the original snippet; taken from the example above
gating = Categorical(K=2)

components = [GaussianWithCovariance(mu=np.array([1., 1.]), sigma=0.25 * np.eye(2)),
              GaussianWithCovariance(mu=np.array([-1., -1.]), sigma=0.5 * np.eye(2))]

gmm = MixtureOfGaussians(gating=gating, components=components)

obs, z = gmm.rvs(1000)
gmm.plot(obs)

gating_hypparams = dict(K=2, alphas=np.ones((2, )))
gating_prior = Dirichlet(**gating_hypparams)

components_hypparams = dict(mu=np.zeros((2, )),
                            kappas=1e-2 * np.ones((2, )),
                            alphas=1. * np.ones((2, )),
                            betas=1. / (2. * 1e4) * np.ones((2, )))
components_prior = NormalGamma(**components_hypparams)

model = BayesianMixtureOfGaussians(gating=CategoricalWithDirichlet(gating_prior),
                                   components=[GaussianWithNormalGamma(components_prior)
                                               for _ in range(2)])

model.add_data(obs)
model.resample(maxiter=500)

plt.figure()
model.plot(obs)
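# Alternatively, the same Bayesian mixture could be fit with mean-field VI
# instead of Gibbs sampling, assuming BayesianMixtureOfGaussians exposes the
# same meanfield_coordinate_descent interface used on
# BayesianMixtureOfLinearGaussians in _job above (an assumption, not confirmed
# by this snippet). Tolerance and iteration values are illustrative only.
vi_model = BayesianMixtureOfGaussians(gating=CategoricalWithDirichlet(gating_prior),
                                      components=[GaussianWithNormalGamma(components_prior)
                                                  for _ in range(2)])
vi_model.add_data(obs)
vi_model.meanfield_coordinate_descent(tol=1e-2, maxiter=250, progprint=False)

plt.figure()
vi_model.plot(obs)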
import numpy as np
import numpy.random as npr

from mimo.distributions import GaussianWithDiagonalCovariance
from mimo.distributions import NormalGamma
from mimo.distributions import GaussianWithNormalGamma

npr.seed(1337)

dim, nb_samples, nb_datasets = 3, 500, 5

dist = GaussianWithDiagonalCovariance(mu=npr.randn(dim), sigmas=1. * npr.rand(dim))
data = [dist.rvs(size=nb_samples) for _ in range(nb_datasets)]

print("True mean" + "\n", dist.mu.T, "\n" + "True sigma" + "\n", dist.sigma)

# Gibbs sampling with a Normal-Gamma prior
hypparams = dict(mu=np.zeros((dim, )),
                 kappas=1e-2 * np.ones((dim, )),
                 alphas=1. * np.ones((dim, )),
                 betas=1. / 2. * np.ones((dim, )))
prior = NormalGamma(**hypparams)

model = GaussianWithNormalGamma(prior=prior)
model.resample(data)
print("Gibbs mean" + "\n", model.likelihood.mu.T,
      "\n" + "Gibbs sigma" + "\n", model.likelihood.sigma)