def _initialise_model(self):
    """Build the prior, ESS, latent-variable, posterior and lower-bound
    structures from the stored data and options."""
    data = self.data
    (num_obs, num_features) = np.shape(data)
    init_params = self.options.init_params
    options = self.options

    # Initialize data structures
    self.prior = _Prior(self.data, options.num_comp_init,
                        options.prior_dirichlet)
    self.ess = _ESS(data, options.num_comp_init, init_params['mean'],
                    init_params['covar'], init_params['mixweights'])
    self.latent_variables = _LatentVariables(data, self.ess,
                                             options.num_comp_init)
    self.posterior = _Posterior(self.prior, options.num_comp_init,
                                options.dof_init,
                                use_approx=options.use_approx)
    self.lower_bound = _LowerBound(data, num_obs, num_features,
                                   options.num_comp_init, self.prior)
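# NOTE (added comment, inferred from the attribute accesses above):
# _initialise_model expects `self.options` to carry at least `num_comp_init`,
# `prior_dirichlet`, `dof_init`, `use_approx`, and an `init_params` dict with
# 'mean', 'covar' and 'mixweights' entries, mirroring the keyword arguments
# of __init__ below.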
def __init__(self, data, num_comp_init=10, max_iter=200, thresh=1e-5,
             verbose=False, init_mean=None, init_covar=None,
             init_mixweights=None, init_method='d2-weighting',
             prior_dirichlet=1e-3, dof_init=2, remove_comp_thresh=1e-2,
             whiten_data=False, plot_monitor=False, use_approx=True):
    """Fit the model to the data using Variational Bayes."""
    (num_obs, num_features) = np.shape(data)

    if whiten_data:
        data = whiten(data)

    self.data = data
    self.remove_comp_thresh = remove_comp_thresh

    # Choose the method used to initialize the parameters
    if init_method == 'd2-weighting':
        init_method = self._init_d2_weighting
    elif init_method == 'kmeans':
        init_method = self._init_kmeans
    elif init_method == 'random':
        init_method = self._init_random

    if init_mean is None:
        # No starting solution was supplied;
        # initialize with `init_method`
        (init_mean, labels, init_covar, init_mixweights) = \
            init_method(num_comp_init)
    else:
        # A starting solution was supplied
        num_comp_init = init_mean.shape[0]
        if init_mixweights is None:
            labels = classify_by_distance(data, init_mean, init_covar)
            init_mixweights = element_weights(labels)
        if init_covar is None:
            init_covar = self._get_covar(data, labels)

    # Initialize data structures
    Prior = _Prior(data, num_comp_init, prior_dirichlet)
    ESS = _ESS(data, num_comp_init, init_mean, init_covar, init_mixweights)
    LatentVariables = _LatentVariables(data, ESS, num_comp_init)
    Posterior = _Posterior(Prior, num_comp_init, dof_init,
                           use_approx=use_approx)
    LowerBound = _LowerBound(data, num_obs, num_features, num_comp_init,
                             Prior)

    # Initial M-step
    Posterior.update_parameters(Prior, ESS, LatentVariables)

    # Main loop
    iteration = 1
    done = False

    if plot_monitor:
        self._plot_monitor_init()

    while not done:
        # Update parameters
        self._update_step(Prior, Posterior, ESS, LatentVariables,
                          LowerBound)

        # Converged?
        if iteration == 1:
            converged = False
        else:
            converged = self._convergence_test(LowerBound, thresh)
        done = converged or (iteration >= max_iter)

        if plot_monitor:
            self._plot_monitor_update(ESS)
        if verbose:
            print('iteration %d, lower bound: %f'
                  % (iteration, LowerBound.lower_bound[-1]))
            print(Posterior.nws_mean)

        iteration += 1

    self.Posterior = Posterior
    self.Prior = Prior
    self.LatentVariables = LatentVariables
    self.ESS = ESS
    self.LowerBound = LowerBound

    # Call the main loop of the wxFrame to keep the window from closing
    if plot_monitor:
        self.frame.end_of_iteration()
        self.app.MainLoop()
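# --- Usage sketch (added, not part of the original source) ---
# A minimal, illustrative way to drive the constructor above. `VBGMM` is a
# hypothetical stand-in for the enclosing class (its real name is not shown
# in this section), and `numpy` plus the helpers used above (`whiten`,
# `classify_by_distance`, `element_weights`) are assumed to be imported at
# module level.
#
#     import numpy as np
#
#     rng = np.random.default_rng(0)
#     data = np.vstack([rng.normal(-2.0, 0.5, size=(200, 2)),
#                       rng.normal(+2.0, 0.5, size=(200, 2))])
#
#     model = VBGMM(data, num_comp_init=5, max_iter=100,
#                   init_method='kmeans', verbose=True)
#
#     # Fitted structures are stored on the instance:
#     #   model.Posterior, model.Prior, model.ESS,
#     #   model.LatentVariables, model.LowerBound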