import numpy as np
import pylab

import mixture
import util


def check_e_step():
    """Check the E-step updates by making sure they maximize the variational
    objective with respect to the responsibilities. Note that this does not
    fully check your solution to Part 2, since it only applies to fully
    observed images."""
    np.random.seed(0)

    NUM_IMAGES = 100
    X = util.read_mnist_images(mixture.TRAIN_IMAGES_FILE)
    X = X[:NUM_IMAGES, :]
    model = mixture.train_from_labels(show=False)

    # Reduce the number of observations so that the posterior is less peaked.
    X = X[:, ::50]
    model.params.theta = model.params.theta[:, ::50]

    R = model.compute_posterior(X)
    opt = variational_objective(model, X, R, model.params.pi,
                                model.params.theta)

    if not np.allclose(R.sum(1), 1.):
        print('Uh-oh. Rows of R do not seem to sum to 1.')
    else:
        ok = True
        for i in range(20):
            new_R = perturb_R(R)
            new_obj = variational_objective(model, X, new_R, model.params.pi,
                                            model.params.theta)
            if new_obj > opt:
                ok = False
        if ok:
            print('The E-step seems OK.')
        else:
            print('Something seems to be wrong with the E-step.')
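
# check_e_step relies on two helpers that are not shown in this section.
# What follows is a minimal sketch of variational_objective, assuming a
# Bernoulli mixture model: X is an N x D matrix of pixel values in [0, 1],
# R an N x K responsibility matrix, pi a length-K vector of mixing
# proportions, and theta a K x D matrix of per-cluster pixel probabilities.
# The `model` argument is unused in this sketch; the real implementation
# may route the computation through it instead.
def variational_objective(model, X, R, pi, theta):
    """Evaluate the variational free energy
        sum_{i,k} R[i,k] * (log pi[k] + log p(x_i | theta[k, :]))
          - sum_{i,k} R[i,k] * log R[i,k],
    which the E-step should maximize with respect to R."""
    # Expected Bernoulli log-likelihood of each image under each component.
    log_p = np.dot(X, np.log(theta).T) + np.dot(1. - X, np.log(1. - theta).T)
    expected_log_joint = np.sum(R * (np.log(pi)[None, :] + log_p))
    # Entropy of the responsibilities; clip so that 0 * log 0 acts as 0.
    entropy = -np.sum(R * np.log(np.maximum(R, 1e-100)))
    return expected_log_joint + entropy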
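
# A minimal sketch of perturb_R, the other undefined helper: it nudges the
# responsibilities multiplicatively by random noise and renormalizes each
# row, so the perturbed R remains a valid distribution over mixture
# components for every image. The noise scale `eps` is an assumption; the
# original helper may perturb R differently.
def perturb_R(R, eps=0.01):
    """Return a slightly perturbed copy of R whose rows still sum to 1."""
    new_R = R * np.exp(eps * np.random.normal(size=R.shape))
    return new_R / new_R.sum(1, keepdims=True)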


if __name__ == '__main__':
    check_e_step()
    check_m_step()

    print("Part 1 values:")
    mixture.print_part_1_values()

    print("Part 2 values:")
    mixture.print_part_2_values()  # uses train_from_labels

    print("Training with labels()")
    model = mixture.train_from_labels()

    print("log_probs_by_digit_class")
    mixture.print_log_probs_by_digit_class(model)

    print("Training with em()")
    mixture.train_with_em()

    pylab.show()