def run_mstep_test_input(self, test):
        """Run one M-step on the data supplied by *test* and compare every
        parameter (mu, var, p) of the result against the expected mixture.

        ``test.data()`` is expected to yield ``(X, K, expected_mixture, post)``;
        K is unpacked but not used by this check.
        """
        X, K, expected_mixture, post = test.data()
        # NOTE(review): expected_mixture is also passed as the *input* mixture
        # to mstep -- the test fixture provides no separate initial mixture.
        # Confirm this is intentional.
        mixture = em.mstep(X, post, expected_mixture)

        # One subtest per parameter so a single mismatch does not hide the rest.
        for name in ('mu', 'var', 'p'):
            got = getattr(mixture, name)
            want = getattr(expected_mixture, name)
            with self.subTest(msg=f'Check {name}'):
                self.assertTrue(
                    np.allclose(got, want),
                    f'{name} not as expected.\nExpected: {want}\nGot:{got}')
    def test_mstep_test_solution(self):
        """Run one M-step on the toy dataset from ``ts`` and compare each
        parameter (mu, var, p) against the reference mixture
        ``ts.mixture_first_mstep``."""
        expected_mixture = ts.mixture_first_mstep
        mixture = em.mstep(ts.X, ts.post_first_estep, ts.mixture_initial)

        # One subtest per parameter so a single mismatch does not hide the rest.
        for name in ('mu', 'var', 'p'):
            got = getattr(mixture, name)
            want = getattr(expected_mixture, name)
            with self.subTest(msg=f'Check {name}'):
                self.assertTrue(
                    np.allclose(got, want),
                    f'{name} not as expected.\nExpected: {want}\nGot:{got}')
# --- Beispiel #3 (Example #3) --- snippet separator from the original paste;
# commented out so it is no longer a Python syntax error.
# NOTE(review): this example chunk relies on `X` and `mixture` (and `em`,
# `common`) being defined by earlier code that is not visible here -- confirm
# before running it on its own.
# print("Input:")
# print('X:\n' + str(X))
# print('K: ' + str(K))
# print('Mu:\n' + str(mixture.mu))
# print('Var: ' + str(mixture.var))
# print('P: ' + str(mixture.p))
# print()

# print("After first E-step:")
# E-step: `post` holds per-point responsibilities, `ll` the log-likelihood
# (mirrors the `post, LL = em.estep(...)` usage elsewhere in this file).
post, ll = em.estep(X, mixture)
# print('post:\n' + str(post))
# print('LL:' + str(ll))
# print()

# print("After first M-step:")
# M-step: here the return value is unpacked as a (mu, var, p) tuple, unlike
# other chunks in this file that keep it as a single mixture object --
# presumably a different version of `em.mstep`; verify against that module.
mu, var, p = em.mstep(X, post, mixture)
# print('Mu:\n' + str(mu))
# print('Var: ' + str(var))
# print('P: ' + str(p))
# print()

# print("After a run")
# Full EM run; returns final parameters, final posteriors, and log-likelihood.
(mu, var, p), post, ll = em.run(X, mixture, post)
# print('Mu:\n' + str(mu))
# print('Var: ' + str(var))
# print('P: ' + str(p))
# print('post:\n' + str(post))
# print('LL: ' + str(ll))
# Fill the missing entries of X using the fitted Gaussian mixture.
X_pred = em.fill_matrix(X, common.GaussianMixture(mu, var, p))
# error = common.rmse(X_gold, X_pred)
# print("X_gold:\n" + str(X_gold))
# --- Beispiel #4 (Example #4) --- snippet separator from the original paste;
# commented out so it is no longer a Python syntax error.
import numpy as np
import em
import common

# Partially observed ratings matrix and its fully observed gold counterpart.
X = np.loadtxt("test_incomplete.txt")
X_gold = np.loadtxt("test_complete.txt")

K = 4             # number of mixture components
n, d = X.shape    # data dimensions
seed = 0          # RNG seed for a reproducible initialization

# One EM iteration by hand: initialize, E-step, then M-step, printing each
# intermediate result.
mixture0, post = common.init(X, K, seed)
print(X)
print(mixture0)

post, loglik = em.estep(X, mixture0)
print(post)
print(loglik)

refit = em.mstep(X, post, mixture0, 0.25)
print(refit)
# --- Beispiel #5 (Example #5) --- snippet separator from the original paste;
# commented out so it is no longer a Python syntax error.
# =============================================================================
# 7. Implementing EM for matrix completion
# =============================================================================

# Partially observed matrix and its fully observed gold counterpart.
X = np.loadtxt("test_incomplete.txt")
X_gold = np.loadtxt("test_complete.txt")

K = 4
n, d = X.shape
seed = 0

# for incomplete case: one EM iteration (init -> E-step -> M-step).
mixture, post = common.init(X, K=K, seed=seed)
post, log_likelihood = em.estep(X, mixture)
mixture = em.mstep(X, post, mixture)

# =============================================================================
# 8. Using the mixture model for collaborative filtering
# Reporting log likelihood values on Netflix data
# =============================================================================

X = np.loadtxt("netflix_incomplete.txt")

mixture, post = common.init(X, K=1, seed=0)
post, log_likelihood = em.estep(X, mixture)
# Fix: the M-step result was bound to the misspelled, never-read name
# `mixtured`, silently discarding the update; bind it to `mixture` as in
# the parallel section 7 above.
mixture = em.mstep(X, post, mixture)

# Hyperparameter grid for the experiments that follow.
Ks = [1, 12]
seeds = [0, 1, 2, 3, 4]
import numpy as np
import em
import common
from scipy.special import logsumexp

# Partially observed data and the fully observed reference matrix.
X = np.loadtxt("test_incomplete.txt")
X_gold = np.loadtxt("test_complete.txt")

K = 4
n, d = X.shape
seed = 0

# Use the named constants rather than repeating the literals 4 and 0, so the
# run follows K/seed defined above (behavior unchanged: K=4, seed=0).
mixture, post = common.init(X, K, seed)
# TODO: Your code here

# One E-step followed by one M-step; print the re-estimated mixture.
post, log_likelihood = em.estep(X, mixture)

print(em.mstep(X, post, mixture))