def test_seed_same(): """Verifies that two MNDs initialized with the same seed produce the same samples """ rng = np.random.RandomState([1,2,3]) #the number in the argument here is the limit on #seed value seed = rng.randint(2147462579) dim = 3 mu = rng.randn(dim) rank = dim X = rng.randn(rank,dim) cov = np.dot(X.T,X) mnd1 = MND( sigma = cov, mu = mu, seed = seed) num_samples = 5 rd1 = mnd1.random_design_matrix(num_samples) rd1 = function([],rd1)() mnd2 = MND( sigma = cov, mu = mu, seed = seed) rd2 = mnd2.random_design_matrix(num_samples) rd2 = function([],rd2)() assert np.all(rd1 == rd2)
def test_seed_same(): """Verifies that two MNDs initialized with the same seed produce the same samples """ rng = np.random.RandomState([1, 2, 3]) #the number in the argument here is the limit on #seed value seed = rng.randint(2147462579) dim = 3 mu = rng.randn(dim) rank = dim X = rng.randn(rank, dim) cov = np.dot(X.T, X) mnd1 = MND(sigma=cov, mu=mu, seed=seed) num_samples = 5 rd1 = mnd1.random_design_matrix(num_samples) rd1 = function([], rd1)() mnd2 = MND(sigma=cov, mu=mu, seed=seed) rd2 = mnd2.random_design_matrix(num_samples) rd2 = function([], rd2)() assert np.all(rd1 == rd2)
def test_seed_diff(): """Verifies that two MNDs initialized with different seeds produce samples that differ at least somewhat (theoretically the samples could match even under valid behavior but this is extremely unlikely)""" skip_if_no_scipy() rng = np.random.RandomState([1,2,3]) #the number in the argument here is the limit on #seed value, and we subtract 1 so it will be #possible to add 1 to it for the second MND seed = rng.randint(2147462579) -1 dim = 3 mu = rng.randn(dim) rank = dim X = rng.randn(rank,dim) cov = np.dot(X.T,X) mnd1 = MND( sigma = cov, mu = mu, seed = seed) num_samples = 5 rd1 = mnd1.random_design_matrix(num_samples) rd1 = function([],rd1)() mnd2 = MND( sigma = cov, mu = mu, seed = seed + 1) rd2 = mnd2.random_design_matrix(num_samples) rd2 = function([],rd2)() assert np.any(rd1 != rd2)
def test_seed_diff(): """Verifies that two MNDs initialized with different seeds produce samples that differ at least somewhat (theoretically the samples could match even under valid behavior but this is extremely unlikely)""" skip_if_no_scipy() rng = np.random.RandomState([1, 2, 3]) #the number in the argument here is the limit on #seed value, and we subtract 1 so it will be #possible to add 1 to it for the second MND seed = rng.randint(2147462579) - 1 dim = 3 mu = rng.randn(dim) rank = dim X = rng.randn(rank, dim) cov = np.dot(X.T, X) mnd1 = MND(sigma=cov, mu=mu, seed=seed) num_samples = 5 rd1 = mnd1.random_design_matrix(num_samples) rd1 = function([], rd1)() mnd2 = MND(sigma=cov, mu=mu, seed=seed + 1) rd2 = mnd2.random_design_matrix(num_samples) rd2 = function([], rd2)() assert np.any(rd1 != rd2)
# Turn the log-spaced exponent grid into concrete beta (precision) values.
# NOTE(review): scaled_shifted, trials, num_beta, dim, true_beta and m come
# from the enclosing scope, which is not visible in this chunk -- confirm.
betas = 10**scaled_shifted
# kls[trial, j]: KL for candidate beta j; ml_kls[trial]: KL of the ML fit.
kls = np.zeros((trials, num_beta))
ml_kls = np.zeros((trials, ))
for trial in xrange(trials):
    #generate the data
    # Seed varies per trial (17 * (trial + 1)) so each trial draws a fresh
    # but reproducible dataset; sigma = I / true_beta is isotropic with
    # precision true_beta and zero mean.
    data_distribution = MND(sigma=np.identity(dim) / true_beta, mu=np.zeros((dim, )), seed=17 * (trial + 1))
    # Ground-truth diagonal model the fitted models are compared against.
    true = DiagonalMND(nvis=dim, init_beta=true_beta, init_mu=0., min_beta=.1, max_beta=10.)
    # Realize m samples to a numpy array, then wrap in a shared variable.
    X = sharedX(function([], data_distribution.random_design_matrix(m))())
    Xv = X.get_value()
    # Closed-form maximum-likelihood estimates: sample mean, and precision
    # as the reciprocal of the per-dimension sample variance.
    mu = Xv.mean(axis=0)
    print 'maximum likelihood mu: ', mu
    diff = Xv - mu
    var = np.square(diff).mean(axis=0)
    mlbeta = 1. / var
    print 'maximum likelihood beta: ', mlbeta
    # Model built directly from the ML estimates; wide beta bounds so the
    # estimate is not clipped.
    ml_model = DiagonalMND(nvis=dim, init_mu=mu, init_beta=mlbeta, min_beta=0.0, max_beta=1e6)
    # KL(true, ml_model) as a symbolic expression, compiled and evaluated
    # to a scalar. The loop continues past the end of this chunk.
    ml_kl = kl_divergence(true, ml_model)
    ml_kl = function([], ml_kl)()
# Build num_beta exponents evenly spaced in [min_exp, max_exp], then
# exponentiate to get a log-spaced grid of candidate beta values.
# NOTE(review): num_beta, min_exp, max_exp, trials, dim, true_beta and m
# are defined in the enclosing (not shown) scope -- confirm there.
idxs = np.arange(num_beta)
# float() forces true division under Python 2 integer semantics.
pos = idxs / float(num_beta-1)
scaled_shifted = pos * (max_exp-min_exp) + min_exp
betas = 10 ** scaled_shifted
# kls[trial, j]: KL for candidate beta j; ml_kls[trial]: KL of the ML fit.
kls = np.zeros((trials,num_beta))
ml_kls = np.zeros((trials,))
for trial in xrange(trials):
    #generate the data
    # Per-trial seed (17 * (trial+1)) gives independent but reproducible
    # draws from a zero-mean isotropic Gaussian with precision true_beta.
    data_distribution = MND( sigma = np.identity(dim) / true_beta, mu = np.zeros((dim,)), seed = 17 * (trial+1) )
    # Ground-truth diagonal model the fitted models are compared against.
    true = DiagonalMND( nvis = dim, init_beta = true_beta, init_mu = 0., min_beta = .1, max_beta = 10.)
    # Realize m samples to a numpy array, then wrap in a shared variable.
    X = sharedX(function([],data_distribution.random_design_matrix(m))())
    Xv = X.get_value()
    # Closed-form maximum-likelihood estimates: sample mean, and precision
    # as the reciprocal of the per-dimension sample variance.
    mu = Xv.mean(axis=0)
    print 'maximum likelihood mu: ',mu
    diff = Xv - mu
    var = np.square(diff).mean(axis=0)
    mlbeta = 1./var
    print 'maximum likelihood beta: ',mlbeta
    # Model built directly from the ML estimates; wide beta bounds so the
    # estimate is not clipped.
    ml_model = DiagonalMND( nvis = dim, init_mu = mu, init_beta = mlbeta, min_beta = 0.0, max_beta = 1e6)
    # KL(true, ml_model) compiled and evaluated to a scalar; KL divergence
    # must be non-negative, so a violation indicates a bug.
    ml_kl = kl_divergence( true, ml_model)
    ml_kl = function([],ml_kl)()
    assert ml_kl >= 0.0
    # Record this trial's ML-fit KL; the loop continues past this chunk.
    ml_kls[trial] = ml_kl