def test_loglikelihood(self):
    """
    Tests whether 1-dimensional GSMs are normalized. Tests the
    log-likelihood of several instantiations of the GSM.
    """

    # check whether the log-likelihood of 1D GSMs is normalized
    for num_scales in [1, 2, 3, 4, 5]:
        model = GSM(1, num_scales=num_scales)

        # implied probability density of model
        pdf = lambda x: exp(model.loglikelihood(array(x).reshape(1, -1)))

        # compute normalization constant and upper bound on error
        partf, err = integrate.quad(pdf, -inf, inf)

        self.assertTrue(partf - err <= 1.)
        self.assertTrue(partf + err >= 1.)

    # test the log-likelihood of a couple of GSMs
    for dim in [1, 2, 3, 4, 5]:
        for num_scales in [1, 2, 3, 4, 5]:
            # create Gaussian scale mixture
            model = GSM(dim, num_scales=num_scales)
            scales = model.scales.reshape(-1, 1)

            # create random data
            data = randn(model.dim, 100)

            # evaluate likelihood
            ll = logmeanexp(
                -0.5 * sum(square(data), 0) / square(scales)
                - model.dim * log(scales)
                - model.dim / 2. * log(2. * pi), 0)

            self.assertTrue(all(abs(ll - model.loglikelihood(data)) < 1E-6))

            # random scales
            scales = rand(num_scales, 1) + 0.5
            model.scales[:] = scales.flatten()

            # sample data from model
            data = model.sample(100)

            # evaluate likelihood
            ll = logmeanexp(
                -0.5 * sum(square(data), 0) / square(scales)
                - model.dim * log(scales)
                - model.dim / 2. * log(2. * pi), 0)

            self.assertTrue(all(abs(ll - model.loglikelihood(data)) < 1E-6))
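# For reference, the quantity checked above can be written out directly: a
# zero-mean GSM with scales s_k has density p(x) = mean_k N(x; 0, s_k^2 I).
# The sketch below reproduces that formula using only NumPy/SciPy; it does
# not use the GSM class or the library's `logmeanexp` and is only meant to
# document what `model.loglikelihood` is expected to compute (the helper
# name is ours, not part of the library).
def _reference_gsm_loglikelihood(data, scales):
    from numpy import asarray, square, sum, log, pi
    from scipy.special import logsumexp

    data = asarray(data)  # shape (dim, num_samples)
    scales = asarray(scales).reshape(-1, 1)
    dim = data.shape[0]

    # log N(x; 0, s^2 I) for every scale (rows) and sample (columns)
    log_normal = -0.5 * sum(square(data), 0) / square(scales) \
        - dim * log(scales) - dim / 2. * log(2. * pi)

    # numerically stable log of the average over mixture components
    return logsumexp(log_normal, 0) - log(scales.size)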
def test_logjacobian(self):
    """
    Tests whether the log-Jacobian of radial Gaussianization is consistent
    with the GSM log-likelihood.
    """

    gsm = GSM(3, 10)
    gsm.initialize('cauchy')

    # standard normal distribution
    gauss = GSM(3, 1)
    gauss.scales[0] = 1.

    # generate test data
    samples = gsm.sample(100)

    rg = RadialGaussianization(gsm)

    # after Gaussianization, samples should be Gaussian distributed
    loglik_gsm = gsm.loglikelihood(samples)
    loglik_gauss = gauss.loglikelihood(rg(samples)) + rg.logjacobian(samples)

    dist = abs(loglik_gsm - loglik_gauss)

    self.assertTrue(all(dist < 1E-6))

    ###

    # test one-dimensional GSM
    gsm = GSM(1, 10)
    gsm.initialize('cauchy')

    # standard normal distribution
    gauss = GSM(1, 1)
    gauss.scales[0] = 1.

    # generate test data
    samples = gsm.sample(100)

    rg = RadialGaussianization(gsm)

    # after Gaussianization, samples should be Gaussian distributed
    loglik_gsm = gsm.loglikelihood(samples)
    loglik_gauss = gauss.loglikelihood(rg(samples)) + rg.logjacobian(samples)

    dist = abs(loglik_gsm - loglik_gauss)

    self.assertTrue(all(dist < 1E-6))
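# All of the log-Jacobian tests in this file rely on the change-of-variables
# formula: for an invertible map f with y = f(x) and y standard normal,
# log p_x(x) = log N(f(x); 0, I) + log|det df/dx(x)|. The one-dimensional
# sketch below illustrates the same identity without any of the library's
# Gaussianization classes (only NumPy/SciPy assumed): a Laplace variable is
# Gaussianized through its CDF, and the log-Jacobian implied by the density
# ratio is checked against a finite-difference estimate of the derivative.
def _check_gaussianization_identity(num_samples=100):
    import numpy as np
    from scipy.stats import norm

    def laplace_cdf(x):
        # CDF of the standard Laplace distribution
        return 0.5 + 0.5 * np.sign(x) * (1. - np.exp(-np.abs(x)))

    x = np.random.laplace(size=num_samples)

    # Gaussianization: y = Phi^{-1}(F(x)) is standard normal by construction
    y = norm.ppf(laplace_cdf(x))

    # log-Jacobian implied by the identity: log p_x(x) - log N(y; 0, 1)
    loglik_x = -np.abs(x) - np.log(2.)
    logjacobian = loglik_x - norm.logpdf(y)

    # the implied Jacobian should match the actual derivative dy/dx
    eps = 1e-6
    dydx = (norm.ppf(laplace_cdf(x + eps))
        - norm.ppf(laplace_cdf(x - eps))) / (2. * eps)
    assert np.all(np.abs(np.log(dydx) - logjacobian) < 1e-4)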
def test_logjacobian(self):
    """
    Tests whether the log-Jacobian of subspace Gaussianization is consistent
    with the log-likelihood of the ISA model.
    """

    isa = ISA(4, 4, 2)

    # standard normal distribution
    gauss = GSM(4, 1)
    gauss.scales[0] = 1.

    # generate test data
    samples = isa.sample(100)

    sg = SubspaceGaussianization(isa)

    # after Gaussianization, samples should be Gaussian distributed
    loglik_isa = isa.loglikelihood(samples)
    loglik_gauss = gauss.loglikelihood(sg(samples)) + sg.logjacobian(samples)

    dist = abs(loglik_isa - loglik_gauss)

    self.assertTrue(all(dist < 1E-6))

    ###

    # test ICA (ISA with one-dimensional subspaces)
    isa = ISA(3, 3, 1)

    # standard normal distribution
    gauss = GSM(3, 1)
    gauss.scales[0] = 1.

    # generate test data
    samples = isa.sample(100)

    sg = SubspaceGaussianization(isa)

    # after Gaussianization, samples should be Gaussian distributed
    loglik_isa = isa.loglikelihood(samples)
    loglik_gauss = gauss.loglikelihood(sg(samples)) + sg.logjacobian(samples)

    dist = abs(loglik_isa - loglik_gauss)

    self.assertTrue(all(dist < 1E-6))
def test_logjacobian(self):
    """
    Tests whether the log-Jacobian of marginal Gaussianization is consistent
    with the log-likelihood of the ICA model.
    """

    ica = ICA(4)

    # standard normal distribution
    gauss = GSM(4, 1)
    gauss.scales[0] = 1.

    # generate test data
    samples = ica.sample(100)

    mg = MarginalGaussianization(ica)

    # after Gaussianization, samples should be Gaussian distributed
    loglik_ica = ica.loglikelihood(samples)
    loglik_gauss = gauss.loglikelihood(mg(samples)) + mg.logjacobian(samples)

    dist = abs(loglik_ica - loglik_gauss)

    self.assertTrue(all(dist < 1E-6))
def test_sample(self):
    """
    Compares model density with histogram obtained from samples.
    """

    model = GSM(1, 3)
    model.scales = array([1., 3., 8.])

    data = model.sample(50000)

    try:
        hist, x = histogram(data, 100, density=True)
    except TypeError:
        # fall back to the deprecated keyword of older NumPy versions
        hist, x = histogram(data, 100, normed=True)

    # evaluate density at bin centers
    x = (x[1:] + x[:-1]) / 2.
    pdf = exp(model.loglikelihood(x.reshape(1, -1)))

    self.assertTrue(all(abs(pdf - hist) < 1E-1))
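# `GSM.sample` itself is not shown in this file; under the convention the
# other tests assume (zero mean, equal weight on each scale), sampling from
# a GSM amounts to picking one scale per sample and scaling white Gaussian
# noise. A minimal sketch of that scheme (the helper name and the
# equal-weights assumption are ours, not taken from the library):
def _reference_gsm_sample(dim, scales, num_samples):
    from numpy import asarray
    from numpy.random import choice, randn

    scales = asarray(scales).flatten()

    # draw one mixture component per sample, then scale white noise
    s = choice(scales, size=num_samples)
    return randn(dim, num_samples) * s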