def test_sample_confidence_region():
    """Test sampling of confidence region."""
    random_state = check_random_state(42)
    mvn = MVN(mean=np.array([1.0, 2.0]),
              covariance=np.array([[1.0, 0.0], [0.0, 4.0]]),
              random_state=random_state)
    samples = mvn.sample_confidence_region(100, 0.9)
    for sample in samples:
        assert_true(mvn.is_in_confidence_region(sample, 0.9))
def test_probability_density():
    """Test PDF of MVN."""
    random_state = check_random_state(0)
    mvn = MVN(mean, covariance, random_state=random_state)

    x = np.linspace(-100, 100, 201)
    # np.vstack requires a sequence, not a map object, on recent NumPy:
    X = np.vstack(list(map(np.ravel, np.meshgrid(x, x)))).T
    p = mvn.to_probability_density(X)
    approx_int = np.sum(p) * ((x[-1] - x[0]) / 201) ** 2
    assert_less(np.abs(1.0 - approx_int), 0.01)
def test_marginal_distribution():
    """Test moments from marginal MVN."""
    random_state = check_random_state(0)
    mean = np.array([0.0, 1.0])
    covariance = np.array([[0.5, -1.0], [-1.0, 5.0]])
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    marginalized = mvn.marginalize(np.array([0]))
    assert_equal(marginalized.mean, np.array([0.0]))
    assert_equal(marginalized.covariance, np.array([0.5]))
    marginalized = mvn.marginalize(np.array([1]))
    assert_equal(marginalized.mean, np.array([1.0]))
    assert_equal(marginalized.covariance, np.array([5.0]))
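# For reference, a minimal standalone sketch of what marginalization does
# (not the library implementation): the marginal of a Gaussian simply keeps
# the selected entries of the mean and covariance.
import numpy as np

sketch_mean = np.array([0.0, 1.0])
sketch_covariance = np.array([[0.5, -1.0], [-1.0, 5.0]])
indices = np.array([0])
marginal_mean = sketch_mean[indices]                                # [0.0]
marginal_covariance = sketch_covariance[np.ix_(indices, indices)]   # [[0.5]]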
def test_gmm_to_mvn_vs_mvn():
    """Test that GMM.to_mvn matches a directly estimated MVN."""
    random_state = check_random_state(0)
    gmm = GMM(n_components=2, random_state=random_state)
    gmm.from_samples(X)
    mvn_from_gmm = gmm.to_mvn()
    mvn = MVN(random_state=random_state)
    mvn.from_samples(X)
    assert_array_almost_equal(mvn_from_gmm.mean, mvn.mean)
    assert_array_almost_equal(
        mvn_from_gmm.covariance, mvn.covariance, decimal=3)
def test_ellipse():
    """Test equiprobable ellipse."""
    random_state = check_random_state(0)
    mean = np.array([0.0, 1.0])
    covariance = np.array([[0.5, 0.0], [0.0, 5.0]])
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)
    angle, width, height = mvn.to_ellipse()
    assert_equal(angle, 0.5 * np.pi)
    assert_equal(width, np.sqrt(5.0))
    assert_equal(height, np.sqrt(0.5))
def test_conditional_distribution():
    """Test moments from conditional MVN."""
    random_state = check_random_state(0)
    mean = np.array([0.0, 1.0])
    covariance = np.array([[0.5, 0.0], [0.0, 5.0]])
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    conditional = mvn.condition(np.array([1]), np.array([5.0]))
    assert_equal(conditional.mean, np.array([0.0]))
    assert_equal(conditional.covariance, np.array([0.5]))
    conditional = mvn.condition(np.array([0]), np.array([0.5]))
    assert_equal(conditional.mean, np.array([1.0]))
    assert_equal(conditional.covariance, np.array([5.0]))
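# Conditioning a Gaussian follows the standard formulas
#     mu_{1|2}    = mu_1 + Sigma_12 Sigma_22^{-1} (x_2 - mu_2)
#     Sigma_{1|2} = Sigma_11 - Sigma_12 Sigma_22^{-1} Sigma_21
# A minimal standalone sketch (a hypothetical helper, not the library
# implementation):
import numpy as np

def condition_gaussian(mean, cov, kept, given, x_given):
    """Condition N(mean, cov) on x[given] = x_given; return moments of x[kept]."""
    c12 = cov[np.ix_(kept, given)]
    prec22 = np.linalg.inv(cov[np.ix_(given, given)])
    cond_mean = mean[kept] + c12.dot(prec22).dot(x_given - mean[given])
    cond_cov = cov[np.ix_(kept, kept)] - c12.dot(prec22).dot(c12.T)
    return cond_mean, cond_cov

# Reproduces the first assertion of the test above:
m, c = condition_gaussian(np.array([0.0, 1.0]),
                          np.array([[0.5, 0.0], [0.0, 5.0]]),
                          kept=[0], given=[1], x_given=np.array([5.0]))
# m -> [0.0], c -> [[0.5]]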
def test_unscented_transform_linear_combination():
    """Test unscented transform with a linear combination."""
    mvn = MVN(mean=np.zeros(2), covariance=np.eye(2), random_state=42)
    points = mvn.sigma_points()

    new_points = np.empty_like(points)
    new_points[:, 0] = points[:, 1]
    new_points[:, 1] = points[:, 0] - 0.5 * points[:, 1]
    new_points += np.array([-0.5, 3.0])

    transformed_mvn = mvn.estimate_from_sigma_points(new_points)
    assert_array_almost_equal(transformed_mvn.mean, np.array([-0.5, 3.0]))
    assert_array_almost_equal(transformed_mvn.covariance,
                              np.array([[1.0, -0.5], [-0.5, 1.25]]))
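# Sanity check for the expected moments: for a linear map y = A x + b of
# x ~ N(mu, Sigma), the exact result is y ~ N(A mu + b, A Sigma A^T).
# With mu = 0 and Sigma = I, the map used in the test above gives:
import numpy as np

A = np.array([[0.0, 1.0],     # y_0 = x_1
              [1.0, -0.5]])   # y_1 = x_0 - 0.5 * x_1
b = np.array([-0.5, 3.0])
print(A.dot(np.eye(2)).dot(A.T))  # [[1.0, -0.5], [-0.5, 1.25]]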
def test_probability_density_without_noise():
    """Test probability density of MVN with non-invertible covariance."""
    random_state = check_random_state(0)

    n_samples = 10
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = np.ones((n_samples, 1))
    samples = np.hstack((x, y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 1.0]), decimal=2)
    assert_equal(mvn.covariance[1, 1], 0.0)
    p_training = mvn.to_probability_density(samples)
    p_test = mvn.to_probability_density(samples + 1)
    assert_true(np.all(p_training > p_test))
def test_regression_without_noise():
    """Test regression without noise with MVN."""
    random_state = check_random_state(0)

    n_samples = 10
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    samples = np.hstack((x, y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 2.5]), decimal=2)

    pred, cov = mvn.predict(np.array([0]), x)
    mse = np.sum((y - pred) ** 2) / n_samples
    assert_less(mse, 1e-10)
    assert_less(cov[0, 0], 1e-10)
def test_uninitialized():
    """Test behavior of uninitialized MVN."""
    random_state = check_random_state(0)
    mvn = MVN(random_state=random_state)
    assert_raises(ValueError, mvn.sample, 10)
    assert_raises(ValueError, mvn.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, mvn.marginalize, np.zeros(0))
    assert_raises(ValueError, mvn.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.to_ellipse)

    mvn = MVN(mean=np.ones(2), random_state=random_state)
    assert_raises(ValueError, mvn.sample, 10)
    assert_raises(ValueError, mvn.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, mvn.marginalize, np.zeros(0))
    assert_raises(ValueError, mvn.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.to_ellipse)
def test_unscented_transform_linear_transformation():
    """Test unscented transform with a linear transformation."""
    mvn = MVN(mean=np.zeros(2), covariance=np.eye(2), random_state=42)
    points = mvn.sigma_points()

    new_points = np.copy(points)
    new_points[:, 1] *= 10
    new_points += np.array([0.5, -3.0])

    transformed_mvn = mvn.estimate_from_sigma_points(new_points)
    assert_array_almost_equal(transformed_mvn.mean, np.array([0.5, -3.0]))
    assert_array_almost_equal(transformed_mvn.covariance,
                              np.array([[1.0, 0.0], [0.0, 100.0]]))

    sample1 = transformed_mvn.sample(1)
    sample2 = mvn.estimate_from_sigma_points(
        new_points, random_state=42).sample(1)
    assert_array_almost_equal(sample1, sample2)
def test_squared_mahalanobis_distance():
    """Test Mahalanobis distance."""
    mvn = MVN(mean=np.zeros(2), covariance=np.eye(2))
    assert_almost_equal(
        np.sqrt(mvn.squared_mahalanobis_distance(np.zeros(2))), 0.0)
    assert_almost_equal(
        np.sqrt(mvn.squared_mahalanobis_distance(np.array([0, 1]))), 1.0)

    mvn = MVN(mean=np.zeros(2), covariance=4.0 * np.eye(2))
    assert_almost_equal(
        np.sqrt(mvn.squared_mahalanobis_distance(np.array([2, 0]))), 1.0)
    assert_almost_equal(
        np.sqrt(mvn.squared_mahalanobis_distance(np.array([2, 2]))),
        np.sqrt(2))
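# The expected values above follow directly from the definition of the
# squared Mahalanobis distance, d^2(x) = (x - mu)^T Sigma^{-1} (x - mu).
# A direct standalone check of the last case:
import numpy as np

mu = np.zeros(2)
sigma = 4.0 * np.eye(2)
diff = np.array([2.0, 2.0]) - mu
print(np.sqrt(diff.dot(np.linalg.inv(sigma)).dot(diff)))  # sqrt(2) ~ 1.414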
def test_unscented_transform_projection_to_more_dimensions():
    """Test unscented transform with a projection to 3D."""
    mvn = MVN(mean=np.zeros(2), covariance=np.eye(2), random_state=42)
    points = mvn.sigma_points()

    def f(points):
        new_points = np.empty((len(points), 3))
        new_points[:, 0] = points[:, 0]
        new_points[:, 1] = points[:, 1]
        new_points[:, 2] = -0.5 * points[:, 0] + 0.5 * points[:, 1]
        new_points += np.array([-0.5, 3.0, 10.0])
        return new_points

    transformed_mvn = mvn.estimate_from_sigma_points(f(points))
    assert_array_almost_equal(transformed_mvn.mean,
                              np.array([-0.5, 3.0, 10.0]))
    assert_array_almost_equal(
        transformed_mvn.covariance,
        np.array([[1.0, 0.0, -0.5],
                  [0.0, 1.0, 0.5],
                  [-0.5, 0.5, 0.5]]))
def test_regression_with_2d_input():
    """Test regression with MVN and two-dimensional input."""
    random_state = check_random_state(0)

    n_samples = 100
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    noise = random_state.randn(n_samples, 1) * 0.01
    y += noise
    samples = np.hstack((x, x[::-1], y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 0.5, 2.5]), decimal=2)

    x_test = np.hstack((x, x[::-1]))
    pred, cov = mvn.predict(np.array([0, 1]), x_test)
    mse = np.sum((y - pred) ** 2) / n_samples
    assert_less(mse, 1e-3)
    assert_less(cov[0, 0], 0.01)
def test_unscented_transform_quadratic():
    """Test unscented transform with a quadratic transformation."""
    mvn = MVN(mean=np.zeros(2), covariance=np.eye(2), random_state=42)
    points = mvn.sigma_points(alpha=0.67, kappa=5.0)

    def f(points):
        new_points = np.empty_like(points)
        new_points[:, 0] = points[:, 0] ** 2 * np.sign(points[:, 0])
        new_points[:, 1] = points[:, 1] ** 2 * np.sign(points[:, 1])
        new_points += np.array([5.0, -3.0])
        return new_points

    transformed_mvn = mvn.estimate_from_sigma_points(
        f(points), alpha=0.67, kappa=5.0)
    assert_array_almost_equal(transformed_mvn.mean, np.array([5.0, -3.0]))
    assert_array_almost_equal(
        transformed_mvn.covariance,
        np.array([[3.1, 0.0], [0.0, 3.1]]), decimal=1)
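# Monte-Carlo cross-check of the expected covariance: for x ~ N(0, 1) the
# transformed variable y = x**2 * sign(x) is odd in x, so E[y] = 0 and
# Var(y) = E[x**4] = 3. The unscented estimate of ~3.1 above is therefore
# only slightly biased:
import numpy as np

rs = np.random.RandomState(0)
x = rs.randn(1000000)
y = x ** 2 * np.sign(x)
print(y.mean(), y.var())  # approximately 0.0 and 3.0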
def safe_sample(self, alpha):
    self._check_initialized()

    # Safe prior sampling: drop components with a prior below the average
    # (1 / n_components) and renormalize the rest.
    priors = self.priors.copy()
    priors[priors < 1.0 / self.n_components] = 0.0
    priors /= priors.sum()
    assert abs(priors.sum() - 1.0) < 1e-5
    mvn_index = self.random_state.choice(
        self.n_components, size=1, p=priors)[0]

    # Allow only samples from the alpha-confidence region
    # (rejection sampling).
    mvn = MVN(mean=self.means[mvn_index],
              covariance=self.covariances[mvn_index],
              random_state=self.random_state)
    sample = mvn.sample(1)[0]
    while (mahalanobis_distance(sample, mvn)
           > chi2(len(sample) - 1).ppf(alpha)):
        sample = mvn.sample(1)[0]
    return sample
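# Hypothetical usage sketch (assumptions: safe_sample is bound to a fitted
# GMM, and mahalanobis_distance(sample, mvn) returns the squared
# Mahalanobis distance -- neither definition appears in this excerpt):
#
#     gmm = GMM(n_components=2, random_state=0)
#     gmm.from_samples(X)
#     sample = gmm.safe_sample(0.95)  # draw only from the 95% region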
def test_estimate_moments():
    """Test moments estimated from samples and sampling from MVN."""
    random_state = check_random_state(0)
    actual_mean = np.array([0.0, 1.0])
    actual_covariance = np.array([[0.5, -1.0], [-1.0, 5.0]])
    X = random_state.multivariate_normal(
        actual_mean, actual_covariance, size=(100000,))
    mvn = MVN(random_state=random_state)
    mvn.from_samples(X)
    assert_less(np.linalg.norm(mvn.mean - actual_mean), 0.02)
    assert_less(np.linalg.norm(mvn.covariance - actual_covariance), 0.02)

    X2 = mvn.sample(n_samples=100000)
    mvn2 = MVN(random_state=random_state)
    mvn2.from_samples(X2)
    assert_less(np.linalg.norm(mvn2.mean - actual_mean), 0.03)
    assert_less(np.linalg.norm(mvn2.covariance - actual_covariance), 0.03)
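# from_samples computes the maximum likelihood estimate of the moments.
# A standalone sketch of the equivalent computation (note: np.cov defaults
# to the N-1 normalization, whereas the strict MLE divides by N; which
# convention the library uses is not shown in this excerpt):
import numpy as np

rs = np.random.RandomState(0)
X_demo = rs.multivariate_normal(np.zeros(2), np.eye(2), size=100)
mean_estimate = X_demo.mean(axis=0)
covariance_estimate = np.cov(X_demo, rowvar=False)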
def test_plot():
    """Test plot of MVN."""
    random_state = check_random_state(0)
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    ax = AxisStub()
    try:
        plot_error_ellipse(ax, mvn)
    except ImportError:
        raise SkipTest("matplotlib is required for this test")
    assert_equal(ax.count, 8)
    plot_error_ellipse(ax, mvn, color="r")
    assert_equal(ax.count, 16)
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from gmr.utils import check_random_state
from gmr import MVN, GMM, plot_error_ellipses


random_state = check_random_state(0)

n_samples = 10
X = np.ndarray((n_samples, 2))
X[:, 0] = np.linspace(0, 2 * np.pi, n_samples)
X[:, 1] = 1 - 3 * X[:, 0] + random_state.randn(n_samples)

mvn = MVN(random_state=0)
mvn.from_samples(X)
X_test = np.linspace(0, 2 * np.pi, 100)
mean, covariance = mvn.predict(np.array([0]), X_test[:, np.newaxis])

plt.figure(figsize=(10, 5))

plt.subplot(1, 2, 1)
plt.title(r"Linear: $p(Y | X) = \mathcal{N}(\mu_{Y|X}, \Sigma_{Y|X})$")
plt.scatter(X[:, 0], X[:, 1])
y = mean.ravel()
s = covariance.ravel()
plt.fill_between(X_test, y - s, y + s, alpha=0.2)
plt.plot(X_test, y, lw=2)
    angle = 180 * angle / np.pi
    ell = mpl.patches.Ellipse(mean, v[0], v[1], 180 + angle, color=color)
    ell.set_clip_box(splot.bbox)
    ell.set_alpha(0.5)
    splot.add_artist(ell)

X_test = X_test[:, np.newaxis]
Y_test = Y_test[:, np.newaxis]

for index in range(len(X_test)):
    model = Y_[index]
    mvn = MVN(mean=gmm.means_[model], covariance=gmm.covars_[model],
              random_state=random_state)
    # print(np.argmax(gmm.covars_[model]), model)
    if np.argmax(gmm.covars_[model]) == 0:
        conditioned = mvn.condition(x_axes, X_test[index])
        plt.scatter(X_test[index], conditioned.mean, s=200, marker=">",
                    color="k")
    elif np.argmax(gmm.covars_[model]) == 3:
        conditioned_y = mvn.condition(y_axes, Y_test[index])
        plt.scatter(conditioned_y.mean, Y_test[index], s=200, marker=">",
                    color="k")

plt.show()
plt.figure(figsize=(12, 4))

# parameters of unscented transform, these are the defaults:
alpha = 1e-3
beta = 2.0  # lower values give better estimates
kappa = 0.0

ax = plt.subplot(131)
ax.set_title("(1) Cartesian coordinates")
ax.set_xlabel("$x_1$")
ax.set_ylabel("$x_2$")
ax.set_xlim((-8, 8))
ax.set_ylim((-8, 8))
mvn_cartesian = MVN(
    mean=np.array([2.5, 1.3]),
    covariance=np.array([[1.0, -1.5], [-1.5, 4.0]]),
    random_state=0)
plot_error_ellipse(ax, mvn_cartesian)
samples_cartesian = mvn_cartesian.sample(1000)
ax.scatter(samples_cartesian[:, 0], samples_cartesian[:, 1], s=1)

ax = plt.subplot(132)
ax.set_title("(2) Polar coordinates")
ax.set_xlabel("$r$")
ax.set_ylabel(r"$\phi$")
ax.set_xlim((-8, 8))
ax.set_ylim((-8, 8))
sigma_points_cartesian = mvn_cartesian.sigma_points(alpha=alpha, kappa=kappa)
sigma_points_polar = cartesian_to_polar(sigma_points_cartesian)
mvn_polar = mvn_cartesian.estimate_from_sigma_points(
    sigma_points_polar, alpha=alpha, beta=beta, kappa=kappa)
plot_error_ellipse(ax, mvn_polar)
    if isinstance(seed, (numbers.Integral, np.integer)):
        return np.random.RandomState(seed)
    if isinstance(seed, np.random.RandomState):
        return seed
    raise ValueError("%r cannot be used to seed a numpy.random.RandomState"
                     " instance" % seed)


random_state = check_random_state(0)
x_axes = np.array([0])  # x-axis

X = np.loadtxt("samples3d", unpack=True)
print(X.shape)

gmm = mixture.GMM(n_components=4, covariance_type="full")
gmm.fit(X)
print(gmm.means_)
print(gmm.covars_)

Y = gmm.predict(X)
test_point = X[0]
test_point_model = Y[0]
print(np.array(test_point[0]))

mvn = MVN(mean=gmm.means_[test_point_model],
          covariance=gmm.covars_[test_point_model],
          random_state=random_state)
conditioned = mvn.condition([0], np.array([-1.0]))
print(conditioned.mean, mvn.mean)
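# Note: the script above targets the pre-0.18 scikit-learn mixture API
# (mixture.GMM with the covars_ attribute). A rough modern equivalent,
# offered as an untested sketch:
from sklearn.mixture import GaussianMixture

gmm = GaussianMixture(n_components=4, covariance_type="full")
gmm.fit(X)
means = gmm.means_
covariances = gmm.covariances_  # replaces the old covars_ attribute
labels = gmm.predict(X)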
========================================

Sometimes we want to avoid sampling regions of low probability. We will
see how this can be done in this example. We compare unconstrained
sampling with sampling from the 95.45 % and 68.27 % confidence regions.
In a one-dimensional Gaussian these would correspond to the 2-sigma and
1-sigma intervals, respectively.
"""
import numpy as np
from scipy.stats import chi2
import matplotlib.pyplot as plt
from gmr import MVN, plot_error_ellipse


random_state = np.random.RandomState(100)
mvn = MVN(mean=np.array([0.0, 0.0]),
          covariance=np.array([[1.0, 2.0], [2.0, 9.0]]),
          random_state=random_state)


def sample_confidence_region(mvn, n_samples, alpha):
    return np.array(
        [_sample_confidence_region(mvn, alpha) for _ in range(n_samples)])


def _sample_confidence_region(mvn, alpha):
    # Rejection sampling: draw until the sample falls inside the
    # alpha-confidence region.
    sample = mvn.sample(1)[0]
    while (mahalanobis_distance(sample, mvn)
           > chi2(len(sample) - 1).ppf(alpha)):
        sample = mvn.sample(1)[0]
    return sample
======================================================

The maximum likelihood estimate (MLE) of an MVN can be computed directly.
Then we can sample from the estimated distribution or compute the marginal
distributions.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from gmr.utils import check_random_state
from gmr import MVN, plot_error_ellipse


random_state = check_random_state(0)

mvn = MVN(random_state=random_state)
X = random_state.multivariate_normal([0.0, 1.0], [[0.5, 1.5], [1.5, 5.0]],
                                     size=(100,))
mvn.from_samples(X)
X_sampled = mvn.sample(n_samples=100)

plt.figure(figsize=(15, 5))
plt.subplot(1, 3, 1)
plt.xlim((-10, 10))
plt.ylim((-10, 10))
plot_error_ellipse(plt.gca(), mvn)
plt.scatter(X[:, 0], X[:, 1], c="g", label="Training data")
plt.scatter(X_sampled[:, 0], X_sampled[:, 1], c="r", label="Samples")
plt.title("Bivariate Gaussian")
plt.legend(loc="best")
    # del xdata[0]
    Y_point = float(px) * 100
    # ydata.append(Y_point)
    # del ydata[0]
    X2[0, 0] = X_point
    X2[0, 1] = Y_point
    Y_ = gmm.predict(X2)
    model = Y_[0]
    mvn = MVN(mean=gmm.means_[model], covariance=gmm.covars_[model],
              random_state=random_state)
    if np.argmax(gmm.covars_[model]) == 0:
        conditioned = mvn.condition(x_axes, np.array([X_point]))
        plt.scatter(X_point, conditioned.mean, s=200, marker=">", alpha=0.5,
                    color="k")
    elif np.argmax(gmm.covars_[model]) == 3:
        conditioned_y = mvn.condition(y_axes, np.array([Y_point]))
        plt.scatter(conditioned_y.mean, Y_point, s=200, marker=">", alpha=0.5,
                    color="k")
    plt.scatter(X_point, Y_point, color=color_list[model])
    # line.set_xdata(xdata)
    # line.set_ydata(ydata)
    plt.draw()
    plt.scatter(X_t[Y_ == i, 0], X_t[Y_ == i, 1], 2, color=color)

    # Plot an ellipse to show the Gaussian component
    angle = np.arctan(u[1] / u[0])
    angle = 180 * angle / np.pi
    ell = mpl.patches.Ellipse(mean, v[0], v[1], 180 + angle, color=color)
    ell.set_clip_box(splot.bbox)
    ell.set_alpha(0.5)
    splot.add_artist(ell)

for index in range(len(X_test)):
    model = Y_[index]
    X_point = X_test[index]
    Y_point = Y_test[index]
    MappingPoints.append([float(X_point), float(Y_point)])
    mvn = MVN(mean=clf.means_[model], covariance=clf.covars_[model],
              random_state=random_state)
    if np.argmax(clf.covars_[model]) == 0:
        conditioned = mvn.condition(x_axes, X_point)
        # plt.scatter(X_point, conditioned.mean, s=200, marker=">", color="k")
        MappedPoints.append([float(X_point), float(conditioned.mean)])
    elif np.argmax(clf.covars_[model]) == 3:
        conditioned_y = mvn.condition(y_axes, Y_point)
        # plt.scatter(conditioned_y.mean, Y_point, s=200, marker=">", color="k")
        MappedPoints.append([float(conditioned_y.mean), float(Y_point)])

MappingPoints = np.array(MappingPoints)
MappedPoints = np.array(MappedPoints)
def test_in_confidence_region():
    """Test check for confidence region."""
    mvn = MVN(mean=np.array([1.0, 2.0]),
              covariance=np.array([[1.0, 0.0], [0.0, 4.0]]))

    alpha_1sigma = 0.6827
    alpha_2sigma = 0.9545
    assert_true(mvn.is_in_confidence_region(mvn.mean, alpha_1sigma))
    assert_true(mvn.is_in_confidence_region(
        mvn.mean + np.array([1.0, 0.0]), alpha_1sigma))
    assert_false(mvn.is_in_confidence_region(
        mvn.mean + np.array([1.001, 0.0]), alpha_1sigma))
    assert_true(mvn.is_in_confidence_region(
        mvn.mean + np.array([2.0, 0.0]), alpha_2sigma))
    assert_false(mvn.is_in_confidence_region(
        mvn.mean + np.array([3.0, 0.0]), alpha_2sigma))
    assert_true(mvn.is_in_confidence_region(
        mvn.mean + np.array([0.0, 1.0]), alpha_1sigma))
    assert_true(mvn.is_in_confidence_region(
        mvn.mean + np.array([0.0, 2.0]), alpha_1sigma))
    assert_false(mvn.is_in_confidence_region(
        mvn.mean + np.array([0.0, 3.0]), alpha_1sigma))
    assert_true(mvn.is_in_confidence_region(
        mvn.mean + np.array([0.0, 4.0]), alpha_2sigma))
    assert_false(mvn.is_in_confidence_region(
        mvn.mean + np.array([0.0, 4.001]), alpha_2sigma))
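# Note on the convention pinned down by the boundary cases above: a point
# exactly one standard deviation away along a single axis lies on the
# boundary of the 0.6827 region, so the threshold on the squared
# Mahalanobis distance is chi2(1).ppf(alpha) for this 2D Gaussian
# (chi2(1).ppf(0.6827) = 1 and chi2(1).ppf(0.9545) = 4), matching the
# len(sample) - 1 degrees of freedom used in safe_sample above.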
""" ======================================================== Confidence Interval of a 1D Standard Normal Distribution ======================================================== We plot the 0.6827 confidence interval of a standard normal distribution in one dimension. The confidence interval is marked by green lines and the region outside of the confidence interval is marked by red lines. """ print(__doc__) import matplotlib.pyplot as plt import numpy as np from gmr import MVN mvn = MVN(mean=[0.0], covariance=[[1.0]]) alpha = 0.6827 X = np.linspace(-3, 3, 101)[:, np.newaxis] P = mvn.to_probability_density(X) for x, p in zip(X, P): conf = mvn.is_in_confidence_region(x, alpha) color = "g" if conf else "r" plt.plot([x[0], x[0]], [0, p], color=color) plt.plot(X.ravel(), P) plt.xlabel("x") plt.ylabel("Probability Density $p(x)$") plt.show()
Sometimes we want to avoid sampling regions of low probability. We will
see how this can be done in this example. We compare unconstrained
sampling with sampling from the 95.45 % and 68.27 % confidence regions.
In a one-dimensional Gaussian these would correspond to the 2-sigma and
1-sigma intervals, respectively.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from gmr import MVN, plot_error_ellipse


random_state = np.random.RandomState(100)
mvn = MVN(mean=np.array([0.0, 0.0]),
          covariance=np.array([[1.0, 2.0], [2.0, 9.0]]),
          random_state=random_state)

n_samples = 1000
plt.figure(figsize=(15, 5))

ax = plt.subplot(131)
ax.set_title("Unconstrained Sampling")
samples = mvn.sample(n_samples)
ax.scatter(samples[:, 0], samples[:, 1], alpha=0.9, s=1, label="Samples")
plot_error_ellipse(ax, mvn, factors=(1.0, 2.0), color="orange")
ax.set_xlim((-5, 5))
ax.set_ylim((-10, 10))

ax = plt.subplot(132)
def test_is_in_confidence_region_1d():
    """Test confidence region check for a one-dimensional MVN."""
    mvn = MVN(mean=[0.0], covariance=[[1.0]])
    assert_true(mvn.is_in_confidence_region([0.0], 1.0))