def test_base_differential_entropy_with_axis_0_is_equal_to_default(self): random_state = np.random.RandomState(0) values = random_state.standard_normal((100, 3)) entropy = stats.differential_entropy(values, axis=0) default_entropy = stats.differential_entropy(values) assert_allclose(entropy, default_entropy)
def test_differential_entropy_vasicek_2d_nondefault_axis(self): random_state = np.random.RandomState(0) values = random_state.standard_normal((3, 100)) entropy = stats.differential_entropy(values, axis=1, method='vasicek') assert_allclose( entropy, [1.342551, 1.341826, 1.293775], rtol=1e-6, ) entropy = stats.differential_entropy(values, axis=1, window_length=1, method='vasicek') assert_allclose( entropy, [1.122044, 1.102944, 1.129616], rtol=1e-6, ) entropy = stats.differential_entropy(values, axis=1, window_length=8, method='vasicek') assert_allclose( entropy, [1.349401, 1.338514, 1.292332], rtol=1e-6, )
def test_base_differential_entropy_transposed(self): random_state = np.random.RandomState(0) values = random_state.standard_normal((3, 100)) assert_allclose( stats.differential_entropy(values.T).T, stats.differential_entropy(values, axis=1), )
def test_input_validation(self):
    """Invalid ``base`` and ``method`` arguments raise ValueError."""
    sample = np.random.rand(10)

    with pytest.raises(ValueError,
                       match="`base` must be a positive number or `None`."):
        stats.differential_entropy(sample, base=-2)

    with pytest.raises(ValueError, match="`method` must be one of..."):
        stats.differential_entropy(sample, method='ekki-ekki')
def test_consistency(self, method): # test that method is a consistent estimator n = 10000 if method == 'correa' else 1000000 rvs = stats.norm.rvs(size=n, random_state=0) expected = stats.norm.entropy() res = stats.differential_entropy(rvs, method=method) assert_allclose(res, expected, rtol=0.005)
def test_differential_entropy_vasicek(self): random_state = np.random.RandomState(0) values = random_state.standard_normal(100) entropy = stats.differential_entropy(values, method='vasicek') assert_allclose(entropy, 1.342551, rtol=1e-6) entropy = stats.differential_entropy(values, window_length=1, method='vasicek') assert_allclose(entropy, 1.122044, rtol=1e-6) entropy = stats.differential_entropy(values, window_length=8, method='vasicek') assert_allclose(entropy, 1.349401, rtol=1e-6)
def test_differential_entropy_raises_value_error(self):
    """Out-of-range window lengths raise ValueError with a message that
    names the offending window length and the sample size.

    Uses ``pytest.raises`` for consistency with ``test_input_validation``
    (``numpy.testing.assert_raises`` does not support ``match=``), and a
    tuple instead of a set so the iteration order is deterministic.
    """
    rng = np.random.RandomState(0)
    sample = rng.standard_normal((3, 100))
    sample_size = sample.shape[1]
    error_template = (
        r"Window length \({window_length}\) must be positive and less "
        r"than half the sample size \({sample_size}\).")
    # Negative, zero, exactly half, and full sample size are all invalid.
    for window_length in (-1, 0, sample_size // 2, sample_size):
        message = error_template.format(
            window_length=window_length,
            sample_size=sample_size,
        )
        with pytest.raises(ValueError, match=message):
            stats.differential_entropy(
                sample,
                window_length=window_length,
                axis=1,
            )
def test_expon_rmse_std(self, method, expected): # test that RMSE and standard deviation of estimators matches values # given in differential_entropy reference [6]. Incidentally, also # tests vectorization. reps, n, m = 10000, 50, 7 rmse_expected, std_expected = expected rvs = stats.expon.rvs(size=(reps, n), random_state=0) true_entropy = stats.expon.entropy() res = stats.differential_entropy(rvs, window_length=m, method=method, axis=-1) assert_allclose(np.sqrt(np.mean((res - true_entropy)**2)), rmse_expected, atol=0.005) assert_allclose(np.std(res), std_expected, atol=0.002)
def test_method_auto(self, n, method): rvs = stats.norm.rvs(size=(n, ), random_state=0) res1 = stats.differential_entropy(rvs) res2 = stats.differential_entropy(rvs, method=method) assert res1 == res2