Example #1
def test_uniform1():
    """ Test uniform distribution """
    for n in range(2, 10):
        d = uniform(n)
        assert d.outcomes == tuple(range(n))
        assert d[0] == pytest.approx(1 / n)
        assert entropy(d) == pytest.approx(np.log2(n))
Example #2
def test_uniform1():
    """ Test uniform distribution """
    for n in range(2, 10):
        d = uniform(n)
        assert d.outcomes == tuple(range(n))
        assert d[0] == pytest.approx(1/n)
        assert entropy(d) == pytest.approx(np.log2(n))
Example #3
def test_uniform1():
    """ Test uniform distribution """
    for n in range(2, 10):
        d = uniform(n)
        assert_equal(d.outcomes, tuple(range(n)))
        assert_almost_equal(d[0], 1 / n)
        assert_almost_equal(entropy(d), np.log2(n))
Example #4
def test_uniform1():
    """ Test uniform distribution """
    for n in range(2, 10):
        d = uniform(n)
        assert_equal(d.outcomes, tuple(range(n)))
        assert_almost_equal(d[0], 1/n)
        assert_almost_equal(entropy(d), np.log2(n))
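The four listings above all check the same identity: a uniform distribution over n outcomes gives each outcome probability 1/n and has Shannon entropy log2(n) bits. A minimal standalone check of that identity, using only the Distribution constructor and the entropy import that appear in the later listings (the uniform() helper's own import is not shown in these snippets), is:

import numpy as np

from dit import Distribution
from dit.shannon import entropy

n = 4
# an explicit uniform distribution over n single-character outcomes, probability 1/n each
d = Distribution([str(i) for i in range(n)], [1 / n] * n)
assert np.isclose(entropy(d), np.log2(n))  # H(uniform over n outcomes) = log2(n) = 2 bits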
Example #5
def test_simple_rd_5():
    """
    Test against known result, using Blahut-Arimoto.
    """
    dist = Distribution(['0', '1'], [1 / 2, 1 / 2])
    rd = RDCurve(dist, beta_num=10, method='ba')
    for r, d in zip(rd.rates, rd.distortions):
        assert r == pytest.approx(1 - entropy(d))
Example #6
def test_simple_rd_1():
    """
    Test against known result, using scipy.
    """
    dist = Distribution(['0', '1'], [1 / 2, 1 / 2])
    rd = RDCurve(dist, beta_num=10)
    for r, d in zip(rd.rates, rd.distortions):
        assert r == pytest.approx(1 - entropy(d))
Example #7
def test_simple_rd_5():
    """
    Test against known result, using Blahut-Arimoto.
    """
    dist = Distribution(['0', '1'], [1/2, 1/2])
    rd = RDCurve(dist, beta_num=10, method='ba')
    for r, d in zip(rd.rates, rd.distortions):
        assert r == pytest.approx(1 - entropy(d))
Example #8
def test_simple_rd_1():
    """
    Test against known result, using scipy.
    """
    dist = Distribution(['0', '1'], [1/2, 1/2])
    rd = RDCurve(dist, beta_num=10)
    for r, d in zip(rd.rates, rd.distortions):
        assert r == pytest.approx(1 - entropy(d))
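The expected value 1 - entropy(d) in the four rate-distortion listings above is the classic rate-distortion function of a fair coin under Hamming distortion, R(D) = 1 - H(D) for 0 <= D <= 1/2, with H the binary entropy in bits. A small self-contained check of that formula (it exercises only the formula, not RDCurve, whose import is not shown in the snippets):

import numpy as np

def binary_entropy(p):
    # H(p) = -p*log2(p) - (1 - p)*log2(1 - p), in bits
    return -p * np.log2(p) - (1 - p) * np.log2(1 - p)

# R(D) = 1 - H(D) for a uniform binary source under Hamming distortion;
# this is the relation the tests verify point by point along the computed curve.
for D in (0.05, 0.1, 0.25, 0.4):
    print(f"D = {D:.2f}  ->  R(D) = {1 - binary_entropy(D):.4f} bits")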
"""

import pytest

from dit import Distribution as D
from dit.multivariate import wyner_common_information as C
from dit.multivariate.common_informations.wyner_common_information import WynerCommonInformation
from dit.shannon import entropy

outcomes = ['0000', '0001', '0110', '0111', '1010', '1011', '1100', '1101']
pmf = [1 / 8] * 8
xor = D(outcomes, pmf)

sbec = lambda p: D(['00', '0e', '1e', '11'], [(1 - p) / 2, p / 2, p / 2,
                                              (1 - p) / 2])
C_sbec = lambda p: 1 if p < 1 / 2 else entropy(p)


@pytest.mark.slow
@pytest.mark.flaky(reruns=5)
@pytest.mark.parametrize(('rvs', 'crvs', 'val'), [
    (None, None, 2.0),
    ([[0], [1], [2]], None, 2.0),
    ([[0], [1]], [2, 3], 1.0),
    ([[0], [1]], [2], 1.0),
    ([[0], [1]], None, 0.0),
])
def test_wci1(rvs, crvs, val):
    """
    Test against known values.
    """
Example #10
"""

import pytest

from dit import Distribution as D
from dit.multivariate import exact_common_information as G
from dit.multivariate.common_informations.exact_common_information import ExactCommonInformation
from dit.shannon import entropy

outcomes = ['0000', '0001', '0110', '0111', '1010', '1011', '1100', '1101']
pmf = [1 / 8] * 8
xor = D(outcomes, pmf)

sbec = lambda p: D(['00', '0e', '1e', '11'], [(1 - p) / 2, p / 2, p / 2,
                                              (1 - p) / 2])
G_sbec = lambda p: min(1, entropy(p) + 1 - p)


@pytest.mark.slow
@pytest.mark.flaky(reruns=5)
@pytest.mark.parametrize(('rvs', 'crvs', 'val'), [
    (None, None, 2.0),
    ([[0], [1], [2]], None, 2.0),
    ([[0], [1]], [2, 3], 1.0),
    ([[0], [1]], [2], 1.0),
    ([[0], [1]], None, 0.0),
])
def test_eci1(rvs, crvs, val):
    """
    Test against known values.
    """
Example #11
from __future__ import division

import pytest

from dit import Distribution as D
from dit.multivariate import exact_common_information as G
from dit.multivariate.common_informations.exact_common_information import ExactCommonInformation
from dit.shannon import entropy

outcomes = ['0000', '0001', '0110', '0111', '1010', '1011', '1100', '1101']
pmf = [1/8]*8
xor = D(outcomes, pmf)

sbec = lambda p: D(['00', '0e', '1e', '11'], [(1-p)/2, p/2, p/2, (1-p)/2])
G_sbec = lambda p: min(1, entropy(p) + 1 - p)


@pytest.mark.slow
@pytest.mark.flaky(reruns=5)
@pytest.mark.parametrize(('rvs', 'crvs', 'val'), [
    (None, None, 2.0),
    ([[0], [1], [2]], None, 2.0),
    ([[0], [1]], [2, 3], 1.0),
    ([[0], [1]], [2], 1.0),
    ([[0], [1]], None, 0.0),
])
def test_eci1(rvs, crvs, val):
    """
    Test against known values.
    """
Example #12
from __future__ import division
import pytest

from dit import Distribution as D
from dit.multivariate import wyner_common_information as C
from dit.multivariate.common_informations.wyner_common_information import WynerCommonInformation
from dit.shannon import entropy


outcomes = ['0000', '0001', '0110', '0111', '1010', '1011', '1100', '1101']
pmf = [1/8]*8
xor = D(outcomes, pmf)

sbec = lambda p: D(['00', '0e', '1e', '11'], [(1-p)/2, p/2, p/2, (1-p)/2])
C_sbec = lambda p: 1 if p < 1/2 else entropy(p)


@pytest.mark.slow
@pytest.mark.flaky(reruns=5)
@pytest.mark.parametrize(('rvs', 'crvs', 'val'), [
    (None, None, 2.0),
    ([[0], [1], [2]], None, 2.0),
    ([[0], [1]], [2, 3], 1.0),
    ([[0], [1]], [2], 1.0),
    ([[0], [1]], None, 0.0),
])
def test_wci1(rvs, crvs, val):
    """
    Test against known values.
    """