Example #1
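The snippets below come without their imports. A minimal, assumed import block is given here; names such as entropy_app_entropy, pyeeg_ap_entropy, entro_py_sampen, entro_py_fuzzyen, MultiscaleEntropy_sample_entropy, and MultiscaleEntropy_mse appear to be reference implementations defined locally in the test module, not public APIs.

# Assumed imports, not shown in the snippets below.
import numpy as np
import pandas as pd
import neurokit2 as nk
import nolds
from pyentrp import entropy as pyentrp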
def test_complexity_vs_R():

    signal = pd.read_csv(
        "https://raw.githubusercontent.com/neuropsychology/NeuroKit/master/data/bio_eventrelated_100hz.csv"
    )["RSP"].values
    r = 0.2 * np.std(signal, ddof=1)  # conventional tolerance: 20% of the sample SD

    # ApEn
    apen = nk.entropy_approximate(signal, dimension=2, r=r)
    assert np.allclose(apen, 0.04383386, atol=0.0001)
    apen = nk.entropy_approximate(signal, dimension=3, delay=2, r=1)
    assert np.allclose(apen, 0.0004269369, atol=0.0001)
    apen = nk.entropy_approximate(signal[0:200], dimension=2, delay=1, r=r)
    assert np.allclose(apen, 0.03632554, atol=0.0001)

    # SampEn
    sampen = nk.entropy_sample(signal[0:300], dimension=2, r=r)
    assert np.allclose(sampen,
                       nk.entropy_sample(signal[0:300],
                                         dimension=2,
                                         r=r,
                                         distance="infinity"),
                       atol=0.001)
    assert np.allclose(sampen, 0.03784376, atol=0.001)
    sampen = nk.entropy_sample(signal[0:300], dimension=3, delay=2, r=r)
    assert np.allclose(sampen, 0.09185509, atol=0.01)
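For orientation, a minimal standalone sketch of the same two calls on a synthetic signal. This is a sketch, assuming NeuroKit2 is installed; the parameter names follow the snippet above, and newer NeuroKit2 releases may return a (value, info) tuple instead of a bare float.

import numpy as np
import neurokit2 as nk

sig = np.cos(np.linspace(0, 30, 500))
r = 0.2 * np.std(sig, ddof=1)  # conventional tolerance: 20% of the sample SD

apen = nk.entropy_approximate(sig, dimension=2, r=r)
sampen = nk.entropy_sample(sig, dimension=2, r=r)

# Handle both return conventions (float in older releases, (value, info) in newer ones).
apen = apen[0] if isinstance(apen, tuple) else apen
sampen = sampen[0] if isinstance(sampen, tuple) else sampen
print(apen, sampen)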
Example #2
def test_complexity():

    signal = np.cos(np.linspace(start=0, stop=30, num=100))

    # Shannon
    assert np.allclose(nk.entropy_shannon(signal) - pyentrp.shannon_entropy(signal), 0)

    # Approximate
    assert np.allclose(nk.entropy_approximate(signal), 0.17364897858477146)
    assert np.allclose(nk.entropy_approximate(signal, 2, 0.2 * np.std(signal, ddof=1)) - entropy_app_entropy(signal, 2), 0)

    assert nk.entropy_approximate(signal, 2, 0.2 * np.std(signal, ddof=1)) != pyeeg_ap_entropy(signal, 2, 0.2 * np.std(signal, ddof=1))

    # Sample
    assert np.allclose(nk.entropy_sample(signal, 2, 0.2 * np.std(signal, ddof=1)) - entropy_sample_entropy(signal, 2), 0)
    assert np.allclose(nk.entropy_sample(signal, 2, 0.2) - nolds.sampen(signal, 2, 0.2), 0)
    assert np.allclose(nk.entropy_sample(signal, 2, 0.2) - entro_py_sampen(signal, 2, 0.2, scale=False), 0)
    assert np.allclose(nk.entropy_sample(signal, 2, 0.2) - pyeeg_samp_entropy(signal, 2, 0.2), 0)

    assert nk.entropy_sample(signal, 2, 0.2) != pyentrp.sample_entropy(signal, 2, 0.2)[1]
    assert nk.entropy_sample(signal, 2, 0.2 * np.sqrt(np.var(signal))) != MultiscaleEntropy_sample_entropy(signal, 2, 0.2)[0.2][2]

    # MSE
    # assert nk.entropy_multiscale(signal, 2, 0.2 * np.sqrt(np.var(signal))) != np.trapz(MultiscaleEntropy_mse(signal, [i + 1 for i in range(10)], 2, 0.2, return_type="list"))
    # assert nk.entropy_multiscale(signal, 2, 0.2 * np.std(signal, ddof=1)) != np.trapz(pyentrp.multiscale_entropy(signal, 2, 0.2, 10))

    # Fuzzy
    assert np.allclose(nk.entropy_fuzzy(signal, 2, 0.2, 1) - entro_py_fuzzyen(signal, 2, 0.2, 1, scale=False), 0)
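The MSE cross-checks above are commented out. For reference, a minimal sketch of the coarse-graining step that classic multiscale entropy repeats at each scale, written against plain NumPy rather than any of the compared libraries:

import numpy as np

def coarse_grain(signal, scale):
    # Classic MSE coarse-graining: average non-overlapping windows of length `scale`.
    signal = np.asarray(signal, dtype=float)
    n = len(signal) // scale
    return signal[:n * scale].reshape(n, scale).mean(axis=1)

# At scale 3, 100 samples reduce to 33 averaged samples.
coarse = coarse_grain(np.cos(np.linspace(0, 30, 100)), scale=3)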
Example #3
def test_complexity():

    signal = np.cos(np.linspace(start=0, stop=30, num=100))

    # Shannon
    assert np.allclose(nk.entropy_shannon(signal),
                       6.6438561897747395,
                       atol=0.0000001)
    assert nk.entropy_shannon(signal) == pyentrp.shannon_entropy(signal)

    # Approximate
    assert np.allclose(nk.entropy_approximate(signal),
                       0.17364897858477146,
                       atol=0.000001)
    assert np.allclose(nk.entropy_approximate(np.array([85, 80, 89] * 17)),
                       1.0996541105257052e-05,
                       atol=0.000001)
    #    assert nk.entropy_approximate(signal, 2, 0.2) == pyeeg.ap_entropy(signal, 2, 0.2)

    # Sample
    assert np.allclose(nk.entropy_sample(signal,
                                         order=2,
                                         r=0.2 * np.std(signal)),
                       nolds.sampen(signal,
                                    emb_dim=2,
                                    tolerance=0.2 * np.std(signal)),
                       atol=0.000001)
    #    assert nk.entropy_sample(signal, 2, 0.2) == pyeeg.samp_entropy(signal, 2, 0.2)
    #    pyentrp.sample_entropy(signal, 2, 0.2)  # Gives something different

    # Fuzzy
    assert np.allclose(nk.entropy_fuzzy(signal),
                       0.5216395432372958,
                       atol=0.000001)
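The asserted Shannon value is easy to verify by hand: the 100 cosine samples are all distinct, so each occurs with probability 1/100 and the base-2 entropy is log2(100) ≈ 6.6439. A minimal check:

import numpy as np

signal = np.cos(np.linspace(start=0, stop=30, num=100))
_, counts = np.unique(signal, return_counts=True)
p = counts / counts.sum()
shannon = -np.sum(p * np.log2(p))  # equals np.log2(100), since every sample is unique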
Example #4
def test_complexity_vs_Python():

    signal = np.cos(np.linspace(start=0, stop=30, num=100))

    # Shannon
    shannon = nk.entropy_shannon(signal)
    #    assert scipy.stats.entropy(shannon, pd.Series(signal).value_counts())
    assert np.allclose(shannon - pyentrp.shannon_entropy(signal), 0)

    # Approximate
    assert np.allclose(nk.entropy_approximate(signal), 0.17364897858477146)
    assert np.allclose(
        nk.entropy_approximate(
            signal, dimension=2, r=0.2 * np.std(signal, ddof=1)) -
        entropy_app_entropy(signal, 2), 0)

    assert (nk.entropy_approximate(signal, dimension=2, r=0.2 * np.std(signal, ddof=1))
            != pyeeg_ap_entropy(signal, 2, 0.2 * np.std(signal, ddof=1)))

    # Sample
    assert np.allclose(
        nk.entropy_sample(signal, dimension=2, r=0.2 * np.std(signal, ddof=1))
        - entropy_sample_entropy(signal, 2), 0)
    assert np.allclose(
        nk.entropy_sample(signal, dimension=2, r=0.2) -
        nolds.sampen(signal, 2, 0.2), 0)
    assert np.allclose(
        nk.entropy_sample(signal, dimension=2, r=0.2) -
        entro_py_sampen(signal, 2, 0.2, scale=False), 0)
    assert np.allclose(
        nk.entropy_sample(signal, dimension=2, r=0.2) -
        pyeeg_samp_entropy(signal, 2, 0.2), 0)

    #    import sampen
    #    sampen.sampen2(signal[0:300], mm=2, r=r)

    assert (nk.entropy_sample(signal, dimension=2, r=0.2)
            != pyentrp.sample_entropy(signal, 2, 0.2)[1])
    assert (nk.entropy_sample(signal, dimension=2, r=0.2 * np.sqrt(np.var(signal)))
            != MultiscaleEntropy_sample_entropy(signal, 2, 0.2)[0.2][2])

    # MSE
    #    assert nk.entropy_multiscale(signal, 2, 0.2*np.sqrt(np.var(signal))) != np.trapz(MultiscaleEntropy_mse(signal, [i+1 for i in range(10)], 2, 0.2, return_type="list"))
    #    assert nk.entropy_multiscale(signal, 2, 0.2*np.std(signal, ddof=1)) != np.trapz(pyentrp.multiscale_entropy(signal, 2, 0.2, 10))

    # Fuzzy
    assert np.allclose(
        nk.entropy_fuzzy(signal, dimension=2, r=0.2, delay=1) -
        entro_py_fuzzyen(signal, 2, 0.2, 1, scale=False), 0)

    # DFA
    assert (nk.fractal_dfa(signal, windows=np.array([4, 8, 12, 20]))
            != nolds.dfa(signal, nvals=[4, 8, 12, 20], fit_exp="poly"))
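The DFA assertion documents that nk.fractal_dfa and nolds.dfa are not expected to match exactly. A minimal standalone nolds call with the same illustrative window sizes, as a sketch assuming nolds is installed:

import numpy as np
import nolds

signal = np.cos(np.linspace(0, 30, 100))
# fit_exp="poly" requests a least-squares log-log fit instead of nolds' default RANSAC fit.
alpha = nolds.dfa(signal, nvals=[4, 8, 12, 20], fit_exp="poly")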
Example #5
def test_complexity():

    signal = np.cos(np.linspace(start=0, stop=30, num=100))

    # Shannon
    assert np.allclose(
        nk.entropy_shannon(signal) - pyentrp.shannon_entropy(signal), 0)

    # Approximate
    assert np.allclose(nk.entropy_approximate(signal), 0.17364897858477146)
    assert np.allclose(
        nk.entropy_approximate(signal, 2, 0.2 * np.std(signal, ddof=1)) -
        entropy_app_entropy(signal, 2), 0)

    assert (nk.entropy_approximate(signal, 2, 0.2 * np.std(signal, ddof=1))
            != pyeeg_ap_entropy(signal, 2, 0.2 * np.std(signal, ddof=1)))

    # Sample
    assert np.allclose(
        nk.entropy_sample(signal, 2, 0.2 * np.std(signal, ddof=1)) -
        entropy_sample_entropy(signal, 2), 0)
    assert np.allclose(
        nk.entropy_sample(signal, 2, 0.2) - nolds.sampen(signal, 2, 0.2), 0)
    assert np.allclose(
        nk.entropy_sample(signal, 2, 0.2) -
        entro_py_sampen(signal, 2, 0.2, scale=False), 0)
    assert np.allclose(
        nk.entropy_sample(signal, 2, 0.2) - pyeeg_samp_entropy(signal, 2, 0.2),
        0)

    assert nk.entropy_sample(signal, 2, 0.2) != pyentrp.sample_entropy(
        signal, 2, 0.2)[1]

    # Fuzzy
    assert np.allclose(
        nk.entropy_fuzzy(signal, 2, 0.2, 1) -
        entro_py_fuzzyen(signal, 2, 0.2, 1, scale=False), 0)
Example #6
def extract_entropy(X):
    # Extract a two-element entropy feature vector from a signal.
    sampen = nk.entropy_sample(X)
    apen = nk.entropy_approximate(X)
    return [sampen, apen]
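A hypothetical usage sketch (the signal is illustrative; with newer NeuroKit2 releases each element may be a (value, info) tuple rather than a float):

import numpy as np
import neurokit2 as nk

X = np.cos(np.linspace(0, 30, 200))
features = extract_entropy(X)  # [sample entropy, approximate entropy]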