import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

import neurokit2 as nk
import nolds
from pyentrp import entropy as pyentrp

# Note: the reference implementations used for cross-checking below
# (entropy_app_entropy, entropy_sample_entropy, pyeeg_ap_entropy,
# pyeeg_samp_entropy, entro_py_sampen, entro_py_fuzzyen,
# MultiscaleEntropy_sample_entropy) are assumed to be defined elsewhere in
# this module, copied from their respective packages.


def test_complexity_vs_R():

    signal = pd.read_csv(
        "https://raw.githubusercontent.com/neuropsychology/NeuroKit/master/data/bio_eventrelated_100hz.csv"
    )["RSP"].values
    r = 0.2 * np.std(signal, ddof=1)

    # ApEn
    apen = nk.entropy_approximate(signal, dimension=2, r=r)
    assert np.allclose(apen, 0.04383386, atol=0.0001)
    apen = nk.entropy_approximate(signal, dimension=3, delay=2, r=1)
    assert np.allclose(apen, 0.0004269369, atol=0.0001)
    apen = nk.entropy_approximate(signal[0:200], dimension=2, delay=1, r=r)
    assert np.allclose(apen, 0.03632554, atol=0.0001)

    # SampEn
    sampen = nk.entropy_sample(signal[0:300], dimension=2, r=r)
    assert np.allclose(
        sampen,
        nk.entropy_sample(signal[0:300], dimension=2, r=r, distance="infinity"),
        atol=0.001,
    )
    assert np.allclose(sampen, 0.03784376, atol=0.001)
    sampen = nk.entropy_sample(signal[0:300], dimension=3, delay=2, r=r)
    assert np.allclose(sampen, 0.09185509, atol=0.01)
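
# For orientation, a minimal sample-entropy sketch showing what the values
# above measure: the negative log ratio of template matches of length
# dimension + 1 to matches of length dimension, under a Chebyshev distance
# tolerance r (the "infinity" distance tested above). This is an illustrative
# re-implementation, not NeuroKit's code.
def _sampen_sketch(x, dimension=2, r=0.2):
    x = np.asarray(x, dtype=float)
    n_templates = len(x) - dimension  # same template count for both lengths

    def _matches(m):
        templates = np.array([x[i : i + m] for i in range(n_templates)])
        # Chebyshev (max-coordinate) distance between all template pairs
        dist = np.max(np.abs(templates[:, None, :] - templates[None, :, :]), axis=2)
        return np.sum(dist <= r) - n_templates  # drop self-matches

    return -np.log(_matches(dimension + 1) / _matches(dimension))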
def test_complexity():

    signal = np.cos(np.linspace(start=0, stop=30, num=100))

    # Shannon
    assert np.allclose(nk.entropy_shannon(signal), 6.6438561897747395, atol=0.0000001)
    assert nk.entropy_shannon(signal) == pyentrp.shannon_entropy(signal)

    # Approximate
    assert np.allclose(nk.entropy_approximate(signal), 0.17364897858477146, atol=0.000001)
    assert np.allclose(
        nk.entropy_approximate(np.array([85, 80, 89] * 17)), 1.0996541105257052e-05, atol=0.000001
    )
    # assert nk.entropy_approximate(signal, 2, 0.2) == pyeeg.ap_entropy(signal, 2, 0.2)

    # Sample
    assert np.allclose(
        nk.entropy_sample(signal, dimension=2, r=0.2 * np.std(signal)),
        nolds.sampen(signal, emb_dim=2, tolerance=0.2 * np.std(signal)),
        atol=0.000001,
    )
    # assert nk.entropy_sample(signal, 2, 0.2) == pyeeg.samp_entropy(signal, 2, 0.2)
    # pyentrp.sample_entropy(signal, 2, 0.2)  # Gives something different

    # Fuzzy
    assert np.allclose(nk.entropy_fuzzy(signal), 0.5216395432372958, atol=0.000001)
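
# The Shannon value asserted above can be verified by hand: all 100 samples of
# the cosine are distinct, so the empirical distribution is uniform over 100
# values and the entropy is log2(100) ~= 6.6438561897747395 bits. A minimal
# sketch matching pyentrp's value-count convention (illustrative only):
def _shannon_sketch(x):
    _, counts = np.unique(x, return_counts=True)
    p = counts / counts.sum()
    return -np.sum(p * np.log2(p))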
def test_complexity_sanity():

    signal = np.cos(np.linspace(start=0, stop=30, num=1000))

    # Entropy
    assert np.allclose(nk.entropy_fuzzy(signal), nk.entropy_sample(signal, fuzzy=True), atol=0.000001)

    # Fractal
    assert np.allclose(nk.fractal_dfa(signal, windows=np.array([4, 8, 12, 20])), 2.1009048365682133, atol=0.000001)
    assert np.allclose(nk.fractal_dfa(signal), 1.957966586191164, atol=0.000001)
    assert np.allclose(nk.fractal_dfa(signal, multifractal=True), 1.957966586191164, atol=0.000001)

    assert np.allclose(nk.fractal_correlation(signal), 0.7884473170763334, atol=0.000001)
    assert np.allclose(nk.fractal_correlation(signal, r="nolds"), nolds.corr_dim(signal, 2), atol=0.0001)
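
# A rough sketch of the DFA procedure behind the fractal_dfa() values above:
# integrate the signal, remove a local linear trend within windows of several
# sizes, and read the scaling exponent off a log-log fit of the residual
# fluctuation against window size. Illustrative only; NeuroKit's
# implementation differs in its details (window handling, trend order, defaults).
def _dfa_sketch(x, windows=(4, 8, 12, 20)):
    y = np.cumsum(x - np.mean(x))  # integrated "profile" of the signal
    fluctuations = []
    for w in windows:
        rms = []
        for i in range(len(y) // w):
            segment = y[i * w : (i + 1) * w]
            t = np.arange(w)
            trend = np.polyval(np.polyfit(t, segment, 1), t)  # local linear trend
            rms.append(np.sqrt(np.mean((segment - trend) ** 2)))
        fluctuations.append(np.mean(rms))
    # The slope of log F(w) vs log w is the DFA scaling exponent (alpha)
    return np.polyfit(np.log(windows), np.log(fluctuations), 1)[0]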
def test_complexity_vs_Python():

    signal = np.cos(np.linspace(start=0, stop=30, num=100))

    # Shannon
    shannon = nk.entropy_shannon(signal)
    # assert scipy.stats.entropy(shannon, pd.Series(signal).value_counts())
    assert np.allclose(shannon - pyentrp.shannon_entropy(signal), 0)

    # Approximate
    assert np.allclose(nk.entropy_approximate(signal), 0.17364897858477146)
    assert np.allclose(
        nk.entropy_approximate(signal, dimension=2, r=0.2 * np.std(signal, ddof=1))
        - entropy_app_entropy(signal, 2),
        0,
    )
    assert nk.entropy_approximate(
        signal, dimension=2, r=0.2 * np.std(signal, ddof=1)
    ) != pyeeg_ap_entropy(signal, 2, 0.2 * np.std(signal, ddof=1))

    # Sample
    assert np.allclose(
        nk.entropy_sample(signal, dimension=2, r=0.2 * np.std(signal, ddof=1))
        - entropy_sample_entropy(signal, 2),
        0,
    )
    assert np.allclose(nk.entropy_sample(signal, dimension=2, r=0.2) - nolds.sampen(signal, 2, 0.2), 0)
    assert np.allclose(
        nk.entropy_sample(signal, dimension=2, r=0.2) - entro_py_sampen(signal, 2, 0.2, scale=False), 0
    )
    assert np.allclose(nk.entropy_sample(signal, dimension=2, r=0.2) - pyeeg_samp_entropy(signal, 2, 0.2), 0)

    # import sampen
    # sampen.sampen2(signal[0:300], mm=2, r=r)

    assert nk.entropy_sample(signal, dimension=2, r=0.2) != pyentrp.sample_entropy(signal, 2, 0.2)[1]
    assert nk.entropy_sample(
        signal, dimension=2, r=0.2 * np.sqrt(np.var(signal))
    ) != MultiscaleEntropy_sample_entropy(signal, 2, 0.2)[0.2][2]

    # MSE
    # assert nk.entropy_multiscale(signal, 2, 0.2*np.sqrt(np.var(signal))) != np.trapz(MultiscaleEntropy_mse(signal, [i+1 for i in range(10)], 2, 0.2, return_type="list"))
    # assert nk.entropy_multiscale(signal, 2, 0.2*np.std(signal, ddof=1)) != np.trapz(pyentrp.multiscale_entropy(signal, 2, 0.2, 10))

    # Fuzzy
    assert np.allclose(
        nk.entropy_fuzzy(signal, dimension=2, r=0.2, delay=1)
        - entro_py_fuzzyen(signal, 2, 0.2, 1, scale=False),
        0,
    )

    # DFA
    assert nk.fractal_dfa(signal, windows=np.array([4, 8, 12, 20])) != nolds.dfa(
        signal, nvals=[4, 8, 12, 20], fit_exp="poly"
    )
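
# The inequality assertions above reflect convention differences between
# implementations (self-matches, template counts, tolerance scaling) rather
# than bugs. For orientation, a compact textbook-style approximate-entropy
# sketch (illustrative, not any of the compared implementations): unlike
# SampEn, ApEn keeps self-matches and averages log match-rates per template.
def _apen_sketch(x, dimension=2, r=0.2):
    x = np.asarray(x, dtype=float)

    def _phi(m):
        n = len(x) - m + 1
        templates = np.array([x[i : i + m] for i in range(n)])
        dist = np.max(np.abs(templates[:, None, :] - templates[None, :, :]), axis=2)
        c = np.sum(dist <= r, axis=0) / n  # match rate, self-matches included
        return np.mean(np.log(c))

    return _phi(dimension) - _phi(dimension + 1)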
# =============================================================================
# Complexity
# =============================================================================

# Generate signal
signal = nk.signal_simulate(duration=20, sampling_rate=200, noise=0.01)

# Find optimal Tau for time-delay embedding
optimal_delay = nk.embedding_delay(signal, show=True)

# Save plot (h_pad is a tight_layout() parameter, not a savefig() one)
fig = plt.gcf()
fig.set_size_inches(10, 6)
fig.tight_layout(h_pad=3)
fig.savefig("README_embedding.png", dpi=300)

nk.entropy_sample(signal)

# =============================================================================
# Statistics
# =============================================================================

x = np.random.normal(loc=0, scale=1, size=100000)
ci_min, ci_max = nk.hdi(x, ci=0.95, show=True)

# Save plot
fig = plt.gcf()
fig.set_size_inches(10 / 1.5, 6 / 1.5)
fig.tight_layout(h_pad=3)
fig.savefig("README_hdi.png", dpi=300)

# =============================================================================
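
# nk.hdi() above returns the bounds of the highest density interval:
# conceptually, the narrowest interval containing a fraction `ci` of the
# samples. A quick sketch of that idea (illustrative only, not NeuroKit's
# implementation):
def _hdi_sketch(samples, ci=0.95):
    s = np.sort(samples)
    n_included = int(np.ceil(ci * len(s)))
    widths = s[n_included - 1 :] - s[: len(s) - n_included + 1]
    start = np.argmin(widths)  # the narrowest window wins
    return s[start], s[start + n_included - 1]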
def extract_entropy(X):
    """Return sample entropy and approximate entropy of a 1-D signal as a feature pair."""
    sampen = nk.entropy_sample(X)
    apen = nk.entropy_approximate(X)
    return [sampen, apen]
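
# A hedged usage sketch: applying the extractor over fixed-size epochs of a
# simulated signal to build a small feature matrix. The epoch length and the
# simulated signal are arbitrary choices here, and scalar return values are
# assumed (as in the API version used above).
if __name__ == "__main__":
    sig = nk.signal_simulate(duration=10, sampling_rate=100)
    features = [extract_entropy(sig[i : i + 200]) for i in range(0, len(sig), 200)]
    print(np.array(features).shape)  # (n_epochs, 2)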