Example #1
    def test_line(self):
        # Test dimension.afn() by embedding a line.
        # Particle moving uniformly in 1-D.
        a, b = np.random.random(2)
        t = np.arange(100)
        x = a + b * t
        dim = np.arange(1, 10 + 2)
        window = 10

        # Chebyshev distances between near-neighbors remain bounded.
        # This gives "cleaner" results when embedding known objects like
        # a line.  For a line, E = 1.0 for all dimensions as expected,
        # whereas it is (d + 1) / d (for cityblock) and sqrt(d + 1) /
        # sqrt(d) for Euclidean.  In both cases, E -> 1.0 at large d,
        # but E = 1.0 is definitely preferred.
        for metric in ('chebyshev', 'cityblock', 'euclidean'):
            Es_des = (window + 1) * b

            if metric == 'chebyshev':
                E_des = 1.0
            elif metric == 'cityblock':
                E_des = (dim + 1) / dim
            elif metric == 'euclidean':
                E_des = np.sqrt((dim + 1) / dim)

            E, Es = dimension.afn(x, dim=dim, metric=metric, window=window)

            assert_allclose(E_des, E)
            assert_allclose(Es_des, Es)
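A quick way to see where the expected values above come from is to compare distances between the same pair of delay vectors in dimensions d and d + 1. The sketch below (plain NumPy; the line parameters and the helper delay_vector are chosen only for illustration) reproduces the 1, (d + 1)/d and sqrt((d + 1)/d) ratios quoted in the comment.

import numpy as np

# For x(t) = a + b*t embedded with unit delay, every component of the
# difference between two delay vectors equals b*(i - j).  Going from
# dimension d to d + 1 therefore leaves the Chebyshev distance unchanged,
# scales the cityblock distance by (d + 1)/d, and the Euclidean distance
# by sqrt((d + 1)/d).
a, b = 0.3, 0.7
x = a + b * np.arange(100)

def delay_vector(x, i, d):
    # Delay vector of dimension d starting at index i (time delay = 1).
    return x[i:i + d]

i, j = 3, 11
for d in (1, 2, 5):
    u, v = delay_vector(x, i, d), delay_vector(x, j, d)
    U, V = delay_vector(x, i, d + 1), delay_vector(x, j, d + 1)

    print(d,
          np.max(np.abs(U - V)) / np.max(np.abs(u - v)),    # 1.0
          np.sum(np.abs(U - V)) / np.sum(np.abs(u - v)),    # (d + 1)/d
          np.linalg.norm(U - V) / np.linalg.norm(u - v))    # sqrt((d + 1)/d)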
Example #2
    def test_noise(self):
        # Test dimension.afn() using uncorrelated random numbers.
        x = np.random.random(1000)
        dim = np.arange(1, 5 + 2)
        window = 10
        metric = 'chebyshev'
        E, Es = dimension.afn(x, dim=dim, metric=metric, window=window)
        E2 = Es[1:] / Es[:-1]

        # The standard deviation of E2 should be ~ 0 for uncorrelated
        # random numbers [Ramdani et al., Physica D 223, 229 (2006)].
        # Additionally, the mean of E2 should be ~ 1.0.
        assert_allclose(np.std(E2), 0, atol=0.1)
        assert_allclose(np.mean(E2), 1, atol=0.1)
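The criterion described in the comment above can also be packaged into a small reusable check. The helper below (looks_random is a hypothetical name, not part of NoLiTSA) simply encodes the same two conditions used in the assertions.

import numpy as np

def looks_random(E2, tol=0.1):
    # E2(d) flat and close to 1 for all d is the signature of uncorrelated
    # noise [Ramdani et al., Physica D 223, 229 (2006)].
    E2 = np.asarray(E2)
    return np.std(E2) < tol and abs(np.mean(E2) - 1.0) < tol

# e.g. looks_random(Es[1:] / Es[:-1]) should be True for the series above.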
Example #3
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""AFN for time series from the Henon map.

E1 saturates near an embedding dimension of 2.  E2 != 1 at many values
of d.  Thus the series is definitely deterministic.  The plot matches
Fig. 1 of Cao (1997) pretty well.
"""

from nolitsa import data, dimension
import matplotlib.pyplot as plt
import numpy as np

# Generate data.
x = data.henon()[:, 0]

# AFN algorithm.
dim = np.arange(1, 10 + 2)
E, Es = dimension.afn(x, tau=1, dim=dim, window=5)
E1, E2 = E[1:] / E[:-1], Es[1:] / Es[:-1]

plt.title(r'AFN for time series from the Henon map')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.show()
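Reading the saturation point of E1 off the plot can also be automated. The helper below (min_embedding_dim is a hypothetical name, not part of NoLiTSA) returns the smallest d at which E1 is already within a tolerance of its final value; the 10% tolerance is an arbitrary choice for illustration.

import numpy as np

def min_embedding_dim(dim, E1, tol=0.1):
    # dim is the array passed to afn(); E1 has one element fewer.
    # Returns the smallest d whose E1(d) lies within a relative tolerance
    # of the saturated (final) value of E1.
    E1 = np.asarray(E1)
    saturated = np.abs(E1 - E1[-1]) <= tol * np.abs(E1[-1])
    return dim[:-1][saturated][0]

# e.g. min_embedding_dim(dim, E1) for the Henon series above should return a
# value near 2, consistent with the docstring and Fig. 1 of Cao (1997).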
Example #4
"""AFN for time series from the Mackey-Glass system.

The minimum embedding dimension comes out to be 5-7 (depending on the
initial condition) with both E1 and E2 curves giving very strong hints
of determinism.  According to Grassberger & Procaccia (1983), the
correlation dimension of the Mackey-Glass system with a delay of 23 is
~ 2.5.  Thus, the results are definitely comparable.
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import data, dimension

x = data.mackey_glass(tau=23.0, sample=0.46, n=1000)

# Since we're resampling the time series using a sampling step of
# 0.46, the time delay required is 23.0/0.46 = 50.
tau = 50
dim = np.arange(1, 16 + 2)

# AFN algorithm.
E, Es = dimension.afn(x, tau=tau, dim=dim, window=100)
E1, E2 = E[1:] / E[:-1], Es[1:] / Es[:-1]

plt.title(r'AFN for time series from the Mackey-Glass system')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.show()
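The hard-coded tau = 50 above is just the physical delay of the system divided by the resampling step, as the comment notes. A generic version of that conversion (the function name is illustrative only) would be:

def delay_in_samples(physical_delay, sample_step):
    # Number of samples corresponding to a delay given in the same time
    # units as the sampling step.
    return int(round(physical_delay / sample_step))

# delay_in_samples(23.0, 0.46) == 50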
Example #5
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""AFN for time series from the Rössler oscillator.

E1 saturates near an embedding dimension of 4.  E2 != 1 at many values
of d.  Thus, the series is definitely deterministic.
"""

from nolitsa import data, dimension
import matplotlib.pyplot as plt
import numpy as np

# Generate data.
x = data.roessler()[1][:, 0]

# AFN algorithm.
dim = np.arange(1, 10 + 2)
E, Es = dimension.afn(x, tau=14, dim=dim, window=45, metric='cityblock')
E1, E2 = E[1:] / E[:-1], Es[1:] / Es[:-1]

plt.title(r'AFN for time series from the Rössler oscillator')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.show()
Example #6
from nolitsa import data, dimension, utils
import matplotlib.pyplot as plt
import numpy as np

# Generate data.
x = data.roessler(length=5000)[1][:, 0]

# Convert to 8-bit.
x = np.int8(utils.rescale(x, (-127, 127)))

# Add uniform noise at two different levels.
y1 = x + (-0.001 + 0.002 * np.random.random(len(x)))
y2 = x + (-0.5 + 1.0 * np.random.random(len(x)))

# AFN algorithm.
dim = np.arange(1, 10 + 2)
F, Fs = dimension.afn(y1, tau=14, dim=dim, window=40)
F1, F2 = F[1:] / F[:-1], Fs[1:] / Fs[:-1]

E, Es = dimension.afn(y2, tau=14, dim=dim, window=40)
E1, E2 = E[1:] / E[:-1], Es[1:] / Es[:-1]

plt.figure(1)
plt.title(r'AFN after corrupting with uniform noise in $[-0.001, 0.001]$')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], F1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], F2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.figure(2)
plt.title(r'AFN after corrupting with uniform noise in $[-0.5, 0.5]$')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.show()
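For reference, the 8-bit conversion step above can be written without NoLiTSA. The helper below (rescale_to is a hypothetical name) linearly maps the series onto a given interval, which is what utils.rescale is used for here before np.int8() quantizes the result.

import numpy as np

def rescale_to(x, lo=-127.0, hi=127.0):
    # Linearly map x onto [lo, hi].
    x = np.asarray(x, dtype=float)
    return lo + (hi - lo) * (x - x.min()) / (x.max() - x.min())

# x8 = np.int8(rescale_to(x))   # plays the same role as
#                               # np.int8(utils.rescale(x, (-127, 127)))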
Example #7
import matplotlib.pyplot as plt
import numpy as np

from nolitsa import delay, dimension, utils

# AR(1) process x[i] = a * x[i - 1] + eta[i] driven by Gaussian white noise.
# N (the length of the series) is not shown in the truncated snippet; the
# value below is only an assumption to keep the example self-contained.
N = 5000
x = np.empty(N)

np.random.seed(999)
eta = np.random.normal(size=N, loc=0.0, scale=1.0)
a = 0.99

x[0] = eta[0]
for i in range(1, N):
    x[i] = a * x[i - 1] + eta[i]

x = utils.rescale(x)

# Estimate the autocorrelation time as the first lag at which the
# autocorrelation drops below 1/e.
tau = np.argmax(delay.acorr(x) < 1.0 / np.e)

# AFN without any minimum temporal separation.
dim = np.arange(1, 10 + 2)
F, Fs = dimension.afn(x, tau=tau, dim=dim, window=0)
F1, F2 = F[1:] / F[:-1], Fs[1:] / Fs[:-1]

# AFN with a minimum temporal separation (equal to the autocorrelation
# time) between near-neighbors.
dim = np.arange(1, 10 + 2)
E, Es = dimension.afn(x, tau=tau, dim=dim, window=tau)
E1, E2 = E[1:] / E[:-1], Es[1:] / Es[:-1]

plt.figure(1)
plt.title(r'AR(1) process with $a = 0.99$')
plt.xlabel(r'i')
plt.ylabel(r'$x_i$')
plt.plot(x)

plt.figure(2)
plt.title(r'AFN without any minimum temporal separation')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], F1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], F2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.figure(3)
plt.title(r'AFN with a minimum temporal separation between near-neighbors')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.show()
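As a sanity check on the autocorrelation time estimated above: the autocorrelation of an AR(1) process at lag k is a**k, so it first drops below 1/e near k = -1/ln(a).

import numpy as np

# With a = 0.99 the 1/e crossing is at about -1/ln(0.99) ~ 99.5, so the
# estimated tau should come out close to 100 samples.
print(-1.0 / np.log(0.99))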
Example #8
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""AFN for time series from the Lorenz attractor.

E1 saturates near an embedding dimension of 3.  E2 != 1 at many values
of d.  Thus the series is definitely deterministic.  The plot matches
Fig. 3 of Cao (1997) rather nicely.
"""

from nolitsa import data, dimension
import matplotlib.pyplot as plt
import numpy as np

# Generate data.
x = data.lorenz()[1][:, 0]

# AFN algorithm.
dim = np.arange(1, 10 + 2)
E, Es = dimension.afn(x, tau=5, dim=dim, window=20)
E1, E2 = E[1:] / E[:-1], Es[1:] / Es[:-1]

plt.title(r'AFN for time series from the Lorenz attractor')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.show()