Example #1
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Averaged-false-neighbors (AFN) test on a Henon map time series.

E1 saturates near an embedding dimension of 2, and E2 deviates from 1
at many values of d, so the series is deterministic rather than
stochastic.  The resulting plot matches Fig. 1 of Cao (1997) well.
"""

import matplotlib.pyplot as plt
import numpy as np

from nolitsa import data, dimension

# Time series: first coordinate of the Henon map.
x = data.henon()[:, 0]

# Run the AFN algorithm over embedding dimensions 1..11.
dim = np.arange(1, 12)
E, Es = dimension.afn(x, tau=1, dim=dim, window=5)

# Cao's ratios: E1(d) = E(d + 1) / E(d) and E2(d) = E*(d + 1) / E*(d).
E1 = E[1:] / E[:-1]
E2 = Es[1:] / Es[:-1]

plt.title(r'AFN for time series from the Henon map')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'bo-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 'go-', label=r'$E_2(d)$')
plt.legend()

plt.show()
Example #2
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Correlation dimension D2 of the Henon map.

The local-slope estimates produced here match the "accepted" value of
1.220 quite closely.
"""

import matplotlib.pyplot as plt
import numpy as np

from nolitsa import d2, data, utils

# Rescaled first coordinate of a 5000-point Henon series.
x = utils.rescale(data.henon(length=5000)[:, 0])

tau = 1
dim = np.arange(1, 11)

# Geometric range of distances at which C(r) is evaluated.
r_vals = utils.gprange(0.001, 1.0, 100)

plt.title('Local $D_2$ vs $r$ for Henon map')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

# One local-slope curve per embedding dimension; trim the noisy ends.
for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=2, r=r_vals):
    plt.semilogx(r[3:-3], d2.d2(r, c), color='#4682B4')

# Horizontal reference line at the accepted value D2 = 1.220.
plt.semilogx(r_vals, 1.220 * np.ones(100), color='#000000')
plt.show()
Example #3
0
    Notes
    -----
    The skew statistic is often touted to have good distinguishing power
    between nonlinearity and linearity.  But it is known to fail
    miserably in both cases (i.e., it often judges nonlinear series as
    linear and vice-versa) and should be avoided for serious analysis.
    """
    dx = x[t:] - x[:-t]
    dx = dx - np.mean(dx)
    return np.mean(dx**3) / np.mean(dx**2)**1.5


# Start with time series from the Henon map, take the first-difference,
# shuffle the increments, and calculate the cumulative sum of the
# shuffled increments.  Shuffling destroys temporal structure in the
# increments while preserving their distribution, producing a surrogate
# series for comparison against the original.
x = data.henon(length=(2**12))[:, 0]
dx = x[1:] - x[:-1]
np.random.shuffle(dx)
x = np.cumsum(dx)

plt.figure(1)

# Left panel: delay plot x(t) vs x(t + 5) of the surrogate series.
plt.subplot(121)
plt.title('Actual')
plt.xlabel(r'$x(t)$')
plt.ylabel(r'$x(t + \tau)$')
plt.plot(x[:-5], x[5:])

# Right panel: the time-reversed counterpart (plot continues below).
plt.subplot(122)
plt.title('Reversed')
plt.xlabel(r'$\hat{x}(t)$')
Example #4
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Maximum Lyapunov exponent of the Henon map.

The "accepted" value is ~ 0.419, which is quite close to the estimates
obtained here.  See Fig. 3(b) of Rosenstein et al. (1993).
"""

import matplotlib.pyplot as plt
import numpy as np

from nolitsa import data, lyapunov

# First coordinate of a 5000-point Henon time series.
x = data.henon(length=5000)[:, 0]

tau = 1    # time delay
dim = [2]  # embedding dimension

# Average-divergence curve for the single embedding dimension used.
d = lyapunov.mle_embed(x, dim=dim, tau=tau, maxt=25)[0]
t = np.arange(25)

plt.title('Maximum Lyapunov exponent for the Henon system')
plt.xlabel(r'Time $t$')
plt.ylabel(r'Average divergence $\langle d_i(t) \rangle$')
plt.plot(t, d)

# Reference line whose slope is the accepted exponent ~ 0.419.
plt.plot(t, t * 0.419 + d[0], '--')

plt.show()