Example #1
Expected: D2 = 2 / (alpha - 1) = 2.0

Of course, this value is not due to the existence of any invariant
measure.  What is being measured here is the fractal dimension of the
Brownian trail.  The scaling region vanishes if we impose a nonzero
Theiler window, telling us that the underlying system is not
low dimensional (a sketch of this check is appended after the example).
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import d2, data, utils

np.random.seed(101)
x = utils.rescale(data.falpha(alpha=2.0, length=(2 ** 14))[:10 * 1000])

dim = np.arange(1, 10 + 1)
tau = 500

plt.title('Local $D_2$ vs $r$ for Brown noise')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=0,
                        r=utils.gprange(0.001, 1.0, 100)):
    plt.semilogx(r[2:-2], d2.d2(r, c, hwin=2), color='#4682B4')

plt.semilogx(utils.gprange(0.001, 1.0, 100), 2.0 * np.ones(100),
             color='#000000')
plt.show()
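
# The docstring above claims that the plateau near D_2 = 2 disappears once
# a nonzero Theiler window is imposed, since the value reflects the
# geometry of the Brownian trail rather than an invariant measure.  A
# minimal sketch of that check follows; the Theiler window of 100 is an
# illustrative choice, not part of the original example.
plt.figure()
plt.title('Local $D_2$ vs $r$ for Brown noise (Theiler window = 100)')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=100,
                        r=utils.gprange(0.001, 1.0, 100)):
    plt.semilogx(r[2:-2], d2.d2(r, c, hwin=2), color='#4682B4')

plt.semilogx(utils.gprange(0.001, 1.0, 100), 2.0 * np.ones(100),
             color='#000000')
plt.show()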
Example #2
File: ikeda.py  Project: yanyan-cas/nolitsa
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""D2 of the Ikeda map.

The estimates here match the "accepted" value of 1.690 quite closely.
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import d2, data, utils

x = utils.rescale(data.ikeda(length=5000)[:, 0])

dim = np.arange(1, 10 + 1)
tau = 1

plt.title('Local $D_2$ vs $r$ for Ikeda map')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

for r, c in d2.c2_embed(x,
                        tau=tau,
                        dim=dim,
                        window=2,
                        r=utils.gprange(0.001, 1.0, 100)):
    plt.semilogx(r[3:-3], d2.d2(r, c), color='#4682B4')

plt.semilogx(utils.gprange(0.001, 1.0, 100),
             1.690 * np.ones(100),
             color='#000000')
plt.show()
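
# The docstring says the estimates match the accepted value of 1.690.  One
# way to reduce the curves above to a number is to fit a straight line to
# log C(r) versus log r over the scaling region.  A minimal sketch follows;
# the embedding dimension of 5 and the fitting range (0.005, 0.05) are
# illustrative assumptions, not part of the original example.
r_fit = utils.gprange(0.001, 1.0, 100)
for r, c in d2.c2_embed(x, tau=tau, dim=[5], window=2, r=r_fit):
    scaling = (r > 0.005) & (r < 0.05) & (c > 0)
    slope, _ = np.polyfit(np.log(r[scaling]), np.log(c[scaling]), 1)
    print('Estimated D2 (dim = 5): %.3f' % slope)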
Example #3
import numpy as np
from numpy.testing import assert_
from nolitsa import utils

def test_rescale():
    # Test utils.rescale(): the minimum and maximum of the rescaled
    # series should land exactly on the requested interval endpoints.
    x = 1.0 + np.random.random(100)
    y = utils.rescale(x, interval=(-np.pi, np.pi))
    assert_(abs(np.min(y)) == np.max(y) == np.pi)
Example #4
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""D2 of the Lorenz system.

The estimates here match the "accepted" value of 2.068 quite closely.
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import d2, data, utils

x = utils.rescale(data.lorenz(length=5000)[1][:, 0])

dim = np.arange(1, 10 + 1)
tau = 5

plt.title('Local $D_2$ vs $r$ for Lorenz attractor')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=50):
    plt.semilogx(r[3:-3], d2.d2(r, c), color='#4682B4')

plt.semilogx(utils.gprange(0.001, 1.0, 100),
             2.068 * np.ones(100),
             color='#000000')
plt.show()
Example #5
# -*- coding: utf-8 -*-
"""Correlation sum/D2 for a spiral.

A spiral, though a one-dimensional curve, is a nonstationary object.
Thus, the estimated correlation dimension depends heavily on the
Theiler window used.  However, the values of C(r) at large r are
roughly the same in either case.
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import d2, utils

phi = np.linspace(2 * np.pi, 52 * np.pi, 1000)
x = phi * np.cos(phi)
x = utils.rescale(x)

dim = np.arange(1, 10 + 1)
tau = 10
r = utils.gprange(0.01, 1.0, 100)

plt.figure(1)
plt.title('Correlation sum $C(r)$ without any Theiler window')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Correlation sum $C(r)$')

for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=0, r=r):
    plt.loglog(r, c)

plt.figure(2)
plt.title('Correlation sum $C(r)$ with a Theiler window of 100')
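
# The snippet is cut off at this point.  A minimal sketch of how the
# comparison announced by the figure title above might continue; the
# exact continuation is an assumption based on the first figure.
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Correlation sum $C(r)$')

for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=100,
                        r=utils.gprange(0.01, 1.0, 100)):
    plt.loglog(r, c)

plt.show()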
Example #6
But the quality of reconstruction depends on the noise level.  Adding
an insignificant amount of noise does not help at all!  This is
probably one of the rare cases where a higher level of additive noise
improves the results.
"""

from nolitsa import data, dimension, utils
import matplotlib.pyplot as plt
import numpy as np

# Generate data.
x = data.roessler(length=5000)[1][:, 0]

# Convert to 8-bit.
x = np.int8(utils.rescale(x, (-127, 127)))

# Add uniform noise at two different noise levels.
y1 = x + (-0.001 + 0.002 * np.random.random(len(x)))
y2 = x + (-0.5 + 1.0 * np.random.random(len(x)))

# AFN algorithm.
dim = np.arange(1, 10 + 2)
F, Fs = dimension.afn(y1, tau=14, dim=dim, window=40)
F1, F2 = F[1:] / F[:-1], Fs[1:] / Fs[:-1]

E, Es = dimension.afn(y2, tau=14, dim=dim, window=40)
E1, E2 = E[1:] / E[:-1], Es[1:] / Es[:-1]

plt.figure(1)
plt.title(r'AFN after corrupting with uniform noise in $[-0.001, 0.001]$')
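
# The snippet is cut off at this point.  A minimal sketch of how the AFN
# statistics for the two noise levels might be plotted; the plotting style
# below is an assumption, not part of the original example.
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], F1, 'o-', label=r'$E_1(d)$')
plt.plot(dim[:-1], F2, 's-', label=r'$E_2(d)$')
plt.legend()

plt.figure(2)
plt.title(r'AFN after corrupting with uniform noise in $[-0.5, 0.5]$')
plt.xlabel(r'Embedding dimension $d$')
plt.ylabel(r'$E_1(d)$ and $E_2(d)$')
plt.plot(dim[:-1], E1, 'o-', label=r'$E_1(d)$')
plt.plot(dim[:-1], E2, 's-', label=r'$E_2(d)$')
plt.legend()

plt.show()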
Example #7
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""D2 of the Henon map.

The estimates here match the "accepted" value of 1.220 quite closely.
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import d2, data, utils

x = utils.rescale(data.henon(length=5000)[:, 0])

dim = np.arange(1, 10 + 1)
tau = 1

plt.title('Local $D_2$ vs $r$ for Henon map')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=2,
                        r=utils.gprange(0.001, 1.0, 100)):
    plt.semilogx(r[3:-3], d2.d2(r, c), color='#4682B4')

plt.semilogx(utils.gprange(0.001, 1.0, 100), 1.220 * np.ones(100),
             color='#000000')
plt.show()
Example #8
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""D2 of the Rössler oscillator.

The estimates here match the "accepted" value of 1.991 quite closely.
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import d2, data, utils

x0 = [-3.2916983, -1.42162302, 0.02197593]
x = utils.rescale(data.roessler(length=5000, x0=x0)[1][:, 0])

dim = np.arange(1, 10 + 1)
tau = 14

plt.title(u'Local $D_2$ vs $r$ for Rössler oscillator')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

for r, c in d2.c2_embed(x,
                        tau=tau,
                        dim=dim,
                        window=50,
                        r=utils.gprange(0.001, 1.0, 100)):
    plt.semilogx(r[3:-3], d2.d2(r, c), color='#4682B4')

plt.semilogx(utils.gprange(0.001, 1.0, 100),
             1.991 * np.ones(100),
             color='#000000')
plt.show()
Example #9
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""D2 for the Mackey-Glass system.

The estimates here are, depending on the initial condition, sometimes
lower than the value (D2 ~ 2.4) calculated by Grassberger & Procaccia
(1983).  One should average over an ensemble of initial conditions in
such cases (a sketch of this is appended after the example).
"""

import numpy as np
import matplotlib.pyplot as plt
from nolitsa import d2, data, utils

x = utils.rescale(data.mackey_glass(tau=23.0, sample=0.46, n=1000))

# Since we're resampling the time series using a sampling step of
# 0.46, the time delay required is 23.0/0.46 = 50.
tau = 50
dim = np.arange(1, 10 + 1)

plt.title('Local $D_2$ vs $r$ for Mackey-Glass system')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')

for r, c in d2.c2_embed(x, tau=tau, dim=dim, window=100,
                        r=utils.gprange(0.001, 1.0, 100)):
    plt.semilogx(r[3:-3], d2.d2(r, c), color='#4682B4')

plt.semilogx(utils.gprange(0.001, 1.0, 100), 2.4 * np.ones(100),
             color='#000000')
plt.show()
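
# The docstring suggests averaging over an ensemble of initial conditions
# when a single realization underestimates D2.  A minimal sketch of one
# way to do that follows; the five realizations, the embedding dimension
# of 7, and the averaging of C(r) before taking local slopes are
# illustrative assumptions, not part of the original example.  With x0
# left unspecified, each call to data.mackey_glass() is assumed to start
# from a different random initial condition.
cs = []
for _ in range(5):
    xi = utils.rescale(data.mackey_glass(tau=23.0, sample=0.46, n=1000))
    for ri, ci in d2.c2_embed(xi, tau=tau, dim=[7], window=100,
                              r=utils.gprange(0.001, 1.0, 100)):
        cs.append(ci)

plt.figure()
plt.title('Local $D_2$ from an ensemble-averaged $C(r)$ (illustrative)')
plt.xlabel(r'Distance $r$')
plt.ylabel(r'Local $D_2$')
plt.semilogx(ri[3:-3], d2.d2(ri, np.mean(cs, axis=0)), color='#4682B4')
plt.semilogx(utils.gprange(0.001, 1.0, 100), 2.4 * np.ones(100),
             color='#000000')
plt.show()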
Example #10
The data was obtained from 2048 continuous samples of dataset B1
(starting from the 12750th) of the Santa Fe time series contest [1].
This data is low dimensional and is thought to be a limit cycle [see
Example 10.7 of Kantz & Schreiber (2004)].  As can be seen, the
structure of the limit cycle is much more prominent when the filtered
time series is used.

[1]: http://www.physionet.org/physiobank/database/santa-fe/
"""

import numpy as np
from nolitsa import noise, utils
import matplotlib.pyplot as plt

x = utils.rescale(np.loadtxt('../series/br2.dat')[:, 1])
y = noise.nored(x, dim=7, r=0.23, repeat=5, tau=1)

plt.figure(1)
plt.title('Noisy human breath data')
plt.xlabel(r'$x(t)$')
plt.ylabel(r'$x(t + \tau)$')
plt.plot(x[:-1], x[1:], '.')

plt.figure(2)
plt.title('Filtered human breath data')
plt.xlabel(r'$x(t)$')
plt.ylabel(r'$x(t + \tau)$')
plt.plot(y[:-1], y[1:], '.')

plt.show()