import sys
import random

import numpy
from pyunicorn.timeseries import RecurrencePlot, RecurrenceNetwork

# NL (noise levels), SignalX (signal realizations), N (series length) and
# UnivariateSurrogates (surrogate generator) are assumed to be defined at module level.

def run_main_prog(params):
    #start = time.clock()
    (m, numSig) = params  # m indexes the noise level, numSig indexes the realization
    numSurr = 99
    SignalAndSurr = numpy.zeros((numSurr+1,N),dtype='float32')
    noise = NL[m]*numpy.std(SignalX[numSig,:].copy())*numpy.random.normal(0,1,N)
    SignalAndSurr[0,:] = SignalX[numSig,:].copy()+noise
    for j in range(1,numSurr+1,1):
        SignalAndSurr[j,:] = UnivariateSurrogates(SignalAndSurr[0,:].copy(),120)
    
    T = numpy.zeros((numSurr+1,1),dtype='float32')          
    
    for k in range(0,numSurr+1,1):
        ts = SignalAndSurr[k,:]
        ts.shape = (-1,1)
        psv = RecurrencePlot.embed_time_series(ts,dim=3,tau=3)
        randomVertices = random.sample(range(psv.shape[0]), int(sys.argv[1]))  # subsample state vectors to keep the network small
        R = RecurrenceNetwork(psv[randomVertices,:],recurrence_rate=float(sys.argv[2]),silence_level=2)
        T[k] = R.transitivity() # compute network measure for hypothesis testing

    if T[0] > max(T[1:]):  # non-parametric rank test: with 99 surrogates, the one-sided significance level is 1/(numSurr+1) = 0.01
        H0 = 1  # null hypothesis rejected
    else:
        H0 = 0
    #elapsed = (time.clock() - start)
    #print elapsed
    return H0
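
# A minimal usage sketch (an assumption, not part of the original snippet):
# run_main_prog takes a (noise level index, realization index) tuple, so it can
# be mapped over a parameter grid, e.g. with multiprocessing. The pool size is
# arbitrary; NL and SignalX are the module-level arrays assumed above.
if __name__ == "__main__":
    from multiprocessing import Pool

    param_grid = [(m, numSig)
                  for m in range(len(NL))
                  for numSig in range(SignalX.shape[0])]
    with Pool(processes=4) as pool:
        rejections = pool.map(run_main_prog, param_grid)
    # Fraction of cases in which the null hypothesis was rejected
    print(sum(rejections) / len(rejections))
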
#  This fragment assumes that rp (a RecurrencePlot at fixed recurrence rate RR),
#  time_series, DIM, TAU and METRIC were defined earlier, as in the fuller snippet below.
#  Calculate and print the recurrence rate again to check if it worked...
RR = rp.recurrence_rate()
print("Recurrence rate:", RR)

#  Calculate some standard RQA measures
DET = rp.determinism(l_min=2)
LAM = rp.laminarity(v_min=2)

print("Determinism:", DET)
print("Laminarity:", LAM)

#  Generate a recurrence network at fixed recurrence rate
rn = RecurrenceNetwork(time_series,
                       dim=DIM,
                       tau=TAU,
                       metric=METRIC,
                       normalize=False,
                       recurrence_rate=RR)

#  Calculate average path length, transitivity and assortativity
L = rn.average_path_length()
T = rn.transitivity()
C = rn.global_clustering()
R = rn.assortativity()

print("Average path length:", L)
print("Transitivity:", T)
print("Global clustering:", C)
print("Assortativity:", R)
        local_result[measure] = np.empty(t_steps)

    #  Initialize progress bar
    progress = progressbar.ProgressBar().start()

    #  Loop over moving windows
    for j in range(t_steps):
        #  Get time series section for current window
        time_series = data[j * delta:j * delta + T_embedded]
        local_step_sequence[j] = j * delta + T_embedded // 2  # midpoint of the current window

        #  Prepare recurrence network from original data
        rec_net = RecurrenceNetwork(time_series.flatten(),
                                    dim=DIM,
                                    tau=TAU,
                                    metric=METRIC,
                                    normalize=False,
                                    silence_level=2,
                                    recurrence_rate=RR)

        #  Calculations for original recurrence network
        local_result["Average path length"][j] = rec_net.average_path_length()
        local_result["Transitivity"][j] = rec_net.transitivity()

        #local_result["Assortativity"][j] = rec_net.assortativity()
        #local_result["Diameter"][j] = rec_net.diameter()

        #  Calculate RQA measures
        #local_result["Determinism"][j] = rec_net.determinism()
        #local_result["Laminarity"][j] = rec_net.laminarity()
        #local_result["Mean diagonal line length"][j] = rec_net.average_diaglength()
# Example 4
#https://www.researchgate.net/post/How_can_we_find_out_which_value_of_embedding_dimensions_is_more_accurate
# when choosing emb_dim for a Takens embedding, a rule of thumb is to have at least 10^d data points
# for embedding dimension d: 10^1 points for 1D, 10^2 for 2D, ..., 10^6 for 6D

#FALSE NEAREST NEIGHBOR FOR DETERMINING MINIMAL EMBEDDING DIMENSION
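
# A minimal O(n^2) sketch of the false nearest neighbour criterion
# (Kennel et al. 1992) for choosing the embedding dimension. The function name,
# the rtol threshold and max_dim are illustrative assumptions, not taken from
# the notes above; intended only for modest series lengths.
import numpy as np

def false_nearest_neighbors(x, tau, max_dim=10, rtol=15.0):
    """Fraction of false nearest neighbours for embedding dimensions 1..max_dim."""
    x = np.asarray(x, dtype=float)
    fractions = []
    for d in range(1, max_dim + 1):
        n_vectors = len(x) - d * tau              # leave room for the (d+1)-th coordinate
        if n_vectors < 2:
            break
        # delay embedding in dimension d
        emb = np.column_stack([x[i * tau:i * tau + n_vectors] for i in range(d)])
        false_count = 0
        for i in range(n_vectors):
            # nearest neighbour of point i in d dimensions (excluding itself)
            dists = np.linalg.norm(emb - emb[i], axis=1)
            dists[i] = np.inf
            j = np.argmin(dists)
            # distance added by the extra coordinate when going to dimension d+1
            extra = abs(x[i + d * tau] - x[j + d * tau])
            if dists[j] > 0 and extra / dists[j] > rtol:
                false_count += 1
        fractions.append(false_count / n_vectors)
    return fractions

# Example: false_nearest_neighbors(np.sin(np.linspace(0, 10 * np.pi, 1000)), tau=3, max_dim=6);
# the first dimension with a fraction near zero is a reasonable minimal embedding dimension.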

#MEASURES OF COMPLEXITY
# https://hackaday.io/project/707-complexity-of-a-time-series

# general entropy with a discrete pdf, H = -sum_i p_i * log(p_i); we cannot use it here because we do not have well-defined discrete states
# Approximate entropy 
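
# A compact sketch of approximate entropy (Pincus 1991). The function name and
# the default tolerance r = 0.2 * std are illustrative assumptions; the pairwise
# distance matrix needs O(N^2) memory, so this is meant for modest series lengths.
import numpy as np

def approximate_entropy(x, m=2, r=None):
    """Approximate entropy ApEn(m, r) of a 1D time series."""
    x = np.asarray(x, dtype=float)
    n = len(x)
    if r is None:
        r = 0.2 * np.std(x)               # common default: 20% of the standard deviation

    def phi(m):
        # all overlapping templates of length m
        templates = np.array([x[i:i + m] for i in range(n - m + 1)])
        # Chebyshev (maximum) distance between every pair of templates
        dists = np.max(np.abs(templates[:, None, :] - templates[None, :, :]), axis=2)
        # fraction of templates within tolerance r of each template (self-matches included)
        counts = np.mean(dists <= r, axis=1)
        return np.mean(np.log(counts))

    return phi(m) - phi(m + 1)

# Example: approximate_entropy(np.random.normal(size=1000), m=2)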


# Recurrence plot / recurrence network
from pyunicorn.timeseries import RecurrenceNetwork
x = np.sin(np.linspace(0, 10*np.pi, 1000))
net = RecurrenceNetwork(x, recurrence_rate=0.05)

# NOTE: we can always compute all the rolling measures in Python and analyse them in R!



########

# Parameters
window_size = 100
emb_dim = 4
rolling = rolling_window(df.logR_ask.dropna(), window_size, 10) #dropped NaN from logR
#rolling_ns = rolling_window(df.ask, window_size, 10)
#rolling_ts = rolling_window(df.index, window_size, 10)

import pandas as pd

df_ = pd.DataFrame(rolling)            # one window per row
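
# rolling_window is not defined in this snippet; a minimal sketch of one
# possible implementation, consistent with how it is called above (the helper
# itself and its window/step signature are assumptions):
import numpy as np

def rolling_window(series, window, step):
    """Return an array with one overlapping window of length `window` per row, taken every `step` samples."""
    values = np.asarray(series, dtype=float)
    starts = range(0, len(values) - window + 1, step)
    return np.array([values[s:s + window] for s in starts])

# Each row of df_ then holds one window; per the note above, the rolling results
# can be exported for analysis in R, e.g. df_.to_csv("rolling_windows.csv", index=False).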
import numpy as np
from pyunicorn.timeseries import RecurrenceNetwork

x = np.sin(np.linspace(0, 10 * np.pi, 1000))
net = RecurrenceNetwork(x, recurrence_rate=0.05)
print(net.transitivity())
#  Generate a recurrence plot object with fixed recurrence rate RR
rp = RecurrencePlot(time_series, dim=DIM, tau=TAU, metric=METRIC,
                    normalize=False, recurrence_rate=RR)

#  Calculate and print the recurrence rate again to check if it worked...
RR = rp.recurrence_rate()
print "Recurrence rate:", RR

#  Calculate some standard RQA measures
DET = rp.determinism(l_min=2)
LAM = rp.laminarity(v_min=2)

print "Determinism:", DET
print "Laminarity:", LAM

#  Generate a recurrence network at fixed recurrence rate
rn = RecurrenceNetwork(time_series, dim=DIM, tau=TAU, metric=METRIC,
                       normalize=False, recurrence_rate=RR)

#  Calculate average path length, transitivity and assortativity
L = rn.average_path_length()
T = rn.transitivity()
C = rn.global_clustering()
R = rn.assortativity()

print "Average path length:", L
print "Transitivity:", T
print "Global clustering:", C
print "Assortativity:", R
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 19 11:25:43 2015

@author: George
"""

from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
from pyunicorn.timeseries import RecurrenceNetwork

x = np.sin(np.linspace(0, 10 * np.pi, 1000))
net = RecurrenceNetwork(x, recurrence_rate=0.05)
print(net.transitivity())