Example #1
def knn_graph(ts, n=None, tau=None, k=4):
    """This function creates an k-NN network represented as an adjacency matrix A using a 1-D time series
    
    Args:
        ts (1-D array): 1-D time series signal
    
    Other Parameters:
        n (Optional[int]): embedding dimension for state space reconstruction. Default uses the FNN algorithm from the parameter_selection module.
        tau (Optional[int]): embedding delay for state space reconstruction. Default uses the MI algorithm from the parameter_selection module.
        k (Optional[int]): number of nearest neighbors for graph formation. Default is k = 4.
        
    Returns:
        [2-D square array]: A (2-D weighted and directed square adjacency matrix)
    """

    #import sub modules
    import os
    import sys
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
    from teaspoon.SP import tsa_tools

    if tau is None:
        from parameter_selection import MI_delay
        tau = MI_delay.MI_for_delay(ts,
                                    method='basic',
                                    h_method='sturge',
                                    k=2,
                                    ranking=True)
    if n is None:
        from parameter_selection import FNN_n
        perc_FNN, n = FNN_n.FNN_n(ts, tau)

    ETS = tsa_tools.takens(ts, n, tau)  #get embedded time series

    distances, indices = tsa_tools.k_NN(ETS, k=k)
    #gets distances between embedded vectors and the indices of the nearest neighbors for every vector

    A = Adjacency_KNN(indices)  #get adjacency matrix (weighted, directed)

    return A
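
A minimal usage sketch for knn_graph; it assumes teaspoon is installed and that the function is importable from teaspoon.SP.network (the module path in recent teaspoon releases):

import numpy as np
from teaspoon.SP.network import knn_graph  #assumed import path

t = np.linspace(0, 30, 300)
ts = np.sin(t) + np.sin(2 * t)  #simple deterministic test signal

A = knn_graph(ts, k=4)  #n and tau are estimated automatically when omitted
print(A.shape)  #square adjacency matrix (one row/column per embedded vector)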
Example #2
    plt.xticks(size=TextSize)
    plt.yticks(size=TextSize)
    plt.ylabel(r'$h(3)$', size=TextSize)
    plt.xlabel(r'$\tau$', size=TextSize)
    plt.show()

    #-----------------------------------persistent entropy---------------------------

    import numpy as np
    #generate a simple time series with noise
    t = np.linspace(0, 20, 200)
    ts = np.sin(t) + np.random.normal(0, 0.1, len(t))

    from teaspoon.SP.tsa_tools import takens
    #embed the time series into 2-dimensional space using Takens' embedding
    embedded_ts = takens(ts, n=2, tau=15)

    from ripser import ripser
    #compute the persistent homology of the Vietoris-Rips filtration
    result = ripser(embedded_ts, maxdim=1)
    diagram = result['dgms']
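    #'dgms' is a list of diagrams indexed by homology dimension:
    #diagram[0] holds (birth, death) pairs for H0, diagram[1] for H1 loops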

    #--------------------Plot embedding and persistence diagram---------------
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
    gs = gridspec.GridSpec(1, 2)
    plt.figure(figsize=(12, 5))
    TextSize = 17
    MS = 4

    ax = plt.subplot(gs[0, 0])
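
The excerpt stops before the entropy step itself; below is a minimal sketch of persistent entropy computed from the diagram above, using the standard definition E = -sum_i p_i log(p_i), where p_i are the normalized bar lifetimes. The helper persistent_entropy is illustrative, not teaspoon's API:

def persistent_entropy(dgm):
    #illustrative helper: persistent entropy of a single persistence diagram
    lifetimes = dgm[:, 1] - dgm[:, 0]  #bar lengths (death - birth)
    lifetimes = lifetimes[np.isfinite(lifetimes)]  #drop any infinite bars
    p = lifetimes / np.sum(lifetimes)  #normalize lifetimes to probabilities
    return -np.sum(p * np.log(p))  #Shannon entropy of the lifetime distribution

print(persistent_entropy(diagram[1]))  #entropy of the loop (H1) diagram from above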
Example #3
def cgss_graph(ts,
               n=None,
               tau=None,
               B=10,
               embedding_method='standard',
               binning_method='equal_size'):
    """This function creates a coarse grained state space network represented as an adjacency matrix A using a 1-D time series. Note: this method reduces the size of the adjacency matrix by removing unused vertices.
    
    Args:
        ts (1-D array): 1-D time series signal
    
    Other Parameters:
        n (Optional[int]): embedding dimension for state space reconstruction. Default uses the FNN algorithm from the parameter_selection module.
        tau (Optional[int]): embedding delay for state space reconstruction. Default uses the MI algorithm from the parameter_selection module.
        B (Optional[int]): number of states per dimension. Default is 10.
        embedding_method (Optional[string]): 'standard' or 'difference', with 'standard' as default. 'standard' uses the state space reconstruction vectors directly and 'difference' uses the changes along each vector's coordinates.
        binning_method (Optional[string]): binning method, either 'equal_size' or 'equal_frequency', with 'equal_size' as default.
        
    Returns:
        [2-D square array]: A (2-D weighted and directed square adjacency matrix)
    """
    #get the coarse grained state space network represented as an adjacency matrix.

    #import sub modules
    from teaspoon.SP import tsa_tools
    import numpy as np

    def equalObs(x, B):
        #calculate B equal-frequency bin edges by interpolating over the sorted data
        return np.interp(np.linspace(0, len(x), B), np.arange(len(x)),
                         np.sort(x))

    #----------------Get embedding parameters if not defined----------------
    if tau is None:
        from teaspoon.parameter_selection import MI_delay
        tau = MI_delay.MI_for_delay(ts,
                                    method='basic',
                                    h_method='sturge',
                                    k=2,
                                    ranking=True)
    if n is None:
        from teaspoon.parameter_selection import FNN_n
        perc_FNN, n = FNN_n.FNN_n(ts, tau)

    #get state space reconstruction from signal (SSR)
    SSR = tsa_tools.takens(ts, n, tau)

    #----------------Define how to use the embedding----------------
    if embedding_method == 'difference':  #uses differences along the coordinate axis of the SSR
        delta = np.diff(
            SSR, axis=1)  #get differences in SSR vectors along coordinate axis
        delta = delta.T  #transpose so each row holds one difference coordinate
        embedding = delta
        basis = B**(np.arange(n - 1))  #basis for assigning symbolic values
    if embedding_method == 'standard':  #uses the standard state space reconstruction vectors
        embedding = np.array(SSR).T
        basis = B**(np.arange(n))  #basis for assigning symbolic values

    #----------------Define how to partition the embedding----------------
    if binning_method == 'equal_frequency':
        #define bins with equal-frequency or probability (approximately)
        B_array = equalObs(embedding.flatten(), B + 1)
        B_array[-1] = B_array[-1] * (
            1 + 10**-10)  #stretch the last edge so the maximum value lands inside the final bin
    if binning_method == 'equal_size':  #define bins based on equal spacing
        B_array = np.linspace(np.amin(embedding),
                              np.amax(embedding) * (1 + 10**-10), B + 1)

    #----------------digitize the embedding to a symbol sequence----------------
    digitized_embedding = []  #prime the digitized version of the embedding
    for e_i in embedding:  #loop through the embedding coordinates
        digitized_vector = np.digitize(
            e_i, bins=B_array)  #digitize coordinate e_i
        digitized_embedding.append(
            digitized_vector)  #append to the digitized embedding
    digitized_embedding = np.array(
        digitized_embedding).T - 1  #stack digitized vectors; shift bin indices to start at 0
    symbol_seq = np.sum(np.array(basis) * digitized_embedding,
                        axis=1)  #symbolic sequence from basis and digitized embedding

    #get adjacency matrix from sequence
    A = Adjacency_CGSS(symbol_seq)

    return A
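
To make the digitize-and-basis step concrete, here is a small standalone sketch (toy values, independent of teaspoon) of how each embedded vector's per-coordinate bin indices combine as digits of a base-B integer, yielding one symbol per state:

import numpy as np

B, n = 4, 2  #4 bins per coordinate, 2-D embedding
points = np.array([[0.1, 0.9], [0.5, 0.2]])  #two embedded vectors in [0, 1)
edges = np.linspace(0.0, 1.0 + 1e-10, B + 1)  #equal-size bin edges

bins = np.digitize(points, edges) - 1  #per-coordinate bin indices in 0..B-1
basis = B**np.arange(n)  #[1, B]: place values of a base-B number
symbols = np.sum(bins * basis, axis=1)  #one integer symbol per embedded vector
print(symbols)  #[12  2]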