Example #1
def causal_score(x1, x2, lag=40, embed=8):
    # embed both time series with the given lag and embedding dimension
    e1 = ccm.Embed(x1)
    e2 = ccm.Embed(x2)
    X1 = e1.embed_vectors_1d(lag, embed)
    X2 = e2.embed_vectors_1d(lag, embed)

    # split the embedded time series
    x1tr, x1te, x2tr, x2te = train_test_split(X1, X2, percent=.75)

    CCM = ccm.CCM()  # instantiate the CCM class

    # library lengths to test: here, just the full training-set length
    len_tr = len(x1tr)
    # lib_lens = np.arange(10, len_tr, len_tr/20, dtype='int')
    lib_lens = [len_tr]

    # test causation
    CCM.fit(x1tr, x2tr)
    x1p, x2p = CCM.predict(x1te, x2te, lib_lengths=lib_lens)

    # prediction skill in each direction
    sc1, sc2 = CCM.score()

    return sc1, sc2
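The snippet relies on imports that are not shown. A minimal sketch of how it might be wired up and called; the import paths are assumed from the skccm package, and the input series are made up:

import numpy as np
import skccm as ccm                            # assumed import alias
from skccm.utilities import train_test_split   # assumed module path

# made-up pair of series: x2 is a lagged, noisy copy of x1
rng = np.random.default_rng(0)
x1 = np.sin(np.linspace(0, 50, 2000)) + 0.1 * rng.standard_normal(2000)
x2 = np.roll(x1, 5) + 0.1 * rng.standard_normal(2000)

sc1, sc2 = causal_score(x1, x2, lag=40, embed=8)
print(sc1, sc2)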
Example #2
def causality():
    sr = 16000  # sample rate in Hz
    sound1, sound2, base = load_data(sr)  # project-specific loader (not shown here)
    # keep the 12-33 s segment of both recordings
    sound1 = sound1[12 * sr:33 * sr]
    base = base[12 * sr:33 * sr]
    embed = 2
    idx = 1
    for i in [1, 2, 4, 6, 8]:
        lag = i * sr  # lag of i seconds, expressed in samples
        e1 = ccm.Embed(sound1)
        e2 = ccm.Embed(base)
        X1 = e1.embed_vectors_1d(lag, embed)
        X2 = e2.embed_vectors_1d(lag, embed)
        x1tr, x1te, x2tr, x2te = train_test_split(X1, X2, percent=.75)
        CCM = ccm.CCM()
        len_tr = len(x1tr)
        lib_lens = np.arange(10, len_tr, len_tr / 20, dtype='int')
        CCM.fit(x1tr, x2tr)
        x1p, x2p = CCM.predict(x1te, x2te, lib_lengths=lib_lens)
        sc1, sc2 = CCM.score()
        plt.subplot(5, 1, idx)
        # plt.plot(lib_lens, sc1)
        plt.plot(lib_lens, sc2)
        idx += 1
    plt.show()
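`load_data` is a helper from the original project and is not shown here. A hypothetical stand-in that loads the three audio tracks with librosa might look like this (the file names are placeholders, not from the original code):

import librosa

def load_data(sr):
    # hypothetical stand-in: the real project defines this elsewhere
    sound1, _ = librosa.load("sound1.wav", sr=sr)
    sound2, _ = librosa.load("sound2.wav", sr=sr)
    base, _ = librosa.load("base.wav", sr=sr)
    return sound1, sound2, base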
Example #3
def calculateCCM(data1, data2):
    lag = 1
    embed = 2
    # library lengths at cumulative month boundaries (non-leap-year day counts)
    lib_lens = np.cumsum([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
    # lib_lens = [len(data1)]
    e1 = ccm.Embed(np.array(data1))
    e2 = ccm.Embed(np.array(data2))
    X1 = e1.embed_vectors_1d(lag, embed)
    X2 = e2.embed_vectors_1d(lag, embed)
    x1tr, x1te, x2tr, x2te = train_test_split(X1, X2, percent=.75)
    c = ccm.CCM()
    c.fit(x1tr, x2tr)
    x1p, x2p = c.predict(x1te, x2te, lib_lengths=lib_lens)
    sc1, sc2 = c.score()
    return sc1, sc2
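Given the cumulative month lengths used for `lib_lens`, the function appears to expect daily data. A quick sketch with fabricated input; two years of samples are used here so the training split comfortably covers the largest library length:

import numpy as np

# fabricated daily series (730 samples each, entirely made up)
days = np.arange(730)
rng = np.random.default_rng(1)
data1 = np.sin(2 * np.pi * days / 30) + 0.1 * rng.standard_normal(730)
data2 = np.roll(data1, 3) + 0.1 * rng.standard_normal(730)

sc1, sc2 = calculateCCM(data1.tolist(), data2.tolist())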
Example #4
def _get_time_delay(x):
    """
    Estimate an appropriate time-delay for attractor reconstruction.

    This function estimates the time-delay needed to reconstruct the
    attractor dynamics of a system from an observed time series. Recreating
    attractor dynamics in this way is based on Takens' embedding theorem;
    the estimate is the lag at the first local minimum of the lagged mutual
    information.

    Note: This method behaves poorly for systems with an embedding dimension
    below 3 and an expected time-delay of 1 (e.g., the coupled logistic map),
    as such systems are not truly chaotic. It should therefore be used with
    caution, especially if there is reason to suspect that the system in
    question is not high-dimensional or chaotic.

    Args:
        x (np.array): The vector to be evaluated

    Returns:
        (int) Estimated time-delay (the lag at the first local minimum of
        lagged mutual information)
    """

    def find_minima(x, num_minima=None, ignore_first=True):
        """
        Find the first n local minima of a vector.

        Args:
            x (np.array): Vector to be searched
            num_minima (int): Number of local minima to return
            ignore_first (boolean): Drop the first local minimum found
                (currently unused)

        Returns:
            The local minima of a vector
        """

        if not isinstance(x, np.ndarray):
            x = np.array(x)

        if x.ndim > 1 and x.shape[0] < x.shape[1]:
            x = x.reshape((x.shape[1], x.shape[0]))

        # flag indices where the sign of the forward difference x[i+1]-x[i]
        # differs from the sign of x[i-1] (for a positive series such as
        # mutual information, this marks every index where it is not increasing)
        zero_crosses = np.zeros(x.shape)
        for i, val in enumerate(np.diff(x)):
            if np.sign(val) != np.sign(x[i - 1]):
                zero_crosses[i] = 1

        # a local minimum is flagged at the first index where the series
        # starts increasing again
        minima = np.zeros(zero_crosses.shape)

        for i, val in enumerate(zero_crosses.tolist()):
            if val != zero_crosses[i - 1] and val == 0.0:
                minima[i] = 1.0

        if num_minima is None:
            return np.where(minima == 1.0)[0].tolist()

        if num_minima >= len(np.where(minima == 1.0)[0].tolist()):
            return np.where(minima == 1.0)[0].tolist()

        if num_minima > 0:
            # note: this returns only the index of the num_minima-th minimum
            return np.where(minima == 1.0)[0].tolist()[num_minima - 1]

        return None

    # Strip nan's from vector to be tested
    x = x[np.logical_not(np.isnan(x))]

    n = x.shape[0]

    # Create an Embed object from skccm
    e = skccm.Embed(x)

    # Evaluate the loss of mutual information for time-lags up to n-1
    mi = e.mutual_information(int(n - 1))

    # Identify local minima to find the first minimum of lagged mutual information
    lag = find_minima(mi, 1)

    # Return estimated time-delay
    if isinstance(lag, list):
        lag = lag[0]

    return lag
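A usage sketch for `_get_time_delay`, with a made-up noisy oscillation as the observable; only numpy is needed beyond the skccm import already used above:

import numpy as np

# made-up observable: a noisy sine wave, 100 samples per period
t = np.linspace(0, 10, 1000)
x = np.sin(2 * np.pi * t) + 0.05 * np.random.randn(t.size)

tau = _get_time_delay(x)
print("estimated time-delay:", tau)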
Example #5
 def calculate_causality(self, lag=1, embed=4, display=False):
     '''
     Calculate pairwise causality between the time series using CCM.
     '''
     with open('../data/test/testdata.json', 'r') as f:
         dataset = json.load(f)
     y1s, y2s, y3s, y4s = np.array(dataset['y1']), \
         np.array(dataset['y2']), np.array(dataset['y3']), \
         np.array(dataset['y4'])
     e1, e2, e3, e4 = ccm.Embed(y1s), ccm.Embed(y2s),\
         ccm.Embed(y3s), ccm.Embed(y4s)
     d1, d2, d3, d4 = e1.embed_vectors_1d(lag, embed),\
         e2.embed_vectors_1d(lag, embed),\
         e3.embed_vectors_1d(lag, embed),\
         e4.embed_vectors_1d(lag, embed)
     libs = np.arange(8, 404, 4, dtype='int')
     results = []
     for data_pair in itertools.combinations([d1, d2, d3, d4], 2):
         xtr, xte, ytr, yte = train_test_split(data_pair[0],
                                               data_pair[1],
                                               percent=.75)
         c = ccm.CCM()
         c.fit(xtr, ytr)
         c.predict(xte, yte, lib_lengths=libs)
         sc1, sc2 = c.score()
         results.append([sc1, sc2])
     # save the pairwise scores into a 4x4 causality matrix
     index = itertools.combinations(range(4), 2)
     causality_list = [[0.0] * 4 for _ in range(4)]
     for res, i in zip(results, index):
         sc1, sc2 = res[0], res[1]
         i1, i2 = i[0], i[1]
         causality_list[i1][i2] = abs((sc1[-1] - sc1[0]) * sc1[-1])
         causality_list[i2][i1] = abs((sc2[-1] - sc2[0]) * sc2[-1])
     causality = []
     for i in range(4):
         for j in range(4):
             causality.append(dict(src=i, dst=j,
                                   value=causality_list[i][j]))
     with open('../static/matrixT.json', 'w') as f:
         jsonfile = {
             'length': 4,
             'causality': causality,
             'matrix': causality_list
         }
         json.dump(jsonfile, f)
     # visualize data
     if display:
         fig = plt.figure()
         # fig.show()
         ax = fig.add_subplot(111)
         items = [{
             'color': 'r',
             'data': ['y1', 'y2']
         }, {
             'color': 'g',
             'data': ['y1', 'y3']
         }, {
             'color': 'b',
             'data': ['y1', 'y4']
         }, {
             'color': 'c',
             'data': ['y2', 'y3']
         }, {
             'color': 'm',
             'data': ['y2', 'y4']
         }, {
             'color': 'y',
             'data': ['y3', 'y4']
         }]
         for i, v in enumerate(results):
             color = items[i]['color']
             d1, d2 = items[i]['data'][0], items[i]['data'][1]
             ax.plot(libs, v[0], c=color, ls='-', label=d1 + '->' + d2)
             ax.plot(libs, v[1], c=color, ls=':', label=d2 + '->' + d1)
         plt.legend(loc='upper right')
         plt.draw()
         plt.show()
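The method reads `../data/test/testdata.json` and expects four equal-length series under the keys `y1` through `y4`; with `libs` running up to 400, each embedded training split needs at least that many points. A sketch that writes a compatible, entirely fabricated test file:

import json
import numpy as np

# fabricated test data: four coupled series, long enough that 75% of the
# embedded vectors still covers the largest library length (400)
n = 600
t = np.linspace(0, 24 * np.pi, n)
payload = {
    "y1": np.sin(t).tolist(),
    "y2": np.sin(t + 0.4).tolist(),
    "y3": np.cos(t).tolist(),
    "y4": (np.sin(t) * np.cos(t)).tolist(),
}
with open("../data/test/testdata.json", "w") as f:
    json.dump(payload, f)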
Example #6
    y = np.array(y)

    # visualize original data
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(x)
    ax.plot(y)
    plt.legend(labels=['X', 'Y'])
    plt.show()

    x1 = x
    x2 = y

    lag = 10
    embed = 2
    e1 = ccm.Embed(x1)
    e2 = ccm.Embed(x2)
    X1 = e1.embed_vectors_1d(lag, embed)
    X2 = e2.embed_vectors_1d(lag, embed)

    # scatter plots of the two embedded time series
    fig = plt.figure()
    ax1 = fig.add_subplot(2, 1, 1)
    ax2 = fig.add_subplot(2, 1, 2, sharex=ax1, sharey=ax1)

    X1_t = X1.T
    ax1.scatter(X1_t[0], X1_t[1], label="X")

    X2_t = X2.T
    ax2.scatter(X2_t[0], X2_t[1], label="Y", color="orange")
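The fragment is cut off after the scatter plots. A plausible continuation under the same variable names, mirroring the earlier examples; none of this is from the original file:

    # (continuation sketch, not from the original code)
    ax1.legend()
    ax2.legend()
    plt.show()

    # run CCM on the two embeddings, as in the examples above
    x1tr, x1te, x2tr, x2te = train_test_split(X1, X2, percent=.75)
    model = ccm.CCM()
    len_tr = len(x1tr)
    lib_lens = np.arange(10, len_tr, max(len_tr // 20, 1), dtype='int')
    model.fit(x1tr, x2tr)
    model.predict(x1te, x2te, lib_lengths=lib_lens)
    sc1, sc2 = model.score()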