Example #1
0
def incren(ts, m, R, fd=True):
    """Increment entropy of a time series.

    The series is first-differenced, embedded into patterns of dimension
    ``m``, and each pattern element is quantized (relative to the pattern's
    own standard deviation) and signed to form a symbolic "word". The
    Shannon entropy of the word distribution is returned.

    Parameters
    ----------
    ts : array-like
        Input time series.
    m : int
        Embedding dimension for the pattern space of the differenced series.
    R : float
        Quantization resolution; each element maps to
        ``floor(min(R, R * |x| / std(pattern)))``.
    fd : bool, optional
        If True (default), return the word-frequency dictionary as the
        second value; otherwise return the symbolized words array.

    Returns
    -------
    tuple
        ``(entropy, freq_dict)`` when ``fd`` is truthy, else
        ``(entropy, words)``.
    """
    # Local import kept as in the original; aliased `pe` (not `ent`) so the
    # entropy accumulator below cannot shadow the module.
    from pyentrp import entropy as pe

    diff = np.diff(ts)
    subvectors = pe.util_pattern_space(diff, lag=1, dim=m)
    signs = np.sign(subvectors)

    # Quantize each pattern row relative to its own standard deviation.
    # NOTE(review): a constant row (std == 0) divides by zero here, exactly
    # as in the original — TODO confirm upstream guarantees non-constant
    # patterns before adding a guard.
    allqs = []
    for row in subvectors:
        sd = np.std(row)
        qs = [np.floor(np.min([R, R * np.abs(element) / sd])) for element in row]
        allqs.append(qs)
    allqs = np.vstack(allqs)

    # Signed quantized symbols form the words.
    words = np.multiply(signs, allqs)

    # Relative frequency of each distinct word (return_counts=True, not 1).
    unq_rows, count = np.unique(words, axis=0, return_counts=True)
    freq_dict = {tuple(r): c / len(words) for r, c in zip(unq_rows, count)}

    # Shannon entropy (bits) of the word distribution. Named `entropy_sum`
    # to avoid re-binding the import alias as the original did.
    entropy_sum = 0.0
    for freq in freq_dict.values():
        entropy_sum += freq * np.log2(freq)

    if fd:
        return -1 * entropy_sum, freq_dict
    return -1 * entropy_sum, words
Example #2
0
 def test_utilSequence(self):
     """util_pattern_space must reject invalid lag/dim and embed correctly."""
     # Bug fix: assertRaises forwards *args to the callable, so the original
     # tuple `(TIME_SERIES, 0, 2)` arrived as ONE positional argument and the
     # call raised TypeError (missing arguments) — the test passed for the
     # wrong reason. Pass the arguments individually instead.
     self.assertRaises(Exception, ent.util_pattern_space,
                       TIME_SERIES, 0, 2)
     self.assertRaises(Exception, ent.util_pattern_space,
                       TIME_SERIES, 10, 20)
     # Happy path: lag=2, dim=3 embedding of TIME_SERIES.
     np.testing.assert_array_equal(
         ent.util_pattern_space(TIME_SERIES, 2, 3),
         np.array([[1, 1, 3], [1, 2, 4], [1, 3, 5]]))
Example #3
0
import collections 

import keras
from keras.layers import Dense, Bidirectional, LSTM, TimeDistributed
from keras.optimizers import Adam



##########

# Download the full available history of the Dow Jones index via yfinance.
# NOTE(review): `yf` (yfinance) is never imported in this fragment, and the
# downloaded `hist` is not referenced below — the window loop reads
# `hist_sma`, presumably a smoothed series computed elsewhere. Confirm
# against the full script.
hist = yf.download(tickers = "DJI", period = 'max')


# Embed the smoothed series into overlapping length-50 windows, then turn
# each window into a SAX word: z-normalise, reduce to 3 PAA segments, and
# map onto a 2-symbol alphabet.
dow_df = ent.util_pattern_space(hist_sma, lag=1, dim=50)
words = [
    ts_to_string(paa(znorm(window), 3), cuts_for_asize(2))
    for window in dow_df
]
print(words)


# Frequency of each SAX word across the sliding windows.
print(collections.Counter(words))

# Encode the SAX words as integer class labels for downstream modelling.
from sklearn.preprocessing import LabelEncoder
le=LabelEncoder()
# `sqn` is the integer-encoded word sequence; it (and `le`) may be consumed
# later in the full script, so the names are left untouched.
sqn = le.fit_transform(words)