Example #1
from pyFTS.data import TAIEX, NASDAQ, artificial


# generate_gaussian_linear arguments: initial mean, initial standard deviation,
# then the per-iteration increments of the mean and of the standard deviation.
def get_dataset(dataset_name):
    if dataset_name == "TAIEX":
        return TAIEX.get_data()
    elif dataset_name == "NASDAQ":
        return NASDAQ.get_data()
    elif dataset_name == 'IMIV':  # Incremental Mean and Incremental Variance
        return artificial.generate_gaussian_linear(1,
                                                   0.2,
                                                   0.2,
                                                   0.05,
                                                   it=100,
                                                   num=50)
    elif dataset_name == 'IMIV0':  # Incremental Mean and Incremental Variance, lower bound equals to 0
        return artificial.generate_gaussian_linear(1,
                                                   0.2,
                                                   0.,
                                                   0.05,
                                                   vmin=0,
                                                   it=100,
                                                   num=50)
    elif dataset_name == 'CMIV':  # Constant Mean and Incremental Variance
        return artificial.generate_gaussian_linear(5,
                                                   0.1,
                                                   0,
                                                   0.02,
                                                   it=100,
                                                   num=50)
    elif dataset_name == 'IMCV':  # Incremental Mean and Constant Variance
        return artificial.generate_gaussian_linear(1,
                                                   0.6,
                                                   0.1,
                                                   0,
                                                   it=100,
                                                   num=50)
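A hypothetical call site, just to show how get_dataset might be used (the names mirror the branches above; none of this is in the original example):

for name in ["TAIEX", "NASDAQ", "IMIV", "IMIV0", "CMIV", "IMCV"]:
    series = get_dataset(name)
    print(name, len(series))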
Example #2
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

from pyFTS.data import TAIEX
data1 = TAIEX.get_dataframe()
data2 = data1.drop(["Open", "High", "Low", "Close", "Volume"],
                   axis=1)
dl = {}
for i in range(10):
    dl[i] = data2[data2["Date"].dt.year == 2004 + i]
for i in range(10):
    print(dl[i])

chen = []
weighted = []
itr = 0
sum_of_percentage_error_abl_chen = 0
sum_of_percentage_error_abl_weighted = 0

while itr < 10:
    train = dl[itr][dl[itr]["Date"].dt.month <= 10]
    test = dl[itr][dl[itr]["Date"].dt.month > 10]
    itr += 1

    previous = 0
    diff_sum = 0
    # accumulate the successive differences of the yearly 'avg' series
    for value in train['avg']:
        diff_sum += value - previous
        previous = value
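    # The original example is truncated here; a hedged sketch of how the
    # per-year comparison might continue, assuming pyFTS' Chen and Yu
    # (weighted) models. Module aliases avoid clashing with the `chen` and
    # `weighted` result lists defined above.
    from pyFTS.partitioners import Grid
    from pyFTS.models import chen as chen_fts, yu as yu_fts

    fs = Grid.GridPartitioner(data=train['avg'].values, npart=10)

    m1 = chen_fts.ConventionalFTS(partitioner=fs)
    m1.fit(train['avg'].values)
    chen.append(m1.predict(test['avg'].values))

    m2 = yu_fts.WeightedFTS(partitioner=fs)
    m2.fit(train['avg'].values)
    weighted.append(m2.predict(test['avg'].values))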
Example #3
#!/usr/bin/python
# -*- coding: utf8 -*-

import os
import numpy as np
import matplotlib.pylab as plt
#from mpl_toolkits.mplot3d import Axes3D

import pandas as pd
from pyFTS.common import Transformations

tdiff = Transformations.Differential(1)

from pyFTS.data import TAIEX, SP500, NASDAQ

dataset = TAIEX.get_data()
#dataset = SP500.get_data()[11500:16000]
#dataset = NASDAQ.get_data()
#print(len(dataset))

from pyFTS.partitioners import Grid, Util as pUtil
partitioner = Grid.GridPartitioner(data=dataset[:800], npart=10, transformation=tdiff)


from pyFTS.common import Util as cUtil
from pyFTS.benchmarks import benchmarks as bchmk, Util as bUtil, Measures, knn, quantreg, arima, naive

from pyFTS.models import pwfts, song, chen, ifts, hofts
from pyFTS.models.ensemble import ensemble

model = chen.ConventionalFTS(partitioner=partitioner)
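The example stops right after instantiating the model; a minimal continuation, assuming the standard pyFTS fit/predict calls and reusing the 800-point slice the partitioner was built on:

# assumption: the differential transformation must also be attached to the model
model.append_transformation(tdiff)
model.fit(dataset[:800])

forecasts = model.predict(dataset[800:1000])
print(forecasts[:10])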
Example #4
import numpy as np

from pyFTS.common import Util
from pyFTS.common import Transformations
from pyFTS.partitioners import Grid
from pyFTS.models import chen, hofts
from pyFTS.models.incremental import IncrementalEnsemble, TimeVariant
from pyFTS.models.ensemble import ensemble
from pyFTS.data import AirPassengers, artificial
from pyFTS.data import TAIEX, NASDAQ, SP500

train = TAIEX.get_data()[1000:1800]
test = TAIEX.get_data()[1800:2000]

model = ensemble.SimpleEnsembleFTS(fts_method=hofts.WeightedHighOrderFTS,
                                   orders=[1, 2, 3],
                                   partitions=np.arange(10, 50, 5))

model.fit(train)

horizon = 10

intervals05 = model.predict(test[:horizon], type='interval', alpha=.05)
print(intervals05)
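A small follow-on, not in the original snippet, that reuses the same predict API for point forecasts and for wider 75% intervals:

point_forecasts = model.predict(test[:horizon])
intervals25 = model.predict(test[:horizon], type='interval', alpha=.25)

print(point_forecasts)
print(intervals25)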
Example #5
import os
import numpy as np
from pyFTS.common import Membership, Transformations
from pyFTS.models.nonstationary import common, perturbation, partitioners as nspart, util
from pyFTS.models.nonstationary import nsfts, cvfts
from pyFTS.partitioners import Grid, Entropy
import matplotlib.pyplot as plt
from pyFTS.common import Util as cUtil
import pandas as pd

from pyFTS.data import TAIEX, NASDAQ, SP500, artificial

datasets = {
    "TAIEX":
    TAIEX.get_data()[:4000],
    "SP500":
    SP500.get_data()[10000:14000],
    "NASDAQ":
    NASDAQ.get_data()[:4000],
    # Incremental Mean and Incremental Variance
    "IMIV":
    artificial.generate_gaussian_linear(1, 0.2, 0.2, 0.05, it=100, num=40),
    # Incremental Mean and Incremental Variance, lower bound equals to 0
    "IMIV0":
    artificial.generate_gaussian_linear(1,
                                        0.2,
                                        0.,
                                        0.05,
                                        vmin=0,
                                        it=100,
                                        num=40),
}
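A minimal sketch, not in the original, that just plots each of the series gathered in `datasets` above (pure matplotlib, no pyFTS calls), useful for eyeballing the non-stationarity:

fig, axes = plt.subplots(nrows=len(datasets), ncols=1, figsize=[10, 10])
for ax, (name, series) in zip(axes, datasets.items()):
    ax.plot(series)
    ax.set_title(name)
plt.tight_layout()
plt.show()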
Example #6
import time

import numpy as np
import matplotlib.pyplot as plt

from pyFTS.data import TAIEX

# NOTE: IncrementalFTS is not part of pyFTS; it is assumed to be defined or
# imported elsewhere in the original project.


def main():

    #vals = TAIEX.get_data()
    data = TAIEX.get_data()
    data = list(data[0:1500]) +  list(data[0:1500]) +  list(data[0:1500])
    vals = data
    #vals = generate_data(1000)
    #vals = vals[:,1];
    #vals = np.concatenate([vals,vals])
    
#     print(vals.shape)
#      
#     train_end = 1500
#      
#     nonifts1 = IncrementalFTS(data = vals[0:train_end], dtype = 'weighted average', partition_method = 'triangular uniform', incremental = False)
#     p1 = nonifts1.predict(vals[train_end:2000])
#      
#     #plt.plot(np.arange(4000),vals[0:4000],'r')
#     #plt.plot(np.arange(train_end,4000),p1,'b')
#      
#     plt.plot(np.arange(2000),vals[0:2000],'r')
#     plt.plot(np.arange(train_end,2000),p1,'b')
#      
#      
#     plt.show()
    
    
    #===========================================================================
    train_end = 2    
    fts = IncrementalFTS(data = vals[0:train_end], dtype = 'weighted average', partition_method = 'triangular uniform', incremental = True)
    # Train FTS
    #fts.generate_rules()
    #fts.print_rules()
     
    p = []

    for i in range(len(vals) - train_end):
        idx = i

        p = fts.run(vals[train_end + i], i, vals, p)

        ################### Plots #################################
        plt.cla()
             
        #axes = plt.gca()
        #axes.set_xlim([-1000,3000])
             
        #fts.fuzzy_sets.plot_fuzzy_sets(-100, 600,begin = -600 , scale = 400, nsteps = 1000)
        fts.fuzzy_sets.plot_fuzzy_sets(2000, 15000,begin = -500 , scale = 400, nsteps = 1000)
             
        plt.plot(np.arange(idx+1)+2,p,'b')    
             
        plt.plot(np.arange(train_end+i),vals[0:train_end+i],'r')
         
        plt.draw()
        plt.pause(1e-17)
        time.sleep(0.01)
             
             
    #     ###########################################################
    #===========================================================================
        
    
    #fts.fuzzy_sets.plot_fuzzy_sets(np.min(vals), np.max(vals),begin = -500 , scale = 400, nsteps = 1000)
    
    #p3 = fts.predict(vals[train_end:(len(vals)-1)])
    #p2 = fts.predict(vals[train_end:(len(vals)-1)], dtype = 'center average')
    #p1 = fts.predict(vals[train_end:(len(vals)-1)], dtype = 'persistence')
    
    #plt.plot(p3)
    #plt.plot(vals[(train_end):(len(vals))])
    #plt.plot(np.linspace(train_end, len(vals), len(p3)),p2)
    #plt.plot(np.linspace(train_end+1, len(vals), len(p3)),p1)
    #plt.plot(np.linspace(train_end+1, len(vals), len(p3)),vals[(train_end+1):(len(vals))])
    
    plt.show()
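Presumably the script ends with the standard entry-point guard:

if __name__ == '__main__':
    main()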
Example #7
import os
import numpy as np

import pandas as pd
from pyFTS.partitioners import Grid
from pyFTS.common import Transformations
from pyFTS.models import chen, hofts
from pyFTS.models.incremental import IncrementalEnsemble, TimeVariant

from pyFTS.data import AirPassengers, artificial

from pyFTS.models.ensemble import ensemble
from pyFTS.models import hofts
from pyFTS.data import TAIEX

data = TAIEX.get_data()

model = ensemble.EnsembleFTS()

for k in [15, 25, 35]:
    for order in [1, 2]:
        fs = Grid.GridPartitioner(data=data, npart=k)
        tmp = hofts.WeightedHighOrderFTS(partitioner=fs)

        tmp.fit(data)

        model.append_model(tmp)

forecasts = model.predict(data, type='interval', method='quantile', alpha=.05)

from pyFTS.benchmarks import benchmarks as bchmk
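The dangling benchmarks import suggests the original went on to plot or score these intervals; as a library-agnostic sketch, the quantile intervals can be overlaid on the series with plain matplotlib (the alignment between inputs and forecasts, which depends on the ensemble's order, is glossed over here):

import matplotlib.pyplot as plt

lower = [iv[0] for iv in forecasts]
upper = [iv[1] for iv in forecasts]

fig, ax = plt.subplots(figsize=[15, 5])
ax.plot(data, color='black', label='TAIEX')
# note: the forecasts are shifted by the model order relative to `data`;
# that offset is ignored in this rough overlay
ax.fill_between(np.arange(len(lower)), lower, upper,
                color='blue', alpha=.3, label='quantile interval')
ax.legend()
plt.show()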
Example #8
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D
import importlib
from statsmodels.tsa.stattools import adfuller
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf

from pyFTS.common import Util

from pyFTS.data import TAIEX

taiex = TAIEX.get_data()

train = taiex[:3000]
test = taiex[3000:3200]

from pyFTS.common import Transformations
tdiff = Transformations.Differential(1)
'''
from pyFTS.partitioners import Grid, Util as pUtil
from pyFTS.common import FLR,FuzzySet,Membership,SortedCollection
taiex_fs1 = Grid.GridPartitioner(data=train, npart=30)
taiex_fs2 = Grid.GridPartitioner(data=train, npart=10, transformation=tdiff)

#pUtil.plot_partitioners(train, [taiex_fs1,taiex_fs2], tam=[15,7])

from pyFTS.common import fts,tree
from pyFTS.models import hofts, pwfts
'''
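Only the stationarity-related imports survive in this excerpt; a hedged sketch of the usual next step, checking whether the differenced series is stationary (tdiff.apply is assumed to apply the first-order differential defined above):

# assumption: Differential.apply(data) returns the differenced series
diff_train = tdiff.apply(train)

print('ADF p-value, raw series:        ', adfuller(train)[1])
print('ADF p-value, differenced series:', adfuller(diff_train)[1])

fig, axes = plt.subplots(nrows=2, ncols=1, figsize=[10, 6])
plot_acf(diff_train, lags=30, ax=axes[0])
plot_pacf(diff_train, lags=30, ax=axes[1])
plt.show()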
Example #9
"""
@author: ritesh
"""
import numpy as np
'''for plotting the graphs'''
import matplotlib.pyplot as plt
'''pyFTS is a Python library that provides methods
for fuzzy time series prediction'''
from pyFTS.data import TAIEX
'''importing Grid to create the partitions of the Universe of Discourse'''
from pyFTS.partitioners import Grid
'''importing chen for the conventional fuzzy time series model'''

from pyFTS.models import chen
'''getting the whole TAIEX dataframe'''
data = TAIEX.get_dataframe()
'''Data Visualisation'''
plt.plot(data['Date'], data['avg'])
'''getting target variable'''

temp = TAIEX.get_data()
train = temp[1:4000]
test = temp[4000:5000]
'''Universe of Discourse Partitioner'''

fig, ax = plt.subplots(nrows=1, ncols=1, figsize=[10, 5])

partitioner = Grid.GridPartitioner(data=train, npart=10)
partitioner.plot(ax)
plt.show()
'''creating Chen's model'''
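The example ends at the comment announcing Chen's model; a minimal completion, assuming the same fit/predict calls used throughout these examples (the one-step shift between test values and forecasts is ignored in the plot):

model = chen.ConventionalFTS(partitioner=partitioner)
model.fit(train)

forecasts = model.predict(test)

plt.figure(figsize=[10, 5])
plt.plot(test, label='TAIEX')
plt.plot(forecasts, label="Chen's forecasts")
plt.legend()
plt.show()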