Example #1
async def download_series_data(request):
    tdb = TimeseriesDB()
    series_name = request.match_info['series_name']
    if series_name is None or series_name == "":
        return web.Response(text = "Series Name is empty, please fill in")
    with io.StringIO() as csvfile:
        csvwriter = csv.writer(csvfile, delimiter=',')
        csvwriter.writerow(["Date","Value"])
        csvwriter.writerows([[s.date.strftime('%m/%d/%Y'),str(s.value)] for s in tdb.get_series_data(series_name)])
        return web.Response(
            # Some browsers will ignore the suggested filename
            headers={
                'Content-Disposition':
                    'attachment; filename="{}.csv"'.format(format_filename(series_name))
            },
            body=csvfile.getvalue(),
            content_type='text/csv'
        )
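format_filename is used above but not shown in these examples; a minimal sketch of what such a helper might look like, assuming it only needs to make the series name safe to use in a filename (this stand-in is an assumption, not the project's actual helper):

import re

def format_filename(name):
    # Hypothetical stand-in: keep letters, digits, dashes and underscores,
    # collapse everything else to underscores so the name is filesystem-safe.
    return re.sub(r'[^A-Za-z0-9_-]+', '_', name)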
Example #2
    def __init__(self, series_name, func, day=0, tdb=TimeseriesDB()):
        from datetime import timedelta
        one_day = timedelta(1)
        self.series_name = series_name
        self.total_delta = one_day * day
        self.func_current_to_past = func
        self.tdb = tdb
Example #3
    def __init__(self, series_name, day=0, end_day=0, tdb=TimeseriesDB()):
        from datetime import timedelta
        one_day = timedelta(1)
        self.series_name = series_name
        self.total_delta = one_day * day - one_day
        self.start_delta = one_day * end_day
        self.tdb = tdb
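Note that tdb=TimeseriesDB() as a default argument is evaluated once, when the function is defined, so every instance built without an explicit tdb shares that single TimeseriesDB object. If per-instance connections are wanted, the usual pattern is the following sketch (not taken from the original code):

    def __init__(self, series_name, func, day=0, tdb=None):
        # Build a fresh TimeseriesDB per instance unless one is injected.
        self.tdb = tdb if tdb is not None else TimeseriesDB()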
Example #4
async def create_series(request):
    tdb = TimeseriesDB()
    data = await request.post()
    series_name = data['series_name']
    if series_name is None or series_name == "":
        return web.Response(text = "Series Name is empty, please fill in")
    csvfile = data['csv_file']
    csv_datas = None
    try:
        content = csvfile.file.read()
        tr_file = io.StringIO(content.decode("utf-8"))
        csv_datas = csv_to_timeseries(tr_file)
        tr_file.close()
    except Exception as e:
        return web.Response(text = "CSV is malformed, error " + str(e))
    try:
        tdb.create_series(series_name,many_series_data = csv_datas)
    except Exception as e:
        return web.Response(text = "Error in creating series, error " + str(e))
    return web.Response(text = "Finished Creating " +  series_name)
Example #5
async def update_series(request):
    tdb = TimeseriesDB()
    data = await request.post()
    series_name = data['series_name']
    if series_name is None or series_name == "":
        return web.Response(text = "Series Name is empty, Choose one from list")
    csvfile = data['csv_file']
    csv_datas = None
    try:
        content = csvfile.file.read()
        tr_file = io.StringIO(content.decode("utf-8"))
        csv_datas = csv_to_timeseries(tr_file)
        tr_file.close()
    except Exception as e:
        return web.Response(text = "CSV is malformed, error " + str(e))
    try:
        tdb.insert_or_update_series_data(series_name,csv_datas)
    except Exception as e:
        return web.Response(text = "Error in inserting data, error " + str(e))
    return web.Response(text = "Finished Uploading " + series_name)
Example #6
def create_query(representation, tdb=TimeseriesDB()):
    ret = {}
    for key in representation:
        query_spec = representation[key]
        query_type = query_spec['query']
        query_param = query_spec['param']
        query_param['tdb'] = tdb

        def string_to_query_type(s):
            if (s == 'average'):
                return average_over
            elif (s == 'divided'):
                return current_divided_by_past
            elif (s == 'raw'):
                return past_value
            raise RuntimeError('query string not recognized')

        ret[key] = string_to_query_type(query_type)(**query_param)
    return caching_query(composite_query(ret))
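A usage sketch, mirroring how the examples further down call create_query; the series name and date below are placeholders:

import datetime

representation = {
    'a7':      {'query': 'average', 'param': {'series_name': 'GBP/USD', 'day': 7}},
    'current': {'query': 'raw',     'param': {'series_name': 'GBP/USD'}},
}
query = create_query(representation)
values = query.apply(datetime.date(2016, 1, 4))
# values is a dict keyed like the representation, e.g. values['a7'] and values['current']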
Example #7
    def __init__(self, series_name, day=0, tdb=TimeseriesDB()):
        def divide(x, y):
            return x / y

        self.func = current_to_past(series_name, divide, day=day, tdb=tdb)
Example #8
    def __init__(self, dict_timeseries_query, tdb=TimeseriesDB()):
        self.dict_timeseries_query = dict_timeseries_query
        self.dict_keys = list(dict_timeseries_query.keys())
        self.tdb = tdb
Example #9
async def delete_series(request):
    tdb = TimeseriesDB()
    data = await request.post()
    series_name = data['series_name']
    tdb.delete_series(series_name)
    return web.Response(text = "Finished Deleting " +  series_name)
Example #10
async def get_series_data(request):
    tdb = TimeseriesDB()
    series_name = request.match_info['series_name']
    db_result = tdb.get_series_data(series_name)
    response = [(str(r.date),str(r.value)) for r in db_result]
    return web.json_response(response)
Example #11
def trade_simulator(q,param):
    try:
        from data_maker import f_1,get_data_stream,transformToLogRet
        from InputStandardizer import InputStandardizer
        from FOSELM import EOSELM,FOSELM
        from sofnn import MIMOSOFNN
        from naive_predict import naive_predict
        from financial_math import mean_squared_error,mean_average_error,smape, \
                                    ln_q,ndei,sortino_ratio,sharpe_ratio,total_return
        from PANFIS import MOGENEFIS,MOGSEFS
        from model_test import test_model_stream,antiQDecisionMaker,qDecisionMaker,predictDecisionMaker,antiPredictDecisionMaker,TYPE_PRICE_CHANGE
        from timeseries_query import create_query
        import copy
        import json  # json.loads is used below; imported here so the snippet is self-contained
        from timeseries_db import TimeseriesDB  # likewise needed for TimeseriesDB() below
        c_pair = f_1
        lag = param["data_transformation"]["lag"]
        trade_cost = 0
        leverage = param["strategy"]["leverage"]
        n_input = None
        n_output = None
        decision_maker = None
        strategy_lag = param["strategy"]["lag"]
        strategy_kind = param["strategy"]["strategy_kind"]
        data_query = json.loads(param['data_transformation']['query'],strict=False)
        n_input = len(data_query)
        tdb = TimeseriesDB()
        test_data = None
        if (strategy_kind == "p"):
            decision_maker = predictDecisionMaker(nWait = strategy_lag,leverage = leverage)
            test_data = get_data_stream(c_pair,lag,data_query)
            n_output = 1
        elif (strategy_kind == "antip"):
            decision_maker = antiPredictDecisionMaker(nWait = strategy_lag,leverage = leverage)
            test_data = get_data_stream(c_pair,lag,data_query)
            n_output = 1
        elif (strategy_kind == "q"):
            actions = [leverage*-1 +1, 0, leverage]
            decision_maker = qDecisionMaker(nWait=strategy_lag,actions=actions)
            test_data = transformToLogRet(get_data_stream(c_pair,lag,data_query),actions,stream=True)
            n_output = 3
        elif (strategy_kind == "antiq"):
            actions = [leverage*-1 +1, 0, leverage]
            decision_maker = antiQDecisionMaker(nWait=strategy_lag,actions=actions)
            test_data = transformToLogRet(get_data_stream(c_pair,lag,data_query),actions,stream=True)
            n_output = 3
        
        predict_machine = None
        machine_type = param['machine']['kind']
        if (machine_type == "SOFNN") :
            machine_param = copy.deepcopy(param['machine']['param'])
            standar = machine_param['standardization']
            del machine_param['standardization']
            machine_param['r'] = n_input
            machine_param['rt'] = n_output
            predict_machine = MIMOSOFNN(**machine_param)
            if (standar > 0):
                predict_machine = InputStandardizer(predict_machine,standar)
        if (machine_type == "FOSELM") :
            machine_param = copy.deepcopy(param['machine']['param'])
            standar = machine_param['standardization']
            del machine_param['standardization']
            machine_param['n'] = n_input
            predict_machine = FOSELM(**machine_param)
            if (standar > 0):
                predict_machine = InputStandardizer(predict_machine,standar)
        if (machine_type == "GSEFS") :
            machine_param = copy.deepcopy(param['machine']['param'])
            standar = machine_param['standardization']
            del machine_param['standardization']
            machine_param['n_input'] = n_input
            machine_param['n_output'] = n_output
            predict_machine = MOGSEFS(**machine_param)
            if (standar > 0):
                predict_machine = InputStandardizer(predict_machine,standar)
        if (machine_type == "GENEFIS") :
            machine_param = copy.deepcopy(param['machine']['param'])
            standar = machine_param['standardization']
            del machine_param['standardization']
            machine_param['n_input'] = n_input
            machine_param['n_output'] = n_output
            predict_machine = MOGENEFIS(**machine_param)
            if (standar > 0):
                predict_machine = InputStandardizer(predict_machine,standar)
        
        starting_money = param['etc']['starting_money']
        high_low_vol_rep = {
            'low' : 
                {
                    'query' : 'raw',
                    'param' : {'series_name':'GBP/USD_LOW','day':0}
                },
            'high' : 
                {
                    'query' : 'raw',
                    'param' : {'series_name':'GBP/USD_HIGH','day':0}
                },
            'vol'  : 
                {
                    'query' : 'raw',
                    'param' : {'series_name':'GBP/USD_VOLUME','day':0}
                }
        }
        hlc_query = create_query(high_low_vol_rep,tdb)
        for e in test_model_stream(predict_machine,test_data,decision_maker,trade_cost,starting_money=starting_money):
            print(e)
            if (e[2]== TYPE_PRICE_CHANGE):
                mod_e = list(e)
                mod_e[3] = list(mod_e[3])
                cur_date = mod_e[0]
                hlc = hlc_query.apply(cur_date)
                mod_e[3].append([float(hlc['high']),float(hlc['low']),float(hlc['vol'])])
                q.put_nowait(('DATA',mod_e))
            else:
                q.put_nowait(('DATA',e))
    except Exception as e:
        q.put_nowait(("ERROR",e))
    finally:
        q.put_nowait(("END",None))
Example #12
async def get_all_series(request):
    tdb = TimeseriesDB()
    return web.json_response([s.name for s in tdb.get_all_series()])
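All of the request handlers in these examples are aiohttp coroutines, so an application can be assembled directly from them. A sketch, with made-up route paths (the project's real URL layout is not shown here):

from aiohttp import web

app = web.Application()
app.router.add_get('/series', get_all_series)
app.router.add_get('/series/{series_name}', get_series_data)
app.router.add_get('/series/{series_name}/csv', download_series_data)
app.router.add_post('/series/create', create_series)
app.router.add_post('/series/update', update_series)
app.router.add_post('/series/delete', delete_series)
web.run_app(app, port=8080)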
Example #13
def get_data_stream(c_pair, lag, query_representation=None):
    from timeseries_query import create_query
    from timeseries_db import TimeseriesDB
    from datetime import timedelta
    import datetime
    assert (lag > 0)
    tdb = TimeseriesDB()
    if (query_representation is None):
        query_representation = {
            "a3": {
                "query": "average",
                "param": {
                    "series_name": c_pair,
                    "day": 3
                }
            },
            "a7": {
                "query": "average",
                "param": {
                    "series_name": c_pair,
                    "day": 7
                }
            },
            "a15": {
                "query": "average",
                "param": {
                    "series_name": c_pair,
                    "day": 15
                }
            },
            "a30": {
                "query": "average",
                "param": {
                    "series_name": c_pair,
                    "day": 30
                }
            },
            "current": {
                "query": "raw",
                "param": {
                    "series_name": c_pair
                }
            }
        }

    keys = [key for key in query_representation]
    query = create_query(query_representation, tdb)

    sample_data = []
    test_data = []
    train_data = []

    def is_good_vector(vector):
        # Reject vectors that contain None or NaN entries.
        return None not in vector and not any(math.isnan(x) for x in vector)

    def input_datum_to_list(input_datum):
        return [input_datum[key] for key in keys]

    def add_datum(datum):
        date = datum[0]
        if (start_testing_date < date < start_sampling_date):
            return datum
        elif (date >= start_sampling_date):
            return datum
        else:
            if (datum[1] != ACT and datum[1] != TEST):
                return datum

    def to_float(ls):
        return [float(x) for x in ls]

    for current_data in tdb.get_series_data(c_pair,
                                            date_start=datetime.date(
                                                2011, 1, 1)):
        one_day = timedelta(1)
        price = float(current_data['value'])
        date = current_data['date']
        print(date)

        price_datum = (date, NEW_PRICE, price)
        datum = add_datum(price_datum)
        if datum is not None:
            yield datum

        train_input_datum = query.apply(current_data['date'] - one_day * lag)
        train_input = input_datum_to_list(train_input_datum)
        train_target = [price]
        if (is_good_vector(train_input)):
            train_input = to_float(train_input)
            input_price = float(train_input_datum['current'])
            target_price = price
            train_datum = (date, TRAIN, (train_input, train_target,
                                         input_price, target_price))
            datum = add_datum(train_datum)
            if datum is not None:
                yield datum

        act_input_datum = query.apply(current_data['date'])
        act_input = input_datum_to_list(act_input_datum)
        if (is_good_vector(act_input)):
            act_input = to_float(act_input)
            act_datum = (date, ACT, act_input)
            datum = add_datum(act_datum)
            if datum is not None:
                yield datum

        test_price = float(
            query.apply(current_data['date'] + one_day * lag)['current'])
        test_input_datum = query.apply(current_data['date'])
        test_input = input_datum_to_list(test_input_datum)
        test_target = [test_price]
        if (is_good_vector(test_input)):
            test_input = to_float(test_input)
            input_price = float(test_input_datum['current'])
            target_price = test_price
            test_datum = (date, TEST, (test_input, test_target, input_price,
                                       target_price))
            datum = add_datum(test_datum)
            if datum is not None:
                yield datum

    print('Done Creating Data')
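Each yielded datum is a (date, kind, payload) tuple, where kind is one of the module-level constants NEW_PRICE, TRAIN, ACT or TEST used above. A consumption sketch with a placeholder currency pair:

for date, kind, payload in get_data_stream('GBP/USD', lag=1):
    if kind == TRAIN:
        inputs, target, input_price, target_price = payload
        # feed (inputs, target) to an online learner here
    elif kind == ACT:
        features = payload       # feature vector for acting on this date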
Example #14
def get_data(c_pair, lag, Trye=False, query_rep=None, noisy=True):
    from timeseries_query import create_query
    from timeseries_db import TimeseriesDB
    from datetime import timedelta
    import datetime
    assert (lag > 0)
    tdb = TimeseriesDB()
    representation = {
        'a3': {
            'query': 'average',
            'param': {
                'series_name': c_pair,
                'day': 3
            }
        },
        'a7': {
            'query': 'average',
            'param': {
                'series_name': c_pair,
                'day': 7
            }
        },
        'a15': {
            'query': 'average',
            'param': {
                'series_name': c_pair,
                'day': 15
            }
        },
        'a30': {
            'query': 'average',
            'param': {
                'series_name': c_pair,
                'day': 30
            }
        },
        'current': {
            'query': 'raw',
            'param': {
                'series_name': c_pair
            }
        }
    }
    if (query_rep is not None):
        representation = query_rep

    keys = [key for key in representation]
    query = create_query(representation)

    sample_data = []
    test_data = []
    train_data = []

    def is_good_vector(vector):
        # Reject vectors that contain None or NaN entries.
        return None not in vector and not any(math.isnan(x) for x in vector)

    def input_datum_to_list(input_datum):
        return [input_datum[key] for key in keys]

    def add_datum(datum):
        date = datum[0]
        if (start_testing_date < date < start_sampling_date):
            test_data.append(datum)
        elif (date >= start_sampling_date):
            if (Trye):
                sample_data.append(datum)
            else:
                test_data.append(datum)
        else:
            train_data.append(datum)

    def to_float(ls):
        return [float(x) for x in ls]

    for current_data in tdb.get_series_data(c_pair,
                                            date_start=datetime.date(
                                                2011, 1, 1)):
        one_day = timedelta(1)
        price = float(current_data['value'])
        date = current_data['date']
        if noisy: print(date)

        price_datum = (date, NEW_PRICE, price)
        add_datum(price_datum)

        train_input_datum = query.apply(current_data['date'] - one_day * lag)
        train_input = input_datum_to_list(train_input_datum)
        train_target = [price]
        if (is_good_vector(train_input)):
            train_input = to_float(train_input)
            input_price = float(train_input_datum['current'])
            target_price = price
            train_datum = (date, TRAIN, (train_input, train_target,
                                         input_price, target_price))
            add_datum(train_datum)

        act_input_datum = query.apply(current_data['date'])
        act_input = input_datum_to_list(act_input_datum)
        if (is_good_vector(act_input)):
            act_input = to_float(act_input)
            act_datum = (date, ACT, act_input)
            add_datum(act_datum)

        test_price = float(
            query.apply(current_data['date'] + one_day * lag)['current'])
        test_input_datum = query.apply(current_data['date'])
        test_input = input_datum_to_list(test_input_datum)
        test_target = [test_price]
        if (is_good_vector(test_input)):
            test_input = to_float(test_input)
            input_price = float(test_input_datum['current'])
            target_price = test_price
            test_datum = (date, TEST, (test_input, test_target, input_price,
                                       target_price))
            add_datum(test_datum)

    if noisy: print('Done Creating Data')
    return train_data, test_data, sample_data
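get_data is the eager counterpart of get_data_stream: it splits the same datum tuples into train, test and sample lists and returns them. A brief usage sketch (the pair name is a placeholder):

train_data, test_data, sample_data = get_data('GBP/USD', lag=1, noisy=False)
print(len(train_data), 'train data /', len(test_data), 'test data')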
Example #15
-0.5	0.7	0.3	0.2	0.2	-0.2	 -	0.4	0.4	0.1	0.1	0.4	2/1/2013	3/4/2013	4/1/2013	5/2/2013	6/1/2013	7/2/2013	8/1/2013	9/1/2013	10/2/2013	11/1/2013	12/2/2013	1/1/2014
-0.6	0.5	0.2	0.4	-0.1	0.2	-0.3	0.4	 -	0.1	-0.3	 -	2/1/2014	3/4/2014	4/1/2014	5/2/2014	6/1/2014	7/2/2014	8/1/2014	9/1/2014	10/2/2014	11/1/2014	12/2/2014	1/1/2015
-0.9	0.3	0.2	0.2	0.2	 -	-0.2	0.2	-0.1	0.1	 -	0.1	2/1/2015	3/4/2015	4/1/2015	5/2/2015	6/1/2015	7/2/2015	8/1/2015	9/1/2015	10/2/2015	11/1/2015	12/2/2015	1/1/2016
-0.8	0.2	0.4	0.1	0.2	0.2	-0.1	0.3	0.2	0.1	0.2	0.5	2/1/2016	3/3/2016	4/1/2016	5/2/2016	6/1/2016	7/2/2016	8/1/2016	9/1/2016	10/2/2016	11/1/2016	12/2/2016	1/1/2017
-0.5	0.7	0.4	..	..	..	..	..	..	..	..	..	2/1/2017	3/4/2017	4/1/2017	5/2/2017	6/1/2017	7/2/2017	8/1/2017	9/1/2017	10/2/2017	11/1/2017	12/2/2017	1/1/2018"""

from datetime import date
from timeseries_db import TimeseriesDB
from decimal import Decimal

lines = all_txt.split('\n')

dates = sum([line.split('\t')[12:] for line in lines], [])
values = sum([line.split('\t')[:12] for line in lines], [])

tdb = TimeseriesDB()


def is_number(s):
    try:
        float(s)
        return True
    except ValueError:
        return False


date_and_values = [(date, value) for date, value in zip(dates, values)
                   if is_number(value)]


def transform(d):
Example #16
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 22 09:12:31 2017

@author: calvin-pc
"""

from datetime import date
import csv
from timeseries_db import TimeseriesDB
from decimal import Decimal

tdb = TimeseriesDB()

for s in tdb.get_all_series():
    tdb.delete_series(s.name)

symbol_data = {}
with open('historic_rates.csv', 'r') as csvfile:
    spamreader = csv.reader(csvfile, delimiter=',')
    headers = next(spamreader)
    symbols = headers[1:]
    for symbol in symbols:
        symbol_data[symbol] = []
    for row in spamreader:
        bulan, hari, tahun = row[0].split('/')  # month, day, year (US-style date string)
        tanggal = date(int(tahun[:4]), int(bulan), int(hari))  # tanggal = the row's date
        for symbol, val in zip(symbols, row[1:]):
            if val is not None and val != "":
                symbol_data[symbol].append({
                    'date': tanggal,