Example #1
# -*- coding: utf-8 -*-
# @time      : 2018/10/19 11:45
# @author    : [email protected]
# @file      : dl_reg_models.py

from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Activation
from keras.optimizers import SGD
from quant_models.utils.logger import Logger
from quant_models.model_processing.models import Model
from quant_models.utils.helper import get_config

config = get_config()
logger = Logger(log_level='DEBUG', handler='ch').get_log()


class Dl_Reg_Model(Model):
    def __init__(self, model_name='linear'):
        self.model_name = model_name
        # Fall back to 0.1 when the config key is absent; the original
        # `float(...) or 0.1` pattern only applied when the parsed value was 0.
        learning_rate = float(config['dl_reg_model'].get('learning_rate', 0.1))
        self.defsgd = SGD(lr=learning_rate)

    def build_model(self, **kwargs):
        # Read hyper-parameters from config, falling back to defaults when a
        # key is absent (the original `int(...) or default` pattern would fail
        # on a missing key).
        units = int(config['dl_reg_model'].get('units', 1))
        input_dim = int(config['dl_reg_model'].get('input_dim', 1))
        opt = config['dl_reg_model'].get('optimizer', 'sgd')
        loss = config['dl_reg_model'].get('loss', 'mse')
        activation = config['dl_reg_model'].get('activation', 'tanh')
        hidden_units = int(config['dl_reg_model'].get('hidden_units', 10))
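The example is cut off at this point. Below is only a minimal sketch of how build_model might continue, assuming a single hidden layer feeding a linear output and compilation with the configured optimizer and loss; the self.model attribute and the layer arrangement are assumptions, not taken from the source:

        # Sketch (assumed, not from the original file): stack a hidden Dense
        # layer, the configured activation, and a linear output layer, then
        # compile with the configured optimizer and loss.
        model = Sequential()
        model.add(Dense(units=hidden_units, input_dim=input_dim))
        model.add(Activation(activation))
        model.add(Dense(units=units))
        model.compile(optimizer=self.defsgd if opt == 'sgd' else opt, loss=loss)
        self.model = model
        return model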
Example #2
# -*- coding: utf-8 -*-
# @time      : 2019/1/3 12:11
# @author    : [email protected]
# @file      : data_fetcher_db.py

import pyodbc
from collections import defaultdict

import pandas as pd
from WindPy import w
from quant_models.utils.logger import Logger
from quant_models.utils.oracle_helper import OracleHelper

logger = Logger(log_level='INFO', handler='ch').get_log()
w.start()


class DataFetcherDB(object):
    def __init__(self):
        self.datayes_config = {
            "user": "******",
            "pwd": "Gfangm1023_cms2019",
            "host": "10.200.40.170",
            "port": 1521,
            "dbname": "clouddb",
            "mincached": 0,
            "maxcached": 1
        }
        self._dyobj = OracleHelper(self.datayes_config)
        self._jyobj = pyodbc.connect(
            'DRIVER={SQL Server};SERVER=172.21.6.196;DATABASE=JYDB;UID=yfeb;PWD=yfeb'
        )
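pandas is imported above but not used in the visible part of the file, so presumably query results are loaded into DataFrames further down. A hedged sketch of such a helper (the method name fetch_jy_frame and reading straight off the pyodbc connection are assumptions, not part of the source):

    def fetch_jy_frame(self, sql):
        # Hypothetical helper, not in the original source: run a query against
        # the JYDB connection and return the rows as a pandas DataFrame.
        # pandas.read_sql accepts a DBAPI connection such as the pyodbc one above.
        return pd.read_sql(sql, self._jyobj)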
Example #3
# -*- coding: utf-8 -*-
# @time      : 2018/11/7 10:46
# @author    : [email protected]
# @file      : data_fetcher.py

from quant_models.utils.decorators import parallel_pool
from quant_models.utils.io_utils import write_json_file
from quant_models.utils.logger import Logger
from quant_models.data_processing.data_fetcher_cache import DataFetcherCache
from quant_models.data_processing.data_fetcher_db import DataFetcherDB
from quant_models.data_processing.date_fetcher_api import DataFetcherAPI

logger = Logger(log_level='INFO').get_log()


class DataFetcher(object):
    def __init__(self, source=0):
        '''
        :param source: int; 0 for db, 1 for cache, 2 for api
        '''
        self._source = source
        self._db_obj = DataFetcherDB()
        self._cache_obj = DataFetcherCache()
        self._db_api = DataFetcherAPI()

    def get_data_fetcher_obj(self, source=None):
        # `source or self._source` would silently ignore an explicit source=0
        # (the db back-end), so only fall back when no source is given.
        if source is None:
            source = self._source
        return {
            0: self._db_obj,
            1: self._cache_obj,
            2: self._db_api
        }.get(source)
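A short usage sketch for the selection logic above (illustrative only; constructing DataFetcher opens the database connections shown in the previous example):

# Illustrative usage: the default source picked at construction time is used
# unless a source is passed explicitly.
fetcher = DataFetcher(source=0)
db_backend = fetcher.get_data_fetcher_obj()              # -> DataFetcherDB
cache_backend = fetcher.get_data_fetcher_obj(source=1)   # -> DataFetcherCache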
Example #4
# @time      : 2018/10/19 11:45
# @author    : [email protected]
# @file      : decorators.py

import time
from quant_models.utils.logger import Logger
from threading import Thread
from functools import wraps
import multiprocessing.pool as mpp
import threading

from math import factorial

logger = Logger('log.txt', 'INFO', __name__).get_log()


def timeit(func):
    @wraps(func)  # preserve the wrapped function's name/docstring (wraps is imported above)
    def timed(*args, **kwargs):
        ts = time.time()
        result = func(*args, **kwargs)
        te = time.time()
        # logger.info('%r (%r, %r) %2.2f sec' % (func.__name__, args, kwargs, te - ts))
        print('%r (%r, %r) %2.2f sec' % (func.__name__, args, kwargs, te - ts))
        return result

    return timed
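A small usage sketch for timeit (the decorated function below is made up for illustration):

@timeit
def slow_sum(n):
    # Toy workload, only here to exercise the timing wrapper.
    return sum(range(n))


# Prints something like: 'slow_sum' ((10000000,), {}) 0.21 sec
slow_sum(10000000)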


def limit(number):
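The file is truncated at limit, so its real body is not shown. The following is only a plausible sketch, assuming the decorator caps how many threads may execute the wrapped function at once (in line with the threading import above); it is not the author's actual implementation:

    # Assumed implementation: one semaphore shared by every call to the
    # decorated function limits concurrent executions to `number`.
    semaphore = threading.Semaphore(number)

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with semaphore:
                return func(*args, **kwargs)
        return wrapper

    return decorator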
Example #5
from quant_models.utils.helper import get_config
from quant_models.utils.helper import get_parent_dir
from quant_models.utils.helper import list_files
from quant_models.utils.helper import adjusted_sma
from quant_models.utils.helper import get_source_root
from collections import defaultdict
from collections import OrderedDict
from quant_models.data_processing.data_fetcher import DataFetcher
from quant_models.utils.io_utils import load_json_file
from quant_models.utils.io_utils import write_json_file
from quant_models.utils.logger import Logger
from quant_models.utils.sql_lite_helper import SQLiteHelper

numerical_default = 0.0
config = get_config()

logger = Logger('log.txt', 'DEBUG', __name__).get_log()

_sgn = lambda val: 1 if val >= 0 else -1
# FIXME: check that the DB connection is closed
g_db_fetcher = DataFetcher()


def get_idx_cons_dy(security_id='', index_date=None, source=0):
    _df = g_db_fetcher.get_data_fetcher_obj(source)
    if source == 1:
        ticker = security_id.split('.')[0]
        return _df.get_idx_cons_dy(ticker=ticker, index_date=index_date)
    search_id_mapping = {
        '000001.XSHG': 1,  # SSE Composite Index
        '000300.XSHG': 1782,  # CSI 300
        '000016.XSHG': 28,  # SSE 50
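The index-to-ID mapping continues beyond this point in the original file. For orientation, a hedged call sketch against the signature above, kept inert because it needs the live DB/cache back-ends (the date format is a guess):

# cons = get_idx_cons_dy(security_id='000300.XSHG', index_date='2018-12-28', source=0)  # DB fetcher
# cons = get_idx_cons_dy(security_id='000300.XSHG', index_date='2018-12-28', source=1)  # cache fetcher, keys on '000300'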