Example #1
class CollectStgy(stgy.AbstractStgy):
    def __init__(self, config):
        super(CollectStgy, self).__init__()
        self.config = config
        self.p = re.compile(r"^(\D+)\d{4}$")
        self.logger = LoggerFactory().create_logger(self.config["stgy_id"], level=self.config["log_level"])

        self.symbol_list_map_subscribed = {}
        self.symbol_list_map_all = {}

        if not self.config["just_log"]:
            self.thread_num = self.config["thread_num"]
            from kafka import KafkaProducer
            import happybase
            self.producer = KafkaProducer(
                bootstrap_servers='hadoop201:9092,hadoop203:9092,hadoop204:9092',
                # value_serializer=lambda v: json.dumps(v).encode('utf-8'),
            )
            self.pool = happybase.ConnectionPool(size=self.thread_num, host='hadoop201',
                                                 autoconnect=True)  # , transport='framed', protocol='binary')
            self.families = {
                'i': dict(max_versions=1),
            }
Example #2
def init_logger(config):
    logger = config["logger"]
    level = logger.get("level", "info")
    base_dir = logger.get("base_dir", "./log")
    def_name = logger.get("def_name", "default")
    async_size = logger.get("async_size", 4096)
    console_flag = logger.get("console_flag", True)

    iqt.LogFactory.init(level, base_dir, def_name, async_size, console_flag)
    # Truncate the C++ log files
    global log_keep_days
    LoggerFactory().trunc_log_dir(log_dir=base_dir, keep_days=log_keep_days)
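
For reference, a sketch of the config shape init_logger consumes; the keys under "logger" are exactly the ones read above, and the values shown are the function's own fallback defaults (the dict itself is hypothetical):

# Minimal config sketch for init_logger; keys mirror the lookups above,
# values are the defaults the function falls back to. Note that init_logger
# also needs the module-level log_keep_days (set in Example #6).
config = {
    "logger": {
        "level": "info",
        "base_dir": "./log",
        "def_name": "default",
        "async_size": 4096,
        "console_flag": True,
    }
}
init_logger(config)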
Example #3
import re
import json
from urllib import request
from urllib.error import URLError, HTTPError

from pyiqt.data.base_data_source import BaseDataSource
from pyiqt.model.bar import Bar
from pyiqt.model.tick import Tick

if __name__ == '__main__':
    # import os
    # import sys
    from pyiqt.util.logger import LoggerFactory
    # base_path = os.path.dirname(os.path.realpath(sys.argv[0]))
    # log_dir = base_path
    logger = LoggerFactory().create_logger(log_dir="../../pylog")
else:
    from pyiqt.util.logger import logger


class SinaDataSource(BaseDataSource):
    def __init__(self):
        self.headers = {}

        self.p = re.compile(r"^(IF|IC|IH|T|TF)\d{4}$", re.IGNORECASE)
        self.p_tl = re.compile(r"^.*\((.+)\).*$")

    def _get(self, url):
        req = request.Request(url, headers=self.headers, method="GET")
        try:
            rsp_data = request.urlopen(req).read().decode('gbk')
        except (HTTPError, URLError) as e:
            # The snippet is truncated here; logging the failure and returning
            # None is a minimal completion consistent with the imports above.
            logger.warn("request to {} failed: {}".format(url, e))
            return None
        return rsp_data
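
A hypothetical way to exercise this source; the hq.sinajs.cn quote URL and the contract code are assumptions about Sina's quote endpoint, not something taken from the snippet:

# Hypothetical usage sketch: fetch one raw quote string for an index future.
# The URL and contract code are assumptions, not part of the class above.
ds = SinaDataSource()
raw = ds._get("http://hq.sinajs.cn/list=CFF_RE_IF0")
if raw is not None:
    logger.info(raw)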
Example #4
import re
import simplejson

from pyiqt.stgy import portal_stgy as stgy
from pyiqt.model.tick import Tick
from pyiqt.util.logger import LoggerFactory

# MAIN_CONTRACT_SUFFIX, INDEX_CONTRACT_SUFFIX and CODE_INFO_TAB_NAME are
# module-level constants defined elsewhere in the original file.


class CollectStgy(stgy.AbstractStgy):
    def __init__(self, config):
        super(CollectStgy, self).__init__()
        self.config = config
        self.p = re.compile(r"^(\D+)\d{4}$")
        self.logger = LoggerFactory().create_logger(self.config["stgy_id"], level=self.config["log_level"])

        self.symbol_list_map_subscribed = {}
        self.symbol_list_map_all = {}

        if not self.config["just_log"]:
            self.thread_num = self.config["thread_num"]
            from kafka import KafkaProducer
            import happybase
            self.producer = KafkaProducer(
                bootstrap_servers='hadoop201:9092,hadoop203:9092,hadoop204:9092',
                # value_serializer=lambda v: json.dumps(v).encode('utf-8'),
            )
            self.pool = happybase.ConnectionPool(size=self.thread_num, host='hadoop201',
                                                 autoconnect=True)  # , transport='framed', protocol='binary')
            self.families = {
                'i': dict(max_versions=1),
            }

    def _worker(self, start, step, list_list):
        with self.pool.connection() as connection:
            connection.open()
            table_names = set(connection.tables())
            new_main_table_names = set()
            new_index_table_names = set()
            for i in range(start, len(list_list), step):
                for order_book_id in list_list[i]:
                    matcher = re.match(self.p, order_book_id)
                    if not matcher:
                        continue
                    main_contract = matcher.group(1) + MAIN_CONTRACT_SUFFIX
                    index_contract = matcher.group(1) + INDEX_CONTRACT_SUFFIX
                    for frequency in self.config["frequencies"]:
                        frequency = frequency.strip()
                        table_name = "f_{}:{}".format(frequency, order_book_id)
                        main_table_name = "f_{}:{}".format(frequency, main_contract)
                        index_table_name = "f_{}:{}".format(frequency, index_contract)
                        if table_name.encode('utf-8') not in table_names:
                            self.logger.warn("create table: {}".format(table_name))
                            connection.create_table(table_name, self.families)
                        if frequency != "tick" and main_table_name.encode("utf-8") not in table_names \
                                and main_table_name not in new_main_table_names:
                            self.logger.warn("create table: {}".format(main_table_name))
                            connection.create_table(main_table_name, self.families)
                            new_main_table_names.add(main_table_name)
                        if frequency != "tick" and index_table_name.encode("utf-8") not in table_names \
                                and index_table_name not in new_index_table_names:
                            self.logger.warn("create table: {}".format(index_table_name))
                            connection.create_table(index_table_name, self.families)
                            new_index_table_names.add(index_table_name)

    def _check_tables(self, context):
        with self.pool.connection() as connection:
            connection.open()
            table_names = set(connection.tables())

            if CODE_INFO_TAB_NAME.encode("utf-8") not in table_names:
                connection.create_table(CODE_INFO_TAB_NAME, self.families)
            # self.logger.debug("tables:\n{}".format(table_names))

        import threading
        # self._check_tables(context)
        list_list = list(self.symbol_list_map_subscribed.values())
        for i in range(self.thread_num):
            threading.Thread(target=self._worker, args=(i, self.thread_num, list_list), name='thread-' + str(i)) \
                .start()

    def _build_symbol_list_map(self, universe, symbol_list_map):
        for order_book_id in universe:
            matcher = re.match(self.p, order_book_id)
            if not matcher:
                continue
            underlying_symbol = matcher.group(1)
            if underlying_symbol not in symbol_list_map:
                symbol_list_map[underlying_symbol] = [order_book_id]
            else:
                symbol_list_map[underlying_symbol].append(order_book_id)

    def init(self, context):

        if not self.config["just_log"]:
            self._build_symbol_list_map(context.universe, self.symbol_list_map_subscribed)

            from pyiqt import portal_iqt
            instruments = portal_iqt.Env.instruments
            self._build_symbol_list_map(instruments.keys(), self.symbol_list_map_all)

            self._check_tables(context)

        self.logger.warn("collect init")
        return True

    def handle_tick(self, context, tick):
        # self.logger.debug("collect handle_tick")
        tick.fix_tick()
        # self.logger.debug(str(tick.trading_date))

        new_tick_json = simplejson.dumps(Tick(tick).__dict__)
        self.logger.info(new_tick_json)
        # self.producer.send("test", json.dumps(new_tick).encode("utf-8"))
        if not self.config["just_log"]:
            self.producer.send(self.config["topic"], key=re.match(self.p, tick.order_book_id).group(1).encode('utf-8'),
                               value=new_tick_json.encode('utf-8'))

    def before_trading(self, context):
        self.logger.debug("before_trading")
        if not self.config["just_log"]:
            with self.pool.connection() as connection:
                connection.open()
                map_json = simplejson.dumps(self.symbol_list_map_all)
                self.logger.info("map_json: " + map_json)
                table = connection.table(CODE_INFO_TAB_NAME)
                table.put("UNDERLYING_SYMBOL_LIST_MAP", {"i:value": map_json})

    def after_trading(self, context):
        self.logger.debug("after_trading")
Example #5
# -*- coding: utf-8 -*-
import threading

import redis
import json
# from pyiqt.stgy import portal_stgy as stgy
from pyiqt.util.decorator import singleton
from pyiqt.util.logger import LoggerFactory

logger = LoggerFactory().get_logger()


@singleton
class SingleGridStgyHelper:
    def __init__(self):
        pool = redis.ConnectionPool(host='59.41.39.19',
                                    port=16380,
                                    password='******',
                                    db=0)
        # pool = redis.ConnectionPool(host='192.168.0.203', port=6379, db=0)
        self._r = redis.StrictRedis(connection_pool=pool)
        self._thread = threading.Thread(target=self._run)
        self._stgy_map = {}

    def start(self):
        self._thread.start()

    def _run(self):
        p = self._r.pubsub()
        p.subscribe('C_Signal_20171009')
        while True:
            # The loop body is truncated in the original snippet; polling the
            # channel and decoding the JSON payload is a minimal completion.
            message = p.get_message(timeout=1.0)
            if message and message['type'] == 'message':
                signal = json.loads(message['data'])
                logger.info("signal received: {}".format(signal))
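
Usage is a one-time construction plus start; assuming @singleton behaves the usual way, repeated constructions return the same instance:

# Hypothetical usage: build the singleton and start the subscriber thread.
helper = SingleGridStgyHelper()
helper.start()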
Example #6
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import setproctitle
import threading

from pyiqt.const import RUN_TYPE
from pyiqt.util.logger import LoggerFactory
from pyiqt.util.redis import RedisFactory
from pyiqt import portal_iqt as iqt
from pyiqt.ctp import portal_ctp as ctp

logger = LoggerFactory().create_logger()

app_base_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
log_keep_days = 3
# Truncate the Python log files
LoggerFactory().trunc_log_dir(keep_days=log_keep_days)

# Set the process name
setproctitle.setproctitle("main.py")

keep_alive_list = []


def term(sig_num=None, addition=None):
    import signal
    logger.info('current pid is %s, group id is %s' % (os.getpid(), os.getpgrp()))
    # Restore the default SIGTERM handler before killing the group: the
    # original reset it after os.killpg, which never runs because SIGKILL
    # terminates this process as well, and signal.signal() rejects None.
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    os.killpg(os.getpgid(os.getpid()), signal.SIGKILL)
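
Presumably term is installed as a SIGTERM handler; a sketch of the registration (the call site is an assumption, but the (sig_num, addition) signature matches the (signum, frame) pair Python passes to handlers):

# Hypothetical registration of term as the SIGTERM handler.
import signal
signal.signal(signal.SIGTERM, term)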