Example #1
async def QA_fetch_stock_day(code,
                             start,
                             end,
                             format='numpy',
                             frequence='day',
                             collections=DATABASE_ASYNC.stock_day):

    'Fetch stock daily-bar data'
    start = str(start)[0:10]
    end = str(end)[0:10]
    #code= [code] if isinstance(code,str) else code

    # code checking
    code = QA_util_code_tolist(code)

    if QA_util_date_valid(end):

        __data = []
        cursor = collections.find({
            'code': {
                '$in': code
            },
            "date_stamp": {
                "$lte": QA_util_date_stamp(end),
                "$gte": QA_util_date_stamp(start)
            }
        })
        #res=[QA_util_dict_remove_key(data, '_id') for data in cursor]
        try:
            res = pd.DataFrame([item async for item in cursor])
        except SyntaxError:
            print('This Python version does not support "async for"')
        try:
            res = res.drop(
                '_id', axis=1).assign(volume=res.vol).query('volume>1').assign(
                    date=pd.to_datetime(res.date)).drop_duplicates(
                        (['date', 'code'])).set_index('date', drop=False)
            res = res.loc[:, [
                'code', 'open', 'high', 'low', 'close', 'volume', 'amount',
                'date'
            ]]
        except Exception:
            res = None
        if format in ['P', 'p', 'pandas', 'pd']:
            return res
        elif format in ['json', 'dict']:
            return QA_util_to_json_from_pandas(res)
        # other supported output formats
        elif format in ['n', 'N', 'numpy']:
            return numpy.asarray(res)
        elif format in ['list', 'l', 'L']:
            return numpy.asarray(res).tolist()
        else:
            print(
                "QA Error QA_fetch_stock_day format parameter %s is none of \"P, p, pandas, pd, json, dict, n, N, numpy, list, l, L\""
                % format)
            return None
    else:
        QA_util_log_info(
            'QA Error QA_fetch_stock_day data parameter start=%s end=%s is not valid'
            % (start, end))
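A minimal usage sketch for the coroutine above (a sketch under assumptions: a running MongoDB behind DATABASE_ASYNC, the function importable as defined, and a placeholder code/date range):

import asyncio

async def main():
    # fetch daily bars for one code as a pandas DataFrame ('pd' format)
    df = await QA_fetch_stock_day('000001', '2019-01-01', '2019-12-31',
                                  format='pd')
    print(df.head() if df is not None else 'no data')

asyncio.run(main())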
Example #2
def QA_fetch_backtest_info(user=None, account_cookie=None, strategy=None,
                           stock_list=None, collections=DATABASE.backtest_info):
    # build the Mongo query from whichever parameters are not None, then
    # return every matching record (minus MongoDB's '_id' field) as json
    query = QA_util_to_json_from_pandas(
        pd.DataFrame([user, account_cookie, strategy, stock_list],
                     index=['user', 'account_cookie', 'strategy',
                            'stock_list']).dropna().T)[0]
    return QA_util_to_json_from_pandas(
        pd.DataFrame([item for item in collections.find(query)])
        .drop(['_id'], axis=1))
Example #3
def QA_fetch_backtest_history(cookie=None, collections=DATABASE.backtest_history):
    # same pattern as above, keyed only on the backtest cookie
    query = QA_util_to_json_from_pandas(
        pd.DataFrame([cookie], index=['cookie']).dropna().T)[0]
    return QA_util_to_json_from_pandas(
        pd.DataFrame([item for item in collections.find(query)])
        .drop(['_id'], axis=1))
Example #4
    def __saving_work(code, coll_option_day):
        try:
            QA_util_log_info('##JOB12 Now Saving OPTION_DAY==== {}'.format(
                str(code)),
                             ui_log=ui_log)

            # first check whether the database already has data for this code
            # (option codes are numbered starting from 10000001, e.g. 10001228)
            ref = coll_option_day.find({'code': str(code)[0:8]})
            ref_count = coll_option_day.count_documents(
                {'code': str(code)[0:8]})
            end_date = str(now_time())[0:10]

            # the database already holds this code, so update incrementally;
            # without this check a newly listed contract (empty collection)
            # would trigger a negative-index error below
            if ref_count > 0:

                # resume from the last stored date
                start_date = ref[ref_count - 1]['date']
                QA_util_log_info('the last stored option daily-bar date is {}'.format(start_date),
                                 ui_log=ui_log)

                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n resuming from the last download, trying to update {} from {} to {}'
                    .format(code, start_date, end_date),
                    ui_log=ui_log)
                if start_date != end_date:

                    start_date0 = QA_util_get_next_day(start_date)
                    df0 = QA_fetch_get_option_day(code=code,
                                                  start_date=start_date0,
                                                  end_date=end_date,
                                                  frequence='day',
                                                  ip=None,
                                                  port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 返回了{}条记录 , 准备写入数据库".format(
                            start_date0, end_date, code, retCount),
                        ui_log=ui_log)
                    coll_option_day.insert_many(
                        QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("^已经获取过这天的数据了^ {}".format(start_date),
                                     ui_log=ui_log)

            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_OPTION_DAY \n starting a fresh download, trying to update {} from {} to {}'
                    .format(code, start_date, end_date),
                    ui_log=ui_log)
                if start_date != end_date:

                    df0 = QA_fetch_get_option_day(code=code,
                                                  start_date=start_date,
                                                  end_date=end_date,
                                                  frequence='day',
                                                  ip=None,
                                                  port=None)
                    retCount = df0.iloc[:, 0].size
                    QA_util_log_info(
                        "日期从开始{}-结束{} , 合约代码{} , 获取了{}条记录 , 准备写入数据库^_^ ".
                        format(start_date, end_date, code, retCount),
                        ui_log=ui_log)

                    coll_option_day.insert_many(
                        QA_util_to_json_from_pandas(df0))
                else:
                    QA_util_log_info("*已经获取过这天的数据了* {}".format(start_date),
                                     ui_log=ui_log)

        except Exception as error0:
            print(error0)
            err.append(str(code))
Example #5
def QA_fetch_get_stock_realtime():
    data = ts.get_today_all()
    data_json = QA_util_to_json_from_pandas(data)
    return data_json
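A usage sketch (assumes the tushare package is imported as ts, as the function requires, and that its data service is reachable):

snapshot = QA_fetch_get_stock_realtime()
# QA_util_to_json_from_pandas yields a list of dicts, one per stock
print(len(snapshot), 'records; first:', snapshot[0] if snapshot else None)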
Example #6
def QA_SU_save_huobi_symbol(market=huobi_EXCHANGE, client=DATABASE,):
    """
    保存火币交易对信息
    """
    market =  market.upper()
    QA_util_log_info('Downloading {:s} symbol list...'.format(market))

    # keep the raw Huobi API symbol data for reference; useful for automated trading
    raw_symbol_lists = QA_util_save_raw_symbols(QA_fetch_huobi_symbols, market)

    if (len(raw_symbol_lists) > 0):
        # save into QUANTAXIS.cryptocurrency_list, the digital-asset list used for unified cross-market queries
        symbol_lists = pd.DataFrame(raw_symbol_lists)

        # market and symbol are MongoDB index fields; make sure they exist before saving
        symbol_lists['market'] = market
        symbol_lists['category'] = 1
        symbol_lists['name'] = symbol_lists.apply(
            lambda x: '{:s}/{:s}'.
            format(x['base-currency'].upper(),
                   x['quote-currency'].upper()),
            axis=1
        )
        symbol_lists['desc'] = symbol_lists.apply(
            lambda x: 'spot: {:s} to {:s}'.
            format(x['base-currency'],
                   x['quote-currency']),
            axis=1
        )

        # drop fields that only a broker cares about; read them from the exchange's
        # raw_symbol_lists when building that exchange's broker interface. Huobi is
        # quite idiosyncratic: note the hyphens (not underscores) in its field names!
        symbol_lists.drop(
            [
                'amount-precision',
                'leverage-ratio',
                'max-order-amt',
                'min-order-amt',
                'min-order-value',
                'symbol-partition',
                'value-precision'
            ],
            axis=1,
            inplace=True
        )
        if ('_id' in symbol_lists.columns.values):
            # sometimes present; must be dropped separately
            symbol_lists.drop(
                [
                    '_id',
                ],
                axis=1,
                inplace=True
            )

        symbol_lists['created_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple())
        )
        symbol_lists['updated_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple())
        )

        coll_cryptocurrency_list = client.cryptocurrency_list
        coll_cryptocurrency_list.create_index(
            [('market',
              pymongo.ASCENDING),
             ('symbol',
              pymongo.ASCENDING)],
            unique=True
        )
        try:
            query_id = {'market': market}
            if (coll_cryptocurrency_list.count_documents(query_id) > 0):
                # remove duplicate records
                query_id = {
                    'market': market,
                    'symbol': {
                        '$in': symbol_lists['symbol'].tolist()
                    }
                }
                coll_cryptocurrency_list.delete_many(query_id)
            coll_cryptocurrency_list.insert_many(
                QA_util_to_json_from_pandas(symbol_lists)
            )
            return symbol_lists
        except Exception:
            QA_util_log_expection(
                'QA_SU_save_huobi_symbol(): Insert_many(symbol) to "cryptocurrency_list" got Exception with {} klines'
                .format(len(symbol_lists))
            )
        return []
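The delete-then-insert refresh above recurs in the other QA_SU_save_*_symbol examples below. A generic sketch of the pattern, with refresh_symbols as a hypothetical helper name (coll is any pymongo collection carrying the unique (market, symbol) index):

def refresh_symbols(coll, market, symbol_lists):
    query_id = {
        'market': market,
        'symbol': {'$in': symbol_lists['symbol'].tolist()}
    }
    if coll.count_documents(query_id) > 0:
        coll.delete_many(query_id)  # drop the stale snapshot first
    coll.insert_many(QA_util_to_json_from_pandas(symbol_lists))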
Example #7
def QA_data_fetch(code='',
                  start='all',
                  end=None,
                  data_type='stock_day',
                  frequence='',
                  format='numpy'):
    """'fetch data from database'
    :param code: code list of stock, index, future, etf
    :param start: date of start, can be str/int of 2019, 201901, '2019-01', '2019-01-01'
    :param end: date of end, can be str/int of 2019, 201901, '2019-01', '2019-01-01'
    :param data_type: stock_day, stock_min etc
    :param frequence: minute freq of min data like 15, 30min
    Returns:
        [type] -- [description]

        感谢@几何大佬的提示
        https://docs.mongodb.com/manual/tutorial/project-fields-from-query-results/#return-the-specified-fields-and-the-id-field-only

    """

    start, end = filter_dates(start, end)
    if end is None:
        QA_util_log_info(
            'QA Error QA_data_fetch data parameter start=%s end=%s is not valid'
            % (start, end))
        return None

    if frequence:
        if str(frequence).split('m')[0] in ['1', '5', '15', '30', '60']:
            frequence = str(frequence).split('m')[0] + 'min'
            start = '{} 09:30:00'.format(start)
            end = '{} 15:00:00'.format(end)
        else:
            print(
                "QA Error QA_data_fetch parameter frequence=%s is none of 1min 1m 5min 5m 15min 15m 30min 30m 60min 60m"
                % frequence)
            return None

    db_collection = _database_collections(data_type=data_type,
                                          code=code,
                                          start=start,
                                          end=end,
                                          frequence=frequence)

    # code checking
    # currently only for stock codes
    # todo: check future codes
    code = QA_util_code_tolist(code)

    __data = []

    # return None if db_collection is an error message
    if isinstance(db_collection, str):
        print(db_collection)
        return None

    collections = db_collection['collection']

    try:
        cursor = collections.find(db_collection['query'], {"_id": 0},
                                  batch_size=10000)
        res = pd.DataFrame([item for item in cursor])
        if 'datetime' not in res.columns:
            try:
                res.rename({'date': 'datetime'}, axis=1, inplace=True)
            except Exception:
                pass
        else:
            res = res.assign(type=frequence)
        if 'vol' in res.columns:
            res = res.assign(
                volume=res.vol, datetime=pd.to_datetime(
                    res.datetime)).query('volume>1').drop_duplicates(
                        ['datetime', 'code']).set_index('datetime', drop=False)
        else:
            res = res.assign(
                datetime=pd.to_datetime(res.datetime)).drop_duplicates(
                    ['datetime', 'code']).set_index('datetime', drop=False)
        res = round(res[db_collection['columns']], 2)
    except Exception:
        res = None
    if format in ['P', 'p', 'pandas', 'pd']:
        return res
    elif format in ['json', 'dict']:
        return QA_util_to_json_from_pandas(res)
    # other supported output formats
    elif format in ['n', 'N', 'numpy']:
        return numpy.asarray(res)
    elif format in ['list', 'l', 'L']:
        return numpy.asarray(res).tolist()
    else:
        print(
            "QA Error QA_data_fetch format parameter %s is none of \"P, p, pandas, pd, json, dict, n, N, numpy, list, l, L\""
            % format)
        return None
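A usage sketch (assumes a populated QUANTAXIS database; parameter spellings follow the signature above, and the code is a placeholder):

bars = QA_data_fetch(code='000001',
                     start='2019-01',
                     end='2019-06',
                     data_type='stock_day',
                     format='pd')
if bars is not None:
    print(bars.tail())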
Example #8
def QA_SU_save_bitfinex_symbol(market="bitfinex", client=DATABASE, ):
    """
    保存Bitfinex交易对信息
    """
    QA_util_log_info('Downloading {:s} symbol list...'.format(market))

    # keep the raw Bitfinex API symbol data for reference; useful for automated trading
    raw_symbol_lists = QA_util_save_raw_symbols(
        QA_fetch_bitfinex_symbols,
        market
    )
    if (len(raw_symbol_lists) > 0):
        # save into QUANTAXIS.crypto_asset_list, the digital-asset list used for unified cross-market queries
        symbol_lists = pd.DataFrame(raw_symbol_lists)

        # market and symbol are MongoDB index fields; make sure they exist before saving
        symbol_lists['market'] = market
        symbol_lists['category'] = 1
        symbol_lists.rename(
            {
                'baseAssetPrecision': 'price_precision',
                'baseAsset': 'base_currency',
                'quoteAsset': 'quote_currency',
                'status': 'state',
            },
            axis=1,
            inplace=True
        )
        symbol_lists['name'] = symbol_lists.apply(
            lambda x: '{:s}/{:s}'.
            format(x['base_currency'].upper(),
                   x['quote_currency'].upper()),
            axis=1
        )
        symbol_lists['desc'] = symbol_lists['name']

        # drop fields that only a broker cares about; read them from the exchange's
        # raw_symbol_lists when building that exchange's broker interface.
        symbol_lists.drop(
            [
                'price_precision',
                'baseCommissionPrecision',
                'quotePrecision',
                'filters',
                'icebergAllowed',
                'isMarginTradingAllowed',
                'isSpotTradingAllowed',
                'ocoAllowed',
                'orderTypes',
                'quoteCommissionPrecision',
                'quoteOrderQtyMarketAllowed',
            ],
            axis=1,
            inplace=True
        )
        if ('_id' in symbol_lists.columns.values):
            # sometimes present; must be dropped separately
            symbol_lists.drop(
                [
                    '_id',
                ],
                axis=1,
                inplace=True
            )
        # drop pairs that are not currently trading
        symbol_lists = symbol_lists[symbol_lists['state'].isin(['TRADING'])]
        symbol_lists['created_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple())
        )
        symbol_lists['updated_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple())
        )

        coll_crypto_asset_list = client.crypto_asset_list
        coll_crypto_asset_list.create_index(
            [('market',
              pymongo.ASCENDING),
             ('symbol',
              pymongo.ASCENDING)],
            unique=True
        )
        try:
            query_id = {'market': market}
            if (coll_crypto_asset_list.count_documents(query_id) > 0):
                # remove duplicate records
                query_id = {
                    'market': market,
                    'symbol': {
                        '$in': symbol_lists['symbol'].tolist()
                    }
                }
                coll_crypto_asset_list.delete_many(query_id)
            coll_crypto_asset_list.insert_many(
                QA_util_to_json_from_pandas(symbol_lists)
            )
            return symbol_lists
        except Exception:
            QA_util_log_expection(
                'QA_SU_save_bitfinex_symbol: Insert_many(symbol) to "crypto_asset_list" got Exception with {} klines'
                .format(len(symbol_lists))
            )
        return []
Example #9
 def to_json(self):
     """
     转换DataStruct为json
     """
     return QA_util_to_json_from_pandas(self.data.reset_index())
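The reset_index() is presumably needed because QA_util_to_json_from_pandas emits one record per row from the columns only, so the datetime index has to be moved back into a column first. A small illustration with made-up data:

import pandas as pd

df = pd.DataFrame({'close': [10.0, 10.5]},
                  index=pd.to_datetime(['2020-01-02', '2020-01-03']))
df.index.name = 'date'
records = QA_util_to_json_from_pandas(df.reset_index())
# each dict in records now carries both 'date' and 'close'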
Example #10
 def from_dataframe(self, dataframe):
     self.list = [
         QA_QAMarket_bid().from_dict(item)
         for item in QA_util_to_json_from_pandas(dataframe)
     ]
     return self.list
Example #11
 def __saving_work(code, coll):
     QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                      ui_log=ui_log)
     try:
         for type_ in ["1min", "5min", "15min", "30min", "60min"]:
             col_filter = {"code": str(code)[5:], "type": type_}
             ref_ = coll.find(col_filter)
             end_time = str(now_time())[0:19]
             if coll.count_documents(col_filter) > 0:
                 start_time = ref_[coll.count_documents(col_filter) -
                                   1]["datetime"]
                 print(start_time)
                 QA_util_log_info(
                     "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                         ["1min", "5min", "15min", "30min",
                          "60min"].index(type_),
                         str(code)[5:],
                         start_time,
                         end_time,
                         type_,
                     ),
                     ui_log=ui_log,
                 )
                 if start_time != end_time:
                     df = history(symbol=code,
                                  start_time=start_time,
                                  end_time=end_time,
                                  frequency=MIN_SEC[type_],
                                  df=True)
                     __data = __transform_gm_to_qa(df, type_)
                     if len(__data) > 1:
                         # print(QA_util_to_json_from_pandas(__data)[1::])
                         # print(__data)
                         coll.insert_many(
                             QA_util_to_json_from_pandas(__data)[1::])
             else:
                 start_time = "2015-01-01 09:30:00"
                 QA_util_log_info(
                     "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                         ["1min", "5min", "15min", "30min",
                          "60min"].index(type_),
                         str(code)[5:],
                         start_time,
                         end_time,
                         type_,
                     ),
                     ui_log=ui_log,
                 )
                 if start_time != end_time:
                     df = history(symbol=code,
                                  start_time=start_time,
                                  end_time=end_time,
                                  frequency=MIN_SEC[type_],
                                  df=True)
                     __data = __transform_gm_to_qa(df, type_)
                     if len(__data) > 1:
                         # print(__data)
                         coll.insert_many(
                             QA_util_to_json_from_pandas(__data)[1::])
                         # print(QA_util_to_json_from_pandas(__data)[1::])
     except Exception as e:
         QA_util_log_info(e, ui_log=ui_log)
         err.append(code)
         QA_util_log_info(err, ui_log=ui_log)
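The resume logic above (and in Example #20 below) reduces to: read the datetime of the last stored bar, or fall back to a fixed epoch when the collection is empty. A condensed sketch mirroring the cursor-indexing idiom used above (resume_point is a hypothetical helper name):

def resume_point(coll, code, type_, epoch='2015-01-01 09:30:00'):
    flt = {'code': code, 'type': type_}
    n = coll.count_documents(flt)
    # indexing the cursor at n - 1 fetches the latest stored record
    return coll.find(flt)[n - 1]['datetime'] if n > 0 else epoch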
Example #12
 def from_dataframe(self, dataframe):
     bid_list = []
     for item in QA_util_to_json_from_pandas(dataframe):
         bid_list.append(self.from_dict(item))
     return bid_list
Example #13
    def __saving_work(self, code):
        def __QA_log_info(code, end_time, start_time):
            def loginfo(prefix='', astr='', listCounts=5):
                if len(self._loginfolist) < listCounts:
                    self._loginfolist.append(astr)
                else:
                    msg = ''
                    for i in range(len(self._loginfolist)):
                        msg += self._loginfolist[i] + ' '
                    msg += astr
                    QA_util_log_info(prefix.format(msg), self.ui_log)
                    self._loginfolist.clear()

            index_or_etf = self.get_index_or_etf_from_code(code)
            prefix = '##JOB04 Saving {}_DAY ==== Trying updating\n{}'.format(
                index_or_etf, '{}')
            loginfo(prefix, ' {} from {} to {}'.format(code, start_time,
                                                       end_time))
            # log_info = '##JOB04 Saving {}_DAY====\nTrying updating {} from {} to {}'.format(
            #     index_or_etf,
            #     code,
            #     start_time,
            #     end_time
            # )
            # QA_util_log_info(
            #     log_info,
            #     ui_log=self.ui_log
            # )

        try:
            search_cond = {'code': str(code)[0:6]}
            ref_ = get_coll().find(search_cond)
            ref_count = get_coll().count_documents(search_cond)

            end_time = str(now_time())[0:10]
            if ref_count > 0:
                start_time = ref_[ref_count - 1]['date']

                __QA_log_info(code, end_time, start_time)

                if start_time != end_time:
                    get_coll().insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code), QA_util_get_next_day(start_time),
                                end_time)))
            else:
                try:
                    start_time = '1990-01-01'
                    __QA_log_info(code, end_time, start_time)
                    get_coll().insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), start_time,
                                                   end_time)))
                except Exception as e:
                    start_time = '2009-01-01'
                    __QA_log_info(code, end_time, start_time)
                    get_coll().insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), start_time,
                                                   end_time)))
        except Exception as e:
            QA_util_log_info(e, ui_log=self.ui_log)
            self.err.append(str(code))
            QA_util_log_info(self.err, ui_log=self.ui_log)
Example #14
    def QA_fetch_eastmoney_stock_zjlx(
            self,
            str_stock_code=None,
            strStartDate=None,
            strEndDate=None,
            strFormat='numpy',
            collections=DATABASE.eastmoney_stock_zjlx):

        codeArray = QA_util_code_tolist(str_stock_code)
        if QA_util_date_valid(strEndDate):

            if strStartDate is None and strEndDate is None:

                cursor = collections.find({'stock_code': {'$in': codeArray}})

                items = [item for item in cursor]
                res = pd.DataFrame(items)

                # todo: reset the pandas index here

                if strFormat in ['P', 'p', 'pandas', 'pd']:
                    return res
                elif strFormat in ['json', 'dict']:
                    return QA_util_to_json_from_pandas(res)
                    # other supported output formats
                elif strFormat in ['n', 'N', 'numpy']:
                    return np.asarray(res)
                elif strFormat in ['list', 'l', 'L']:
                    return np.asarray(res).tolist()
                else:
                    print(
                        "QA Error QA_fetch_eastmoney_stock_zjlx format parameter %s is none of \"P, p, pandas, pd, json, dict, n, N, numpy, list, l, L\""
                        % strFormat)
                    return None
        else:
            cursor2 = collections.find({
                'stock_code': {
                    '$in': codeArray
                },
                'date': {
                    '$lte': strEndDate,
                    '$gte': strStartDate
                }
            })

            items = [item for item in cursor2]
            res = pd.DataFrame(items)

            # todo: reset the pandas index here

            if strFormat in ['P', 'p', 'pandas', 'pd']:
                return res
            elif strFormat in ['json', 'dict']:
                return QA_util_to_json_from_pandas(res)
                # other supported output formats
            elif strFormat in ['n', 'N', 'numpy']:
                return np.asarray(res)
            elif strFormat in ['list', 'l', 'L']:
                return np.asarray(res).tolist()
            else:
                print(
                    "QA Error QA_fetch_eastmoney_stock_zjlx format parameter %s is none of \"P, p, pandas, pd, json, dict, n, N, numpy, list, l, L\""
                    % strFormat)
                return None
            #
            # sizeRec = cursor2.count()
            # firstRec = cursor2[0]
            # lastRec = cursor2[sizeRec-1]
            # allItems2 = [item for item in cursor2]
            # print(allItems2)
        pass
Example #15
def QA_SU_save_index_quant_day(code=None,
                               start_date=None,
                               end_date=None,
                               ui_log=None,
                               ui_progress=None):
    if start_date is None:
        if end_date is None:
            start_date = QA_util_get_pre_trade_date(QA_util_today_str(), 1)
            end_date = QA_util_today_str()
        else:
            start_date = '2008-01-01'
    else:
        if end_date is None:
            end_date = QA_util_today_str()
        elif end_date < start_date:
            print('end_date should be later than start_date')
    if code is None:
        code = list(QA_fetch_index_list_adv()['code'])
        code = [i for i in code if i.startswith('880')]
        code = [i for i in code if not i.startswith('8800')]
        code = [i for i in code if not i.startswith('8807')]
        code = [i for i in code if not i.startswith('8808')]

    index = DATABASE.index_quant_data_index
    index.create_index([("code", ASCENDING), ("date_stamp", ASCENDING)],
                       unique=True)
    week = DATABASE.index_quant_data_week
    week.create_index([("code", ASCENDING), ("date_stamp", ASCENDING)],
                      unique=True)
    alpha = DATABASE.index_quant_data_alpha
    alpha.create_index([("code", ASCENDING), ("date_stamp", ASCENDING)],
                       unique=True)
    try:
        data1 = QA_fetch_get_index_quant_data(code, start_date, end_date)
    except Exception:
        data1 = None
    else:
        QA_util_log_info(
            '##JOB got Data index quant data ============== from {from_} to {to_} '
            .format(from_=start_date, to_=end_date), ui_log)
    deal_date_list = QA_util_get_trade_range(start_date, end_date)
    if deal_date_list is None:
        print('not a trading day')
    elif data1 is None:
        print('no index quant data fetched')
    else:
        for deal_date in deal_date_list:
            if QA_util_if_trade(deal_date):
                data = data1[data1['date'] == deal_date]
            else:
                data = None
            if data is not None:
                data = data.drop_duplicates((['code', 'date']))
                alpha_data = data[[
                    'code', 'date', 'date_stamp', 'alpha_001', 'alpha_002',
                    'alpha_003', 'alpha_004', 'alpha_005', 'alpha_006',
                    'alpha_007', 'alpha_008', 'alpha_009', 'alpha_010',
                    'alpha_012', 'alpha_013', 'alpha_014', 'alpha_015',
                    'alpha_016', 'alpha_017', 'alpha_018', 'alpha_019',
                    'alpha_020', 'alpha_021', 'alpha_022', 'alpha_023',
                    'alpha_024', 'alpha_025', 'alpha_026', 'alpha_028',
                    'alpha_029', 'alpha_031', 'alpha_032', 'alpha_033',
                    'alpha_034', 'alpha_035', 'alpha_036', 'alpha_037',
                    'alpha_038', 'alpha_039', 'alpha_040', 'alpha_041',
                    'alpha_042', 'alpha_044', 'alpha_045', 'alpha_046',
                    'alpha_047', 'alpha_048', 'alpha_049', 'alpha_052',
                    'alpha_053', 'alpha_054', 'alpha_055', 'alpha_056',
                    'alpha_057', 'alpha_058', 'alpha_059', 'alpha_061',
                    'alpha_062', 'alpha_063', 'alpha_064', 'alpha_065',
                    'alpha_066', 'alpha_067', 'alpha_068', 'alpha_071',
                    'alpha_072', 'alpha_074', 'alpha_077', 'alpha_078',
                    'alpha_080', 'alpha_082', 'alpha_083', 'alpha_085',
                    'alpha_086', 'alpha_087', 'alpha_088', 'alpha_089',
                    'alpha_090', 'alpha_091', 'alpha_092', 'alpha_093',
                    'alpha_096', 'alpha_098', 'alpha_099', 'alpha_102',
                    'alpha_103', 'alpha_104', 'alpha_105', 'alpha_106',
                    'alpha_107', 'alpha_108', 'alpha_109', 'alpha_113',
                    'alpha_114', 'alpha_115', 'alpha_116', 'alpha_117',
                    'alpha_118', 'alpha_119', 'alpha_120', 'alpha_122',
                    'alpha_123', 'alpha_124', 'alpha_125', 'alpha_126',
                    'alpha_129', 'alpha_130', 'alpha_133', 'alpha_134',
                    'alpha_135', 'alpha_138', 'alpha_139', 'alpha_141',
                    'alpha_142', 'alpha_145', 'alpha_148', 'alpha_152',
                    'alpha_153', 'alpha_156', 'alpha_158', 'alpha_159',
                    'alpha_160', 'alpha_161', 'alpha_162', 'alpha_163',
                    'alpha_164', 'alpha_167', 'alpha_168', 'alpha_169',
                    'alpha_170', 'alpha_171', 'alpha_172', 'alpha_173',
                    'alpha_175', 'alpha_176', 'alpha_177', 'alpha_178',
                    'alpha_179', 'alpha_184', 'alpha_185', 'alpha_186',
                    'alpha_187', 'alpha_188', 'alpha_189', 'alpha_191'
                ]]
                index_data = data[[
                    'code', 'date', 'date_stamp', 'AD', 'ADDI', 'ADDI_C',
                    'ADTM', 'ADX', 'ADXR', 'ADX_C', 'AD_C', 'AMA', 'ASI',
                    'ASIT', 'ATR', 'ATRR', 'BBI', 'BIAS1', 'BIAS2', 'BIAS3',
                    'BODY', 'BODY_ABS', 'BOLL', 'CCI', 'CHO', 'DDD', 'DDI',
                    'DDI_C', 'DEA', 'DI1', 'DI2', 'DIF', 'DI_M', 'KDJ_D',
                    'KDJ_J', 'KDJ_K', 'LB', 'MA1', 'MA10', 'MA120', 'MA180',
                    'MA2', 'MA20', 'MA3', 'MA4', 'MA5', 'MA60', 'MAADTM',
                    'MACD', 'MACHO', 'MAOSC', 'MAVPT', 'MFI', 'MFI_C',
                    'MIKE_BOLL', 'MR', 'MS', 'MTM', 'MTMMA', 'OBV', 'OBV_C',
                    'OSC', 'PRICE_PCG', 'ROC', 'ROCMA', 'RSI1', 'RSI1_C',
                    'RSI2', 'RSI2_C', 'RSI3', 'RSI3_C', 'RSV', 'SHA_LOW',
                    'SHA_UP', 'SKDJ_D', 'SKDJ_K', 'SR', 'SS', 'TR', 'UB',
                    'VPT', 'VR', 'VRSI', 'VRSI_C', 'VSTD', 'WIDTH', 'WR',
                    'WR1', 'WR2', 'WS', 'CCI_CROSS4', 'DMA_CROSS1',
                    'CDLMORNINGDOJISTAR', 'CDLSEPARATINGLINES', 'WR_CROSS1',
                    'KDJ_CROSS2', 'CDLHARAMICROSS', 'CDLEVENINGSTAR',
                    'BBI_CROSS2', 'VPT_CROSS1', 'CROSS_SC', 'CDLSHORTLINE',
                    'SKDJ_CROSS1', 'CDLABANDONEDBABY', 'CDL3STARSINSOUTH',
                    'CDLUNIQUE3RIVER', 'CDLKICKINGBYLENGTH', 'CDLHOMINGPIGEON',
                    'CDLTAKURI', 'CDL3BLACKCROWS', 'CDLSTICKSANDWICH',
                    'CDLTASUKIGAP', 'VPT_CROSS2', 'CDLSHOOTINGSTAR',
                    'CDLCONCEALBABYSWALL', 'WR_CROSS2', 'ADTM_CROSS1',
                    'BIAS_CROSS2', 'MTM_CROSS4', 'CCI_CROSS3', 'CDLHAMMER',
                    'CDLMARUBOZU', 'MACD_TR', 'CDL3INSIDE',
                    'CDLUPSIDEGAP2CROWS', 'MTM_CROSS1', 'CDLGRAVESTONEDOJI',
                    'KDJ_CROSS1', 'CDLMATHOLD', 'MIKE_TR', 'CDLLADDERBOTTOM',
                    'CDLMORNINGSTAR', 'OSC_CROSS2', 'OSC_CROSS4', 'ADX_CROSS2',
                    'DI_CROSS1', 'MTM_CROSS2', 'CDLDRAGONFLYDOJI',
                    'CCI_CROSS2', 'CDLSPINNINGTOP', 'CDLHIKKAKEMOD',
                    'DMA_CROSS2', 'MIKE_WRJC', 'CROSS_JC', 'OSC_CROSS3',
                    'RSI_CROSS1', 'MIKE_WSJC', 'MTM_CROSS3', 'CDLADVANCEBLOCK',
                    'BIAS_CROSS1', 'CDLCLOSINGMARUBOZU', 'CDL3OUTSIDE',
                    'VPT_CROSS3', 'CDLEVENINGDOJISTAR', 'CDL2CROWS',
                    'CDLHANGINGMAN', 'ADTM_CROSS2', 'CDLMATCHINGLOW',
                    'CDLHIKKAKE', 'CDLKICKING', 'CDLCOUNTERATTACK',
                    'CHO_CROSS1', 'CDLHARAMI', 'BBI_CROSS1', 'MIKE_WRSC',
                    'CDLINVERTEDHAMMER', 'CCI_CROSS1', 'CDLBREAKAWAY',
                    'CDLGAPSIDESIDEWHITE', 'DI_CROSS2', 'CDL3WHITESOLDIERS',
                    'CDLTRISTAR', 'CDLXSIDEGAP3METHODS', 'CDLPIERCING',
                    'VPT_CROSS4', 'CDLLONGLINE', 'CDLDOJI', 'CDLHIGHWAVE',
                    'CDLSTALLEDPATTERN', 'ADX_CROSS1', 'CDL3LINESTRIKE',
                    'CDLBELTHOLD', 'CDLINNECK', 'CDLONNECK', 'CDLRICKSHAWMAN',
                    'CDLTHRUSTING', 'CDLIDENTICAL3CROWS', 'SKDJ_CROSS2',
                    'CDLDOJISTAR', 'RSI_CROSS2', 'OSC_CROSS1',
                    'CDLRISEFALL3METHODS', 'CDLLONGLEGGEDDOJI', 'MIKE_WSSC',
                    'CDLDARKCLOUDCOVER', 'CHO_CROSS2', 'CDLENGULFING'
                ]]
                week_data = data[[
                    'code', 'date', 'date_stamp', 'AD_WK', 'ADDI_WK',
                    'ADDI_C_WK', 'ADTM_WK', 'ADX_WK', 'ADXR_WK', 'ADX_C_WK',
                    'AD_C_WK', 'AMA_WK', 'ASI_WK', 'ASIT_WK', 'ATR_WK',
                    'ATRR_WK', 'BBI_WK', 'BIAS1_WK', 'BIAS2_WK', 'BIAS3_WK',
                    'BODY_WK', 'BODY_ABS_WK', 'BOLL_WK', 'CCI_WK', 'CHO_WK',
                    'DDD_WK', 'DDI_WK', 'DDI_C_WK', 'DEA_WK', 'DI1_WK',
                    'DI2_WK', 'DIF_WK', 'DI_M_WK', 'KDJ_D_WK', 'KDJ_J_WK',
                    'KDJ_K_WK', 'LB_WK', 'MA1_WK', 'MA10_WK', 'MA120_WK',
                    'MA180_WK', 'MA2_WK', 'MA20_WK', 'MA3_WK', 'MA4_WK',
                    'MA5_WK', 'MA60_WK', 'MAADTM_WK', 'MACD_WK', 'MACHO_WK',
                    'MAOSC_WK', 'MAVPT_WK', 'MFI_WK', 'MFI_C_WK',
                    'MIKE_BOLL_WK', 'MR_WK', 'MS_WK', 'MTM_WK', 'MTMMA_WK',
                    'OBV_WK', 'OBV_C_WK', 'OSC_WK', 'PRICE_PCG_WK', 'ROC_WK',
                    'ROCMA_WK', 'RSI1_WK', 'RSI1_C_WK', 'RSI2_WK', 'RSI2_C_WK',
                    'RSI3_WK', 'RSI3_C_WK', 'RSV_WK', 'SHA_LOW_WK',
                    'SHA_UP_WK', 'SKDJ_D_WK', 'SKDJ_K_WK', 'SR_WK', 'SS_WK',
                    'TR_WK', 'UB_WK', 'VPT_WK', 'VR_WK', 'VRSI_WK',
                    'VRSI_C_WK', 'VSTD_WK', 'WIDTH_WK', 'WR_WK', 'WR1_WK',
                    'WR2_WK', 'WS_WK', 'CDLDRAGONFLYDOJI_WK', 'MIKE_WRJC_WK',
                    'CDLRICKSHAWMAN_WK', 'MIKE_WSSC_WK', 'DI_CROSS2_WK',
                    'CDLHARAMI_WK', 'BBI_CROSS2_WK', 'VPT_CROSS2_WK',
                    'CDLBELTHOLD_WK', 'CDLHAMMER_WK', 'CDL3INSIDE_WK',
                    'CDLTRISTAR_WK', 'OSC_CROSS1_WK', 'CDLMARUBOZU_WK',
                    'CDLTASUKIGAP_WK', 'CDLSPINNINGTOP_WK',
                    'CDLDARKCLOUDCOVER_WK', 'CDL3BLACKCROWS_WK',
                    'BIAS_CROSS2_WK', 'OSC_CROSS3_WK', 'CHO_CROSS1_WK',
                    'CDLMORNINGSTAR_WK', 'ADX_CROSS2_WK', 'CDLINNECK_WK',
                    'ADTM_CROSS2_WK', 'MACD_TR_WK', 'CDLDOJI_WK',
                    'MTM_CROSS1_WK', 'CDLCOUNTERATTACK_WK', 'CDLLONGLINE_WK',
                    'KDJ_CROSS1_WK', 'CDLADVANCEBLOCK_WK', 'CDLHANGINGMAN_WK',
                    'KDJ_CROSS2_WK', 'ADX_CROSS1_WK', 'CDLMATHOLD_WK',
                    'CDLABANDONEDBABY_WK', 'WR_CROSS2_WK', 'MIKE_WRSC_WK',
                    'OSC_CROSS2_WK', 'CDLGAPSIDESIDEWHITE_WK', 'CROSS_JC_WK',
                    'MTM_CROSS4_WK', 'CDLSHOOTINGSTAR_WK', 'ADTM_CROSS1_WK',
                    'CDL3OUTSIDE_WK', 'CDLLONGLEGGEDDOJI_WK',
                    'CDL3LINESTRIKE_WK', 'CDLHIKKAKE_WK',
                    'CDLSTALLEDPATTERN_WK', 'MTM_CROSS2_WK', 'SKDJ_CROSS2_WK',
                    'CDLEVENINGDOJISTAR_WK', 'OSC_CROSS4_WK', 'CDLTAKURI_WK',
                    'CDLSHORTLINE_WK', 'CROSS_SC_WK', 'CDLMATCHINGLOW_WK',
                    'CCI_CROSS4_WK', 'MIKE_WSJC_WK', 'CDLHOMINGPIGEON_WK',
                    'VPT_CROSS1_WK', 'CDLCLOSINGMARUBOZU_WK', 'WR_CROSS1_WK',
                    'CDLTHRUSTING_WK', 'BBI_CROSS1_WK', 'DMA_CROSS2_WK',
                    'RSI_CROSS1_WK', 'CDLRISEFALL3METHODS_WK',
                    'CDLHIKKAKEMOD_WK', 'CCI_CROSS3_WK',
                    'CDLKICKINGBYLENGTH_WK', 'CDLLADDERBOTTOM_WK',
                    'DI_CROSS1_WK', 'VPT_CROSS3_WK', 'CDLHARAMICROSS_WK',
                    'CHO_CROSS2_WK', 'CCI_CROSS2_WK', 'CDL3STARSINSOUTH_WK',
                    'CDLXSIDEGAP3METHODS_WK', 'RSI_CROSS2_WK', 'MIKE_TR_WK',
                    'CDLDOJISTAR_WK', 'CDLCONCEALBABYSWALL_WK',
                    'CDLPIERCING_WK', 'CDLHIGHWAVE_WK',
                    'CDLMORNINGDOJISTAR_WK', 'CDLSTICKSANDWICH_WK',
                    'CDLGRAVESTONEDOJI_WK', 'CDLINVERTEDHAMMER_WK',
                    'CDLKICKING_WK', 'CDLSEPARATINGLINES_WK',
                    'CDLBREAKAWAY_WK', 'MTM_CROSS3_WK', 'CDLUNIQUE3RIVER_WK',
                    'CCI_CROSS1_WK', 'DMA_CROSS1_WK', 'VPT_CROSS4_WK',
                    'CDLEVENINGSTAR_WK', 'CDL2CROWS_WK',
                    'CDL3WHITESOLDIERS_WK', 'CDLIDENTICAL3CROWS_WK',
                    'CDLUPSIDEGAP2CROWS_WK', 'CDLENGULFING_WK',
                    'SKDJ_CROSS1_WK', 'BIAS_CROSS1_WK', 'CDLONNECK_WK'
                ]]
                QA_util_log_info(
                    '##JOB01 Pre Data index quant data ============== {deal_date} '
                    .format(deal_date=deal_date), ui_log)
                alpha_res = QA_util_to_json_from_pandas(alpha_data)
                index_res = QA_util_to_json_from_pandas(index_data)
                week_res = QA_util_to_json_from_pandas(week_data)
                QA_util_log_info(
                    '##JOB02 Got Data index quant data ============== {deal_date}'
                    .format(deal_date=deal_date), ui_log)

                try:
                    alpha.insert_many(alpha_res, ordered=False)
                    QA_util_log_info(
                        '##JOB03 Now index quant data alpha saved ============== {deal_date} '
                        .format(deal_date=deal_date), ui_log)
                except Exception as e:
                    if isinstance(e, MemoryError):
                        alpha.insert_many(alpha_res, ordered=True)
                    elif isinstance(e, pymongo.errors.BulkWriteError):
                        pass

                try:
                    week.insert_many(week_res, ordered=False)
                    QA_util_log_info(
                        '##JOB03 Now index quant data week saved ============== {deal_date} '
                        .format(deal_date=deal_date), ui_log)
                except Exception as e:
                    if isinstance(e, MemoryError):
                        week.insert_many(week_res, ordered=True)
                    elif isinstance(e, pymongo.errors.BulkWriteError):
                        pass

                try:
                    index.insert_many(index_res, ordered=False)
                    QA_util_log_info(
                        '##JOB03 Now index quant data index saved ============== {deal_date} '
                        .format(deal_date=deal_date), ui_log)
                except Exception as e:
                    if isinstance(e, MemoryError):
                        index.insert_many(index_res, ordered=True)
                    elif isinstance(e, pymongo.errors.BulkWriteError):
                        pass
            else:
                QA_util_log_info(
                    '##JOB01 No Data index_quant_data ============== {deal_date} '
                    .format(deal_date=deal_date), ui_log)
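The three try/except blocks above repeat one insert pattern. A reusable sketch (bulk_insert is a hypothetical helper; assumes pymongo is imported, as elsewhere in these examples):

def bulk_insert(coll, records):
    try:
        # unordered inserts are faster and attempt every row even after
        # duplicate-key failures
        coll.insert_many(records, ordered=False)
    except MemoryError:
        coll.insert_many(records, ordered=True)
    except pymongo.errors.BulkWriteError:
        pass  # duplicate rows were already stored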
Example #16
 def __init__(self, market_data):
     if isinstance(market_data, dict):
         self.market_data = market_data
     elif isinstance(market_data, pd.DataFrame):
         self.market_data = QA_util_to_json_from_pandas(market_data)
Example #17
async def QA_fetch_stock_min(code,
                             start,
                             end,
                             format='numpy',
                             frequence='1min',
                             collections=DATABASE_ASYNC.stock_min):
    'Fetch stock minute-bar data'
    if frequence in ['1min', '1m']:
        frequence = '1min'
    elif frequence in ['5min', '5m']:
        frequence = '5min'
    elif frequence in ['15min', '15m']:
        frequence = '15min'
    elif frequence in ['30min', '30m']:
        frequence = '30min'
    elif frequence in ['60min', '60m']:
        frequence = '60min'
    else:
        print(
            "QA Error QA_fetch_stock_min parameter frequence=%s is none of 1min 1m 5min 5m 15min 15m 30min 30m 60min 60m"
            % frequence)
        return None

    __data = []
    # code checking
    code = QA_util_code_tolist(code)

    cursor = collections.find({
        'code': {
            '$in': code
        },
        "time_stamp": {
            "$gte": QA_util_time_stamp(start),
            "$lte": QA_util_time_stamp(end)
        },
        'type': frequence
    })

    try:
        res = pd.DataFrame([item async for item in cursor])
    except SyntaxError:
        print('This Python version does not support "async for"')
    try:
        res = res.drop('_id', axis=1).assign(
            volume=res.vol).query('volume>1').assign(datetime=pd.to_datetime(
                res.datetime, utc=False)).drop_duplicates(
                    ['datetime', 'code']).set_index('datetime', drop=False)
        # return res
    except Exception:
        res = None
    if format in ['P', 'p', 'pandas', 'pd']:
        return res
    elif format in ['json', 'dict']:
        return QA_util_to_json_from_pandas(res)
    # other supported output formats
    elif format in ['n', 'N', 'numpy']:
        return numpy.asarray(res)
    elif format in ['list', 'l', 'L']:
        return numpy.asarray(res).tolist()
    else:
        print(
            "QA Error QA_fetch_stock_min format parameter %s is none of \"P, p, pandas, pd, json, dict, n, N, numpy, list, l, L\""
            % format)
        return None
Example #18
def save_stock_block_kline(block_kline_df):
    """保存东方财富股票概念板块K线数据(功能函数)
        注:1min数据日内产生,有可能会有遗漏,当临时数据,故另外存放,随时丢弃。
    """
    assert block_kline_df is not None, 'block_kline_df must be'
    assert len(block_kline_df) > 0, 'block_kline_df must not be 0 row'

    data = block_kline_df.reset_index()
    freq = data.iloc[0].type

    assert freq in [
        QA.FREQUENCE.DAY, QA.FREQUENCE.HOUR, QA.FREQUENCE.FIVE_MIN,
        QA.FREQUENCE.ONE_MIN
    ], 'freq only supports DAY|HOUR|FIVE_MIN|ONE_MIN (self-imposed restriction)'

    if (freq == QA.FREQUENCE.DAY):
        coll = DATABASE.stock_block_em_day
        coll.create_index([('code', pymongo.ASCENDING),
                           ("date_stamp", pymongo.ASCENDING)],
                          unique=True)
    elif (freq == QA.FREQUENCE.ONE_MIN):
        coll = DATABASE.tmp_1min_stock_block_em
        coll.create_index([('code', pymongo.ASCENDING),
                           ("date", pymongo.ASCENDING)],
                          unique=False)
        coll.create_index([('code', pymongo.ASCENDING),
                           ("time_stamp", pymongo.ASCENDING)],
                          unique=True)
    else:
        coll = DATABASE.stock_block_em_min
        coll.create_index([('code', pymongo.ASCENDING),
                           ("type", pymongo.ASCENDING),
                           ("date", pymongo.ASCENDING)],
                          unique=False)
        coll.create_index([('code', pymongo.ASCENDING),
                           ("type", pymongo.ASCENDING),
                           ("time_stamp", pymongo.ASCENDING)],
                          unique=True)

    # check whether this is new data
    if (freq == QA.FREQUENCE.DAY):
        query_id = {
            'code': data.iloc[0].code,
            'date_stamp': {
                '$in': data['date_stamp'].tolist()
            }
        }
    else:
        query_id = {
            'code': data.iloc[0].code,
            'type': freq,
            'time_stamp': {
                '$in': data['time_stamp'].tolist()
            }
        }
    refcount = coll.count_documents(query_id)

    try:
        if refcount > 0:
            if (len(data) > 1):
                # remove duplicate records
                coll.delete_many(query_id)
                data = QA_util_to_json_from_pandas(data)
                coll.insert_many(data)
            else:
                # quotes keep streaming in; replace the stored record
                if ('created_at' in data.columns):
                    data.drop('created_at', axis=1, inplace=True)
                data = QA_util_to_json_from_pandas(data)
                coll.replace_one(query_id, data[0])
        else:
            # new tick; insert the records
            data = QA_util_to_json_from_pandas(data)
            coll.insert_many(data)

    except Exception as e:
        if (data is not None):
            traceback.print_exception(type(e), e, sys.exc_info()[2])
            print(u'save_stock_block_kline failed!!\n', e)
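For the single-row branch above, replace_one overwrites the stored record so the bar currently forming stays unique while it keeps updating. That branch in isolation, as a sketch (upsert_latest is a hypothetical helper name):

def upsert_latest(coll, query_id, data):
    # data is a one-row DataFrame for the bar still forming intraday
    if 'created_at' in data.columns:
        data = data.drop('created_at', axis=1)
    record = QA_util_to_json_from_pandas(data)[0]
    coll.replace_one(query_id, record)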
Example #19
def QA_SU_save_data_huobi_callback(data, freq):
    """
    异步获取数据回调用的 MongoDB 存储函数
    """
    symbol_template = huobi_SYMBOL
    if len(data) == 1:
        # skip single-kline updates to reduce log spam
        pass
    else:
        QA_util_log_info(
            'SYMBOL "{}" Recived "{}" from {} to {} in total {} klines'.format(
                data.iloc[0].symbol,
                freq,
                time.strftime(
                    '%Y-%m-%d %H:%M:%S',
                    time.localtime(data.iloc[0].time_stamp)
                )[2:16],
                time.strftime(
                    '%Y-%m-%d %H:%M:%S',
                    time.localtime(data.iloc[-1].time_stamp)
                )[2:16],
                len(data)
            )
        )
    if (freq not in ['1day', '86400', 'day', '1d']):
        col = DATABASE.cryptocurrency_min
        col.create_index(
            [
                ("symbol",
                 pymongo.ASCENDING),
                ('time_stamp',
                 pymongo.ASCENDING),
                ('date_stamp',
                 pymongo.ASCENDING)
            ]
        )
        col.create_index(
            [
                ("symbol",
                 pymongo.ASCENDING),
                ("type",
                 pymongo.ASCENDING),
                ('time_stamp',
                 pymongo.ASCENDING)
            ],
            unique=True
        )

        # check whether these are new ticks
        query_id = {
            "symbol": data.iloc[0].symbol,
            'type': data.iloc[0].type,
            'time_stamp': {
                '$in': data['time_stamp'].tolist()
            }
        }
        refcount = col.count_documents(query_id)
    else:
        col = DATABASE.cryptocurrency_day
        col.create_index(
            [
                ("symbol",
                 pymongo.ASCENDING),
                ("date_stamp",
                 pymongo.ASCENDING)
            ],
            unique=True
        )

        # check whether these are new ticks
        query_id = {
            "symbol": data.iloc[0].symbol,
            'date_stamp': {
                '$in': data['date_stamp'].tolist()
            }
        }
        refcount = col.count_documents(query_id)
    if refcount > 0:
        if (len(data) > 1):
            # remove duplicate records
            col.delete_many(query_id)
            data = QA_util_to_json_from_pandas(data)
            col.insert_many(data)
        else:
            # quotes keep streaming in; replace the stored record
            data.drop('created_at', axis=1, inplace=True)
            data = QA_util_to_json_from_pandas(data)
            col.replace_one(query_id, data[0])
    else:
        # new tick; insert the records
        data = QA_util_to_json_from_pandas(data)
        col.insert_many(data)
Example #20
 def __saving_work(code, coll):
     QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                      ui_log=ui_log)
     try:
         for type_ in ["1min", "5min", "15min", "30min", "60min"]:
             col_filter = {"code": str(code)[0:6], "type": type_}
             ref_ = coll.find(col_filter)
             end_time = str(now_time())[0:19]
             if coll.count_documents(col_filter) > 0:
                 start_time = ref_[coll.count_documents(col_filter) -
                                   1]["datetime"]
                 QA_util_log_info(
                     "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                         ["1min", "5min", "15min", "30min",
                          "60min"].index(type_),
                         str(code)[0:6],
                         start_time,
                         end_time,
                         type_,
                     ),
                     ui_log=ui_log,
                 )
                 if start_time != end_time:
                     df = jqdatasdk.get_price(
                         security=code,
                         start_date=start_time,
                         end_date=end_time,
                         frequency=type_.split("min")[0] + "m",
                     )
                     __data = __transform_jq_to_qa(df,
                                                   code=code[:6],
                                                   type_=type_)
                     if len(__data) > 1:
                         coll.insert_many(
                             QA_util_to_json_from_pandas(__data)[1::])
             else:
                 start_time = "2015-01-01 09:30:00"
                 QA_util_log_info(
                     "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                         ["1min", "5min", "15min", "30min",
                          "60min"].index(type_),
                         str(code)[0:6],
                         start_time,
                         end_time,
                         type_,
                     ),
                     ui_log=ui_log,
                 )
                 if start_time != end_time:
                     __data = __transform_jq_to_qa(jqdatasdk.get_price(
                         security=code,
                         start_date=start_time,
                         end_date=end_time,
                         frequency=type_.split("min")[0] + "m",
                     ),
                                                   code=code[:6],
                                                   type_=type_)
                     if len(__data) > 1:
                         coll.insert_many(
                             QA_util_to_json_from_pandas(__data)[1::])
     except Exception as e:
         QA_util_log_info(e, ui_log=ui_log)
         err.append(code)
         QA_util_log_info(err, ui_log=ui_log)
Example #21
 def to_json(self):
     return QA_util_to_json_from_pandas(self.data)
Example #22
def QA_SU_save_bitmex_symbol(
    market="bitmex",
    client=DATABASE,
):
    """
    保存 bitmex 交易对信息
    """
    QA_util_log_info('Downloading {:s} symbol list...'.format(market))

    # keep the raw BitMEX API symbol data for reference; useful for automated trading
    raw_symbol_lists = QA_util_save_raw_symbols(QA_fetch_bitmex_symbols, market)

    if (len(raw_symbol_lists) > 0):
        # save into QUANTAXIS.crypto_asset_list, the digital-asset list used for unified cross-market queries
        symbol_lists = pd.DataFrame(raw_symbol_lists)

        # market and symbol are MongoDB index fields; make sure they exist before saving
        symbol_lists['market'] = market
        symbol_lists['category'] = symbol_lists['typ']
        symbol_lists.rename(
            {
                'rootSymbol': 'base_currency',
                'quoteCurrency': 'quote_currency',
            },
            axis=1,
            inplace=True
        )
        symbol_lists['price_precision'] = symbol_lists.apply(
            lambda x: 2 + -1 * int(math.log10(float(x.maintMargin))),
            axis=1
        )
        symbol_lists['name'] = symbol_lists['symbol']
        symbol_lists['desc'] = ''

        # keep only the shared fields; broker-specific ones can be read from the
        # exchange's raw_symbol_lists when building that exchange's broker interface.
        symbol_lists = symbol_lists[[
            'symbol',
            'name',
            'market',
            'state',
            'category',
            'base_currency',
            'quote_currency',
            'price_precision',
            'desc'
        ]]
        if ('_id' in symbol_lists.columns.values):
            # sometimes present; must be dropped separately
            symbol_lists.drop(
                [
                    '_id',
                ],
                axis=1,
                inplace=True
            )
        symbol_lists['created_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple())
        )
        symbol_lists['updated_at'] = int(
            time.mktime(datetime.datetime.now().utctimetuple())
        )

        coll_crypto_asset_list = client.crypto_asset_list
        coll_crypto_asset_list.create_index(
            [('market',
              pymongo.ASCENDING),
             ('symbol',
              pymongo.ASCENDING)],
            unique=True
        )
        try:
            query_id = {'market': market}
            if (coll_crypto_asset_list.count_documents(query_id) > 0):
                # remove duplicate records
                query_id = {
                    'market': market,
                    'symbol': {
                        '$in': symbol_lists['symbol'].tolist()
                    }
                }
                coll_crypto_asset_list.delete_many(query_id)
            coll_crypto_asset_list.insert_many(
                QA_util_to_json_from_pandas(symbol_lists)
            )
            return symbol_lists
        except Exception:
            QA_util_log_expection(
                'QA_SU_save_bitmex_symbol(): Insert_many(symbol) to "crypto_asset_list" got Exception with {} klines'
                .format(len(symbol_lists))
            )
        return []