Example #1
def aggregate_factory(ex_name, symbol, src_res, dst_res):
    """
    aggregate data from src_res to dst_res, it reads metadata from the dst, and aggregate
    available src_data up to date
    """
    src_len = int(pd.Timedelta(dst_res) / pd.Timedelta(src_res))
    logger = general.create_logger("compress_" + dst_res)

    src = mongo._get_lib(src_res)
    dst = mongo._get_lib(dst_res)

    end = pd.Timestamp("now").floor(dst_res)
    start = end - pd.Timedelta(dst_res)
    rng = DateRange(start=start, end=end, interval=CLOSED_OPEN)

    try:
        db_sym = ex_name + "/" + symbol
        logger.info(db_sym)
        # check dst overlap
        if dst.has_symbol(db_sym):
            dst_end = pd.Timestamp(dst.read_metadata(db_sym).metadata["end"])
            if dst_end >= start:
                logger.warning("avoid overwriting [" +
                               start.strftime("%Y-%m-%d %H:%M:%S.%f"))
                return
            else:
                start = max(dst_end + pd.Timedelta(dst_res), start)
                rng = DateRange(start=start, end=end, interval=CLOSED_OPEN)
        else:
            start = pd.Timestamp("2019-01-01")
            rng = DateRange(start=start, end=end, interval=CLOSED_OPEN)

        # read src data
        dfsrc = src.read(db_sym, date_range=rng).data

        # validate src data
        if dfsrc.empty:
            logger.warning("empty data at " +
                           start.strftime("%Y-%m-%d %H:%M:%S.%f"))
            return
        if len(dfsrc) < src_len:
            logger.warning("potential data loss at " +
                           start.strftime("%Y-%m-%d %H:%M:%S.%f"))

        # aggregate src data to dst resolution
        dfdst = compress.aggr(dfsrc, dst_res)

        # validate dst data and save
        if not dst.has_symbol(db_sym):
            dst.write(db_sym, dfdst)
        else:
            dst.append(db_sym, dfdst)
        # update metadata for end timestamp
        dst.write_metadata(db_sym, {"end": dfdst.index[-1]})
        logger.info("trades converted at " +
                    start.strftime("%Y-%m-%d %H:%M:%S.%f"))

    except Exception as e:
        logger.error(e)
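
A minimal invocation sketch for the function above; the resolution chain and the wrapper below are assumptions for illustration, not part of the original module:

# hypothetical cascade: build each coarser resolution from the next finer one
RESOLUTION_CHAIN = [("1min", "5min"), ("5min", "1h"), ("1h", "1d")]

def run_aggregation_pass(ex_name, symbol):
    # intended to be triggered by a scheduler (e.g. cron) once per interval
    for src_res, dst_res in RESOLUTION_CHAIN:
        aggregate_factory(ex_name, symbol, src_res, dst_res)

run_aggregation_pass("bitmex", "XBTUSD")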
Example #2
def mongo_buffer(delete_raw=False):
    logger = general.create_logger("mongo_buffer")
    end = pd.Timestamp("now").floor("1min")
    start = end - pd.Timedelta("1min")
    rng = DateRange(start, end, CLOSED_OPEN)
    
    # epoch milliseconds of the minute before the window, for the previous-close lookup
    prev_min = start - pd.Timedelta("1min")
    prev_min = int(prev_min.value / 1e6)

    for each in TRADE_EX:
        bins = []
        try:
            libname = each + "/trades"
            lib = mongo._get_lib(libname)
            sym = 'btc/usdt'
            if each == "coinbasepro":
                sym = 'btc/usd'
            df = lib.read(sym, date_range=rng).data
            pprice = helper.get_close(prev_min, each)
            logger.info(each)
            # check empty
            if df.empty:
                logger.warning("empty data at " + start.strftime("%Y-%m-%d %H:%M:00"))
                continue

            # compress
            bins = compress.aggregate_raw(df, pprice)
            print(bins["buy"], bins["sell"])

            # save compressed
            db_sym = each + "/XBTUSD"
            mongo._get_lib("1min").append(db_sym, bins)
            # delete raw
            if delete_raw:
                mongo.delete_cache(each, mongo.get_engine(TRADE_WSS_PATH), logger=logger, start=start, end=end)

            logger.info(each + " trades converted at " + start.strftime("%Y-%m-%d %H:%M:00"))

        except Exception as e:
            logger.error(e)
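
compress.aggregate_raw is not shown in this listing; the sketch below is only a guess at its shape, assuming the raw trades carry price, amount, and side columns and that pprice is the previous minute's close, used here for a change column:

import pandas as pd

def aggregate_raw_sketch(df, prev_close):
    # one-minute OHLC plus buy/sell volume from raw trades (df assumed non-empty)
    ts = df.index[0].floor("1min")
    buys = df.loc[df["side"] == "buy", "amount"].sum()
    sells = df.loc[df["side"] == "sell", "amount"].sum()
    return pd.DataFrame(
        index=[ts],
        data={
            "open": [df["price"].iloc[0]],
            "high": [df["price"].max()],
            "low": [df["price"].min()],
            "close": [df["price"].iloc[-1]],
            "change": [df["price"].iloc[-1] / prev_close - 1],  # assumed use of prev_close
            "buy": [float(buys)],
            "sell": [float(sells)],
        },
    )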
Example #3
def _fetch(ex_name, reso_idx, start, end):
    """
    TODO: fetch data from higher resolution if potential loss happened
    """
    rng = DateRange(start=start, end=end, interval=CLOSED_OPEN)
    lib = mongo._get_lib(SUPPORTED_RESOLUTION[reso_idx])
    sym = ex_name + "/XBTUSD"
    df = lib.read(sym, date_range=rng).data
    # expected number of bars in [start, end) at this resolution
    count = (end - start) / pd.Timedelta(SUPPORTED_RESOLUTION[reso_idx])
    if len(df) < count and reso_idx > 0:
        print("_fetching ", SUPPORTED_RESOLUTION[reso_idx], len(df), count)
        return _fetch(ex_name, reso_idx - 1, start, end)
    return df
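
The recursion walks from coarse to fine data: whenever the requested library holds fewer bars than the window implies, it retries one resolution lower. A small illustration, assuming SUPPORTED_RESOLUTION orders resolutions from finest to coarsest:

import pandas as pd

SUPPORTED_RESOLUTION = ["1min", "5min", "1h"]  # assumed ordering: finest first

start = pd.Timestamp("2021-01-01 00:00")
end = pd.Timestamp("2021-01-01 06:00")

# start at the coarsest resolution; _fetch steps down towards "1min"
# whenever len(df) falls short of (end - start) / resolution
df = _fetch("bitmex", len(SUPPORTED_RESOLUTION) - 1, start, end)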
Example #4
async def get_vol_prof(ex_name, start, end):
    """
    ex_name: exchange name (str)
    start:   start timestamp (str or Timestamp)
    end:     end timestamp (str or Timestamp)
    """
    # get cache time range
    src_res = "5min"
    sym = ex_name + "/XBTUSD"
    src = mongo._get_lib(src_res)
    rng = DateRange(start=start, end=end, interval=CLOSED_OPEN)
    df1 = src.read(sym, date_range=rng).data  # arctic reads are synchronous; no await here

    cache_end = pd.Timestamp(start)
    if not df1.empty:
        # resume from the bar after the last cached 5min bar
        cache_end = df1.index[-1] + pd.Timedelta("5min")

    # drop any timezone so the comparison with `end` below stays tz-naive
    cache_end = cache_end.tz_localize(None)

    if cache_end < end:
        rng = DateRange(start=cache_end, end=end, interval=CLOSED_OPEN)
        src = mongo._get_lib("1min")
        df2 = src.read(sym, date_range=rng).data
        df1 = pd.concat([df1, df2])

    return df1
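
The coroutine can be driven from an event loop even though the arctic reads inside it are synchronous; a minimal call, with placeholder exchange name and dates:

import asyncio
import pandas as pd

async def main():
    df = await get_vol_prof("bitmex",
                            pd.Timestamp("2021-01-01"),
                            pd.Timestamp("2021-01-02"))
    print(df.tail())

asyncio.run(main())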
Example #5
from btc_panel.utils import mongo

import pandas as pd
import ccxt
from arctic import TICK_STORE

client = ccxt.bitmex()
lib = mongo._get_lib("bitmex", lib_type=TICK_STORE)

def fetchOpenInterest(oi_symbols):
    global client, lib
    for each in client.fetchMarkets():  # reuse the module-level client instead of creating a new one
        if each["info"]["symbol"] in oi_symbols:
            sym = each["info"]["symbol"]
            df = pd.DataFrame(
                index=[pd.Timestamp(each["info"]["timestamp"]).floor("10s")],
                data=[[
                    float(each["info"]["impactMidPrice"]),
                    int(each["info"]["openValue"] / 1e8),    # BitMEX reports satoshi values; convert to XBT
                    int(each["info"]["turnover24h"] / 1e8),  # likewise, satoshis -> XBT
                ]],
                columns=["impactMidPrice", "openValue", "turnover24h"]
            )
            lib.write(sym, df)
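
A polling sketch for the fetcher above; the 10-second cadence mirrors the .floor("10s") bucketing, and the symbol list is a placeholder:

import time

OI_SYMBOLS = ["XBTUSD", "ETHUSD"]  # hypothetical watch list

while True:
    try:
        fetchOpenInterest(OI_SYMBOLS)
    except Exception as e:
        print(e)
    time.sleep(10)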