Exemplo n.º 1
0
def query_api(pair, freqstr, startstr=None, endstr=None):
    """Get Historical Klines (candles) from Binance.

    @pair: trading pair symbol, e.g. 'BTCUSDT'.
    @freqstr: Binance kline interval string: 1m, 3m, 5m, 15m, 30m, 1h, etc.
    @startstr/@endstr: parseable date strings; default to
        DEF_KLINE_HIST_LEN and "now utc" respectively.
    Returns: list of raw kline rows as returned by the API.
    """
    client = app.bot.client
    t1 = Timer()
    ms_period = strtofreq(freqstr) * 1000
    end = strtoms(endstr or "now utc")
    start = strtoms(startstr or DEF_KLINE_HIST_LEN)
    results = []
    n_errors = 0

    while start < end:
        try:
            data = client.get_klines(
                symbol=pair,
                interval=freqstr,
                limit=BINANCE_REST_QUERY_LIMIT,
                startTime=start, endTime=end)
        except Exception as e:
            log.exception("Binance API request error. e=%s", str(e))
            # Bail out after repeated consecutive failures. The original
            # 'continue' never advanced 'start', so a persistent API error
            # retried forever, hammering the endpoint.
            n_errors += 1
            if n_errors >= 3:
                break
            continue

        n_errors = 0
        if len(data) == 0:
            # Empty window: advance one period so the loop terminates.
            start += ms_period
        else:
            results += data
            # Resume just after the open_time of the last candle received.
            start = data[-1][0] + ms_period

    log.debug('%s %s %s queried [%ss].', len(results), freqstr, pair,
        t1.elapsed(unit='s'))
    return results
Exemplo n.º 2
0
def init(evnt_pairs):
    """Initialize app.bot module state: authenticate the Binance client,
    refresh tradeable pairs in db.assets, and set initial enabled pairs.

    @evnt_pairs: event object stored globally for pair-change signalling.
    Side effects: sets module globals client, dfc, e_pairs.
    """
    from app.common.timer import Timer
    from . import candles, scanner  # noqa: F401 -- imported for side effects
    global client, dfc, e_pairs

    e_pairs = evnt_pairs
    t1 = Timer()
    db = app.get_db()

    # Auth Binance client. find_one() replaces list(find())[0] -- both
    # assume at least one api_keys document exists.
    cred = db.api_keys.find_one()
    client = Client(cred['key'], cred['secret'])

    # Get available exchange trade pairs and upsert each into db.assets.
    info = client.get_exchange_info()
    ops = [
        UpdateOne({'symbol': n['symbol']}, {'$set': n}, upsert=True)
        for n in info['symbols']
    ]
    db.assets.bulk_write(ops)

    set_pairs([], 'DISABLED', query_temp=True)

    print('{} trading algorithms.'.format(len(TRD_ALGOS)))
    print('app.bot initialized in {:,.0f} ms.'.format(t1.elapsed()))
Exemplo n.º 3
0
def update(pairs, freq, start=None, force=False):
    """Query Binance klines for each pair and store them in db.candles.

    @pairs: list of pair symbols, e.g. ['BTCUSDT'].
    @freq: Binance kline interval string ('1m', '5m', '1h', ...).
    @start: optional start date string, passed through to query_api().
    @force: if True, re-query everything and upsert (ReplaceOne); if
        False, only unstored records are queried and bulk-inserted.
    Returns: list of candle documents stored.
    """
    t1 = Timer()
    candles = []

    for pair in pairs:
        data = query_api(pair, freq, start=start, force=force)
        if len(data) == 0:
            continue

        # Cast each raw kline row into a typed document keyed by
        # BINANCE['KLINE_FIELDS'] (ms epochs -> datetimes, strs -> floats).
        for i, row in enumerate(data):
            row = [
                pd.to_datetime(int(row[0]), unit='ms', utc=True),
                float(row[1]),
                float(row[2]),
                float(row[3]),
                float(row[4]),
                float(row[5]),
                pd.to_datetime(int(row[6]), unit='ms', utc=True),
                float(row[7]),
                int(row[8]),
                float(row[9]),
                float(row[10]),
                None
            ]
            d = dict(zip(BINANCE['KLINE_FIELDS'], row))
            d.update({'pair': pair, 'freq': freq})
            # Guard against division by zero on zero-volume candles.
            if d['volume'] > 0:
                d['buy_ratio'] = round(d['buy_vol'] / d['volume'], 4)
            else:
                d['buy_ratio'] = 0.0
            data[i] = d
        candles += data

    if len(candles) > 0:
        db = app.get_db()

        if force:
            # Upsert on (close_time, pair, freq) so re-queried records
            # don't raise duplicate-key errors.
            ops = [
                ReplaceOne(
                    {"close_time": cndl["close_time"],
                     "pair": cndl["pair"], "freq": cndl["freq"]},
                    cndl,
                    upsert=True)
                for cndl in candles
            ]
            result = db.candles.bulk_write(ops)
        else:
            # Should not create any duplicates because of force==False
            # check in query_api()
            result = db.candles.insert_many(candles)

    log.info("%s %s records queried/stored. [%ss]",
        len(candles), freq, t1.elapsed(unit='s'))

    return candles
Exemplo n.º 4
0
def price_df(coins, date_rng):
    """Build price dataframe for list of coins within date period.
    Returns the timeseries subset where all columns have price data. i.e. newly
    listed coins with short price histories will force the entire subset to
    shrink significantly.

    @coins: list of coin symbols.
    @date_rng: pandas DatetimeIndex defining the period and frequency.
    Raises ValueError for frequencies other than T/H/D/M/Y.
    """
    db = get_db()
    t0, t1 = Timer(), Timer()
    freq = date_rng.freq
    # to_pydatetime() replaces the removed Timestamp.to_datetime().
    dt0 = date_rng[0].to_pydatetime()
    dt1 = date_rng[-1].to_pydatetime()
    # Select source field by granularity: daily+ uses OHLC close, intraday
    # uses the raw ticker price.
    if freq.freqstr[-1] in ['D', 'M', 'Y']:
        collname = "cmc_tick"
        field = "$close"
    elif freq.freqstr[-1] in ['T', 'H']:
        collname = "cmc_tick"
        field = "$price_usd"
    else:
        # Original left collname/field unbound here -> NameError below.
        raise ValueError("unsupported frequency '%s'" % freq.freqstr)

    cursor = db[collname].aggregate([{
        "$match": {
            "symbol": {
                "$in": coins
            },
            "date": {
                "$gte": dt0,
                "$lt": dt1
            }
        }
    }, {
        "$group": {
            "_id": "$symbol",
            "date": {
                "$push": "$date"
            },
            "price": {
                "$push": field
            }
        }
    }])

    coindata = list(cursor)
    # A pymongo cursor object is always truthy, so the original
    # 'if not cursor' check could never fire; test the materialized list.
    if not coindata:
        return log.error("empty dataframe!")

    df = pd.DataFrame(index=date_rng)

    # One column per coin, resampled onto the target frequency.
    for coin in coindata:
        df2 = pd.DataFrame(
            coin['price'], columns=[coin['_id']],
            index=coin['date']).resample(freq).mean().sort_index()
        df = df.join(df2)

    n_drop = sum(df.isnull().sum())
    df = df.dropna().round(2)
    log.debug("price_df: frame=[{:,} x {:,}], dropped={:,}, t={}ms".format(
        len(df), len(df.columns), n_drop, t0))
    return df
Exemplo n.º 5
0
def _scanner():
    """Scanner loop: scan the market once at startup, then re-scan every
    30 clock minutes (UTC)."""
    def do_scan():
        scanner.update(25, idx_filter='BTC')

    do_scan()
    tmr = Timer(name='scanner', expire='every 30 clock min utc')

    while True:
        if tmr.remain() == 0:
            do_scan()
            tmr.reset()
        time.sleep(1800)
Exemplo n.º 6
0
def query_api(pair, freq, start=None, end=None, force=False):
    """Get Historical Klines (candles) from Binance.
    @freq: Binance kline frequency:
        1m, 3m, 5m, 15m, 30m, 1h, 2h, 4h, 6h, 8h, 12h, 1d, 3d, 1w, 1M]
        m -> minutes; h -> hours; d -> days; w -> weeks; M -> months
    @force: if False, only query unstored data (faster). If True, query all.
    Return: list of OHLCV value
    """
    t1 = Timer()
    limit = 500  # max klines per REST request
    idx = 0  # NOTE(review): unused local
    results = []
    periodlen = intrvl_to_ms(freq)  # one candle period, in ms
    end_ts = datestr_to_ms(end) if end else dt_to_ms(now())
    # Default start window: 20 periods back from the end timestamp.
    start_ts = datestr_to_ms(start) if start else end_ts - (periodlen * 20)

    # Skip queries for records already stored
    if force == False:
        query = {"pair":pair, "freq":freq}
        if start:
            query["open_time"] = {"$gt": datestr_to_dt(start)}

        # Newest stored candle for this pair/freq, if any.
        newer = app.get_db().candles.find(query).sort('open_time',-1).limit(1)

        # NOTE(review): Cursor.count() is deprecated/removed in newer pymongo.
        if newer.count() > 0:
            dt = list(newer)[0]['open_time']
            # Resume one period after the newest stored candle.
            start_ts = int(dt.timestamp()*1000 + periodlen)

            if start_ts > end_ts:
                log.debug("All records for %s already stored.", pair)
                return []

    # Unauthenticated client; kline data is public.
    client = Client("", "")

    #while len(results) < 500 and start_ts < end_ts:
    while start_ts < end_ts:
        try:
            data = client.get_klines(symbol=pair, interval=freq,
                limit=limit, startTime=start_ts, endTime=end_ts)

            if len(data) == 0:
                # Empty window: step forward one period to make progress.
                start_ts += periodlen
            else:
                # Don't want candles that aren't closed yet
                if data[-1][6] >= dt_to_ms(now()):
                    results += data[:-1]
                    break
                results += data
                # Resume after the open_time of the last candle received.
                start_ts = data[-1][0] + periodlen
        except Exception as e:
            # NOTE(review): start_ts is NOT advanced on error, so a
            # persistent API failure makes this loop retry forever.
            log.exception("Binance API request error. e=%s", str(e))

    log.debug('%s %s %s queried [%ss].', len(results), freq, pair,
        t1.elapsed(unit='s'))
    return results
Exemplo n.º 7
0
def init():
    """Preload historic candle records from mongoDB into the global
    dataframe. Performance: ~3,000ms/100k records.
    Side effects: sets app.bot.dfc and the module-level client.
    """
    global client

    timer = Timer()
    log.info('Preloading historic data...')

    # Merge the last week of candles into a fresh dataframe.
    app.bot.dfc = candles.merge_new(
        pd.DataFrame(), pairs, span=delta(days=7))

    # Unauthenticated client is sufficient for public market data.
    client = Client("", "")

    log.info('{:,} records loaded in {:,.1f}s.'.format(
        len(app.bot.dfc), timer.elapsed(unit='s')))
Exemplo n.º 8
0
def query_api_mkt():
    """Update 5T market index data from coinmarketcap.com REST API.

    Fetches the global market snapshot, casts fields per the
    coinmarketcap config schema, and upserts one document keyed by date.
    """
    t1 = Timer()

    try:
        r = requests.get("https://api.coinmarketcap.com/v1/global")
        data = json.loads(r.text)
    except Exception as e:
        # Fix: original logged r.status_code here, which raises NameError
        # when requests.get() itself fails (r never bound).
        return log.error("API error %s", str(e))

    if r.status_code != 200:
        return log.error("API error %s", r.status_code)

    # Map/cast API fields per the coinmarketcap market schema config.
    store = {}
    for m in coinmarketcap['api']['markets']:
        store[m["to"]] = m["type"]( data[m["from"]] )

    get_db().cmc_mkt.replace_one(
        {'date':store['date']}, store,
        upsert=True)

    log.info("Coinmktcap markets updated. [{}ms]".format(t1))
Exemplo n.º 9
0
def _trading():
    """Main trade cycle loop.
    Preloads historic data, then runs trade.update('5m') on the 5-minute
    clock boundary (UTC).
    """
    print('Preloading historic data....')
    trade.init()

    cycle_tmr = Timer(name='trade_5m', expire='every 5 clock min utc')

    while True:
        if cycle_tmr.remain() == 0:
            # Short delay before evaluating -- presumably lets the final
            # candle of the period arrive first.
            time.sleep(10)
            trade.update('5m')
            cycle_tmr.reset()
        time.sleep(5)
Exemplo n.º 10
0
def run(e_pairs, e_kill):
    """Main scanner thread loop.
    Scans once at startup, then every 20 clock minutes until e_kill is set.
    """
    tmr = Timer(expire='every 20 clock minutes utc', quiet=True)
    sma_med_trend_filter()

    while not e_kill.isSet():
        if tmr.remain() == 0:
            # Reload bot config so conf edits apply without a restart.
            importlib.reload(docs.botconf)
            lock.acquire()
            print("{} pairs enabled pre-scan.".format(len(get_pairs())))
            lock.release()
            # Reset enabled pairs to only open trades, then re-enable
            # whatever passes the scan filter.
            set_pairs([], 'ENABLED', exclusively=True)
            sma_med_trend_filter()
            tmr.reset()
        time.sleep(3)

    print("Scanner thread: terminating...")
Exemplo n.º 11
0
def run(e_pairs, e_kill):
    """Websocket thread loop: subscribe to kline sockets for all enabled
    (pair, frequency) combos and periodically flush received candles to DB.

    @e_pairs: event set elsewhere when the enabled pair list changes.
    @e_kill: event set to request thread shutdown.
    Side effects: mutates module globals storedata, connkeys, ws.
    """
    global storedata, connkeys, ws
    client = app.bot.client

    #print("Connecting to websocket...")
    ws = BinanceSocketManager(client)

    # One kline socket per (pair, frequency); recv_kline is the callback.
    pairs = get_pairs()
    connkeys += [ws.start_kline_socket(pair, recv_kline, interval=n) \
        for n in TRD_FREQS for pair in pairs]
    lock.acquire()
    print("Subscribed to {} kline sockets.".format(len(connkeys)))
    lock.release()

    ws.start()
    #print('Connected. Press Ctrl+C to quit')

    tmr = Timer(name='pairs', expire='every 5 clock min utc', quiet=True)

    while True:
        if e_kill.isSet():
            break

        # Pair list changed elsewhere: resubscribe the sockets.
        if e_pairs.isSet():
            update_sockets()
            e_pairs.clear()

        # Flush accumulated candle data to DB every 5 clock minutes.
        if tmr.remain() == 0:
            tmr.reset()
            if len(storedata) > 0:
                #print("websock_thread: saving new candles...")
                candles.bulk_save(storedata)
                storedata = []

        time.sleep(1)

    close_all()
    print("Websock thread: Terminating...")
Exemplo n.º 12
0
def _daily():
    """Daily maintenance loop: run app.eod_tasks() once per UTC day.
    """
    tmr = Timer(name='daily', expire=utc_dtdate() + timedelta(days=1))

    while True:
        if tmr.remain() == 0:
            app.eod_tasks()
            # Push expiry forward to the start of the next UTC day.
            tmr.set_expiry(utc_dtdate() + timedelta(days=1))

        print("daily: {:} sec remain".format(tmr.remain(unit='s')))
        # remain() appears to be in ms (hence /1000); sleep until expiry.
        time.sleep(tmr.remain() / 1000)
Exemplo n.º 13
0
def show_history(stdscr, symbol):
    """Render a curses table of the most recent daily tickers for @symbol.

    @stdscr: curses screen to draw on.
    @symbol: coin symbol, e.g. 'BTC'.
    Returns the number of data rows that overflow the visible screen
    (for scroll handling), or False if no history exists.
    """
    log.info("Querying %s ticker history", symbol)
    t1 = Timer()
    db = get_db()

    # NOTE(review): 'ex' (CAD forex rate) is computed but never used here.
    ex = forex.getrate('CAD', utc_dtdate())
    n_display = 95
    colspace = 3
    indent = 2
    hdr = ['Date', 'Open', 'High', 'Low', 'Close', 'Market Cap', 'Vol 24h']

    tickerdata = db.cmc_tick.find({
        "symbol": symbol
    }).sort('date', -1).limit(n_display)
    # Count once and reuse; the original called cursor.count() three
    # times (and Cursor.count() is deprecated in newer pymongo).
    n_datarows = tickerdata.count()
    log.debug("%s tickers queried in %sms", n_datarows, t1)

    if n_datarows == 0:
        log.info("No ticker history found for %s", symbol)
        return False

    # Format each ticker document into a row of display strings.
    strrows = []
    for tck in tickerdata:
        strrows.append([
            tck['date'].strftime("%m-%d-%Y"),
            pretty(tck['open'], t="money"),
            pretty(tck['high'], t="money"),
            pretty(tck['low'], t="money"),
            pretty(tck['close'], t="money"),
            pretty(tck['mktcap_usd'], t="money", abbr=True),
            pretty(tck['vol_24h_usd'], t="money", abbr=True)
        ])
    import curses
    from app.screen import divider, printrow, _colsizes
    colwidths = _colsizes(hdr, strrows)

    stdscr.clear()
    stdscr.addstr(0, indent, "%s History" % symbol)
    printrow(stdscr, 2, hdr, colwidths, [c.WHITE for n in hdr], colspace)

    divider(stdscr, 3, colwidths, colspace)
    for i in range(0, len(strrows)):
        colors = [c.WHITE for n in range(0, 7)]
        printrow(stdscr, i + 4, strrows[i], colwidths, colors, colspace)

    # Rows that don't fit on screen (4 header/divider lines reserved).
    n_rem_scroll = n_datarows - (curses.LINES - 4)
    log.info("n_datarows=%s, n_rem_scroll=%s", n_datarows, n_rem_scroll)
    return n_rem_scroll
Exemplo n.º 14
0
def _data(now=False):
    """CoinMarketCap ticker update loop: refresh tickers every 5 clock
    minutes (UTC).

    @now: if True, run one update immediately before entering the loop.
    """
    cmc = Timer(name='cmc', expire='every 5 clock min utc')
    if now:  # idiom fix: was 'if now == True'
        tickers.update(limit=500)

    while True:
        if cmc.remain() == 0:
            tickers.update(limit=500)
            print('Updated CMC tickers')
            cmc.reset()

        print("cmc: {:} sec remain".format(cmc.remain(unit='s')))
        # remain() appears to be in ms (hence /1000); sleep until expiry.
        time.sleep(cmc.remain()/1000)
Exemplo n.º 15
0
def query_api_tick(start=0, limit=None):
    """Update 5T ticker data from coinmarketcap.com REST API.

    @start: ranking offset to start querying from.
    @limit: max tickers to fetch (the API treats 0 as 'all').
    Upserts one document per fresh ticker, keyed by (date, symbol).
    """
    idx = start
    t1 = Timer()
    db = get_db()

    try:
        r = requests.get("https://api.coinmarketcap.com/v1/ticker/?start={}&limit={}"\
            .format(idx, limit or 0))
        data = json.loads(r.text)
    except Exception as e:
        # Fix: original logged r.status_code here, which raises NameError
        # when requests.get() itself fails (r never bound).
        return log.error("API error %s", str(e))

    if r.status_code != 200:
        return log.error("API error %s", r.status_code)

    # Guard the data[0] accesses below against an empty response.
    if not data:
        return log.error("API returned no ticker data")

    # Sort by timestamp in descending order
    data = sorted(data, key=lambda x: int(x["last_updated"] or 1))[::-1]

    # Prune tickers more than 3 minutes older than the newest one.
    ts_range = range(
        int(data[0]["last_updated"]) - 180,
        int(data[0]["last_updated"]) + 1)
    tickerdata = [ n for n in data if n["last_updated"] and int(n["last_updated"]) in ts_range ]
    # Round the newest timestamp down to the minute for the 'date' key.
    _dt = to_dt(int(data[0]["last_updated"]))
    updated = _dt - delta(seconds=_dt.second, microseconds=_dt.microsecond)
    ops = []

    for ticker in tickerdata:
        store={"date":updated}

        # Map/cast API fields per the coinmarketcap ticker schema config.
        for f in coinmarketcap['api']['tickers']:
            try:
                val = ticker[f["from"]]
                store[f["to"]] = f["type"](val) if val else None
            except Exception as e:
                log.exception("%s ticker error", ticker["symbol"])
                continue

        ops.append(ReplaceOne(
            {'date':updated, 'symbol':store['symbol']}, store, upsert=True))

    if save_capped_db(ops, db.cmc_tick):
        log.info("%s Coinmktcap tickers updated. [%sms]", len(tickerdata), t1)
Exemplo n.º 16
0
def scrape_history(_id, name, symbol, rank, start, end):
    """Scrape coinmarketcap for historical ticker data in given date range.

    @_id/@name/@symbol/@rank: coin identity fields stored on each doc.
    @start/@end: datetime bounds for the scrape.
    Upserts one db.tickers_1d document per scraped row, keyed by
    (symbol, date). Returns True when the scrape yielded no rows.
    """
    db = get_db()
    tmr = Timer()

    # Fetch and parse the raw HTML for the date range.
    try:
        html = download_data(_id, start.strftime("%Y%m%d"), end.strftime("%Y%m%d"))
    except Exception as e:
        return log.exception("Error scraping %s", symbol)
    header, rows = extract_data(html)

    bulkops = []
    for row in rows:
        # Row layout: date, open, high, low, close, vol_24h_usd, mktcap_usd
        doc = {
            "symbol": symbol,
            "id": _id,
            "name": name,
            "date": dateparser.parse(row[0]).replace(tzinfo=pytz.utc),
            "open": float(row[1]),
            "high": float(row[2]),
            "low": float(row[3]),
            "close": float(row[4]),
            "spread": float(row[2]) - float(row[3]),
            "vol_24h_usd": to_int(row[5]),
            "mktcap_usd": to_int(row[6]),
            "rank_now": rank,
        }
        bulkops.append(ReplaceOne(
            {"symbol": symbol, "date": doc["date"]}, doc, upsert=True))

    if len(bulkops) < 1:
        log.info("No results for symbol=%s, start=%s, end=%s",
            symbol, start, end)
        return True

    result = db.tickers_1d.bulk_write(bulkops)

    log.info("upd_hist_tckr: sym=%s, scraped=%s, mod=%s, upsert=%s (%s ms)",
        symbol, len(rows), result.modified_count, result.upserted_count, tmr)
Exemplo n.º 17
0
def bulk_save(data, silent=False):
    """Bulk-insert candle documents into db.candles.

    The collection has a unique index on (pair, freq, open_time), so we
    insert unordered and derive the number actually saved from the
    duplicate-key writeErrors instead of pre-checking for duplicates.
    Every non-duplicate item is still attempted.
    """
    tmr = Timer()
    try:
        result = app.get_db().candles.insert_many(data, ordered=False)
        n_insert = len(result.inserted_ids)
    except OperationFailure as e:
        # Inserts that failed were duplicates; the rest went through.
        n_insert = len(data) - len(e.details['writeErrors'])

    msg = "Saved {}/{} new records. [{} ms]".format(n_insert, len(data), tmr)
    log.debug(msg)
    if silent is False:
        lock.acquire()
        print(msg)
        lock.release()
Exemplo n.º 18
0
def api_update(pairs, freqstrs, startstr=None, silent=False):
    """Query Binance klines for each (pair, freqstr) combo and bulk-save
    them to db.candles.

    @pairs: list of pair symbols.
    @freqstrs: list of Binance interval strings.
    @startstr: optional start date string (daily candles override this
        with a 120-day window).
    @silent: passed through to bulk_save() to suppress console output.
    Returns: list of candle documents saved.
    """
    t1 = Timer()
    candles = []

    for pair in pairs:
        for freqstr in freqstrs:
            # Daily candles get a longer default history window.
            if freqstr == '1d':
                data = query_api(pair, freqstr, startstr="120 days ago utc")
            else:
                data = query_api(pair, freqstr, startstr=startstr)

            if len(data) == 0:
                continue

            # Cast each raw kline row into a typed document keyed by
            # BINANCE_REST_KLINES (ms epochs -> datetimes, strs -> floats).
            for i, row in enumerate(data):
                row = [
                    pd.to_datetime(int(row[0]), unit='ms', utc=True),
                    float(row[1]),
                    float(row[2]),
                    float(row[3]),
                    float(row[4]),
                    float(row[5]),
                    pd.to_datetime(int(row[6]), unit='ms', utc=True),
                    float(row[7]),
                    int(row[8]),
                    float(row[9]),
                    float(row[10]),
                    None
                ]
                d = dict(zip(BINANCE_REST_KLINES, row))
                d.update({'pair': pair, 'freqstr': freqstr})
                data[i] = d
            candles += data

    bulk_save(candles, silent=silent)
    return candles
Exemplo n.º 19
0
#---------------------------------------------------------------------------
if __name__ == '__main__':
    # Script entry: subscribe to Binance kline websockets for all pairs
    # and spin until SIGINT. Relies on names defined elsewhere in this
    # module (pairs, conn_keys, connect_klines, detect_pair_change,
    # update_spinner, close_all).
    db = set_db('localhost')
    # NOTE(review): assumes at least one api_keys document exists.
    cred = list(db.api_keys.find())[0]
    # GracefulKiller presumably traps SIGINT/SIGTERM and sets kill_now.
    killer = GracefulKiller()

    print("Connecting to Binance websocket client...")
    client = Client(cred['key'], cred['secret'])
    bnc_wss = BinanceSocketManager(client)
    connect_klines(bnc_wss, pairs)
    print("{} connections created.".format(len(conn_keys)))
    bnc_wss.start()

    print('Connected.')
    print('Press Ctrl+C to quit')

    timer_1m = Timer(name='pairs', expire='every 1 clock min utc')

    while True:
        # Re-check the enabled pair list every clock minute.
        if timer_1m.remain(quiet=True) == 0:
            pairs = detect_pair_change()
            timer_1m.reset(quiet=True)

        if killer.kill_now:
            print('Caught SIGINT command. Shutting down...')
            break
        update_spinner()
        time.sleep(0.1)

    close_all()
Exemplo n.º 20
0
def bulk_load(pairs, freqstrs, startstr=None, startdt=None):
    """Merge only newly updated DB records into dataframe to avoid ~150k
    DB reads every main loop.

    @pairs/@freqstrs: filter lists for the db.candles query.
    @startstr/@startdt: optional lower bound on open_time (string form
        is parsed with dateutil).
    Returns the merged global dataframe (app.bot.dfc).
    """
    db = app.get_db()
    t1 = Timer()
    columns = ['open', 'close', 'high', 'low', 'trades', 'volume', 'buy_vol']
    exclude = ['_id', 'quote_vol','sell_vol', 'close_time']
    proj = dict(zip(exclude, [False]*len(exclude)))
    query = {
        'pair': {'$in':pairs},
        'freqstr': {'$in':freqstrs}
    }

    if startstr:
        query['open_time'] = {'$gte':parse(startstr)}
    elif startdt:
        query['open_time'] = {'$gte':startdt}

    batches = db.candles.find_raw_batches(query, proj)
    # Count once; the original queried batches.count() twice.
    n_docs = batches.count()
    if n_docs < 1:
        print("No db matches for query {}.".format(query))
        return app.bot.dfc

    dtype = np.dtype([
        ('pair', 'S12'),
        ('freqstr', 'S3'),
        ('open_time', np.int64),
        ('open', np.float64),
        ('close', np.float64),
        ('high', np.float64),
        ('low', np.float64),
        ('buy_vol', np.float64),
        ('volume', np.float64),
        ('trades', np.int32)
    ])
    # Bulk load mongodb records into predefined, fixed-size numpy array.
    # 10x faster than manually casting mongo cursor into python list.
    try:
        ndarray = sequence_to_ndarray(batches, dtype, n_docs)
    except Exception as e:
        print(str(e))
        return app.bot.dfc

    # Build multi-index dataframe from ndarray
    df = pd.DataFrame(ndarray)
    df['open_time'] = pd.to_datetime(df['open_time'], unit='ms')
    df['freqstr'] = df['freqstr'].str.decode('utf-8')
    df['pair'] = df['pair'].str.decode('utf-8')
    # Convert freqstr->freq to enable index sorting. (Plain loop replaces
    # the original side-effect-only list comprehension.)
    df = df.rename(columns={'freqstr':'freq'})
    for n in TRD_FREQS:
        df['freq'].replace(n, strtofreq(n), inplace=True)
    df.sort_values(by=['pair','freq','open_time'], inplace=True)

    dfc = pd.DataFrame(df[columns].values,
        index = pd.MultiIndex.from_arrays(
            [df['pair'], df['freq'], df['open_time']],
            names = ['pair','freq','open_time']),
        columns = columns
    ).sort_index()

    n_bulk = len(dfc)
    n_before = len(app.bot.dfc)

    app.bot.dfc = pd.concat([app.bot.dfc, dfc])
    app.bot.dfc = app.bot.dfc[~app.bot.dfc.index.duplicated(keep='first')]

    # Fix: original computed len(dfc) - n_bulk, which is always 0 since
    # 'dfc' is never mutated; measure growth of the global frame instead.
    n_merged = len(app.bot.dfc) - n_before

    log.debug("{:,} docs loaded, {:,} merged in {:,.1f} ms."\
        .format(n_bulk, n_merged, t1))

    return app.bot.dfc
Exemplo n.º 21
0
def show_portfolio(stdscr):
    """Render the portfolio curses view: one row per held coin with price,
    market and percent-change columns, plus a summary table. Values are
    converted via the CAD forex rate.
    """
    diff = tickers.diff  # helper: % change vs historic price
    t1 = Timer()
    db = get_db()
    total = 0.0    # total portfolio value
    profit = 0     # 24h profit estimate
    datarows, updated = [], []
    ex = forex.getrate('CAD', utc_dtdate())  # CAD conversion rate
    hdr = [
        'Rank', 'Sym', 'Price', 'Mcap', 'Vol 24h', '1 Hour', '24 Hour',
        '7 Day', '30 Days', '3 Months', 'Amount', 'Value', '/100'
    ]

    # Build datarows
    for hold in db.portfolio.find():
        # Latest ticker for the held symbol; skip if none stored.
        cursor = db.cmc_tick.find({
            "symbol": hold["symbol"]
        }).sort("date", -1).limit(1)

        if cursor.count() < 1: continue
        tckr = cursor.next()

        value = round(hold['amount'] * ex * tckr['price_usd'], 2)
        profit += (tckr['pct_24h'] / 100) * value if tckr['pct_24h'] else 0.0
        total += value
        updated.append(tckr["date"].timestamp())

        # Column positions must match hdr above (13 columns; index 12 is
        # the '/100' placeholder filled in below).
        datarows.append([
            tckr['rank'], tckr['symbol'], ex * round(tckr['price_usd'], 2),
            ex * tckr.get('mktcap_usd', 0), ex * tckr["vol_24h_usd"],
            tckr["pct_1h"], tckr["pct_24h"], tckr["pct_7d"],
            diff(tckr["symbol"],
                 tckr["price_usd"],
                 "30D",
                 to_format="percentage"),
            diff(tckr["symbol"],
                 tckr["price_usd"],
                 "90D",
                 to_format="percentage"), hold['amount'], value, None
        ])

    # Calculate porfolio %
    for datarow in datarows:
        datarow[12] = round((float(datarow[11]) / total) * 100, 2)
    # Sort by value
    datarows = sorted(datarows, key=lambda x: int(x[11]))[::-1]

    # Format display strings; color the % columns by sign.
    rows, colors = [], []
    for datarow in datarows:
        colors.append([c.WHITE] * 5 +
                      [pnlcolor(datarow[n])
                       for n in range(5, 10)] + [c.WHITE] * 3)
        rows.append([
            datarow[0], datarow[1],
            pretty(datarow[2], t='money'),
            pretty(datarow[3], t='money', abbr=True),
            pretty(datarow[4], t='money', abbr=True),
            pretty(datarow[5], t='pct', f='sign'),
            pretty(datarow[6], t='pct', f='sign'),
            pretty(datarow[7], t='pct', f='sign'),
            pretty(datarow[8], t='pct', f='sign'),
            pretty(datarow[9], t='pct', f='sign'),
            pretty(datarow[10], abbr=True),
            pretty(datarow[11], t='money'),
            pretty(datarow[12], t='pct')
        ])

    # Print title Row
    stdscr.clear()
    updated = to_relative_str(utc_datetime() - to_dt(max(updated))) + " ago"
    stdscr.addstr(0, 2, "Updated %s" % updated)
    stdscr.addstr(0, stdscr.getmaxyx()[1] - 5, CURRENCY.upper())
    stdscr.addstr(1, 0, "")

    # Portfolio datatable
    print_table(stdscr, ["Portfolio"],
                hdr,
                rows,
                colors,
                align='right',
                colsp=2,
                div=True)
    stdscr.addstr(stdscr.getyx()[0] + 1, 0, "")

    # Summary table
    print_table(stdscr, ["Summary"], ["Holdings", "24 Hour", "Total Value"], [[
        len(datarows),
        pretty(int(profit), t="money", f='sign', d=0),
        pretty(total, t='money')
    ]], [[c.WHITE, pnlcolor(profit), c.WHITE]],
                div=True)
Exemplo n.º 22
0
def merge_new(dfc, pairs, span=None):
    """Merge only newly updated DB records into dataframe to avoid ~150k
    DB reads every main loop.

    @dfc: existing multi-index (pair, freq, open_time) candle dataframe.
    @pairs: pair symbols to match when doing a timespan query.
    @span: optional timedelta; if None and we've merged before, only
        records inserted since last_update are loaded.
    Returns the merged dataframe. Updates global last_update.
    """
    global last_update
    t1 = Timer()
    columns = ['open', 'close', 'trades', 'volume', 'buy_ratio']
    exclude = ['_id','high','low','quote_vol','sell_vol', 'close_time']
    projection = dict(zip(exclude, [False]*len(exclude)))
    idx, data = [], []
    db = app.get_db()

    if span is None and last_update:
        # If no span, query/merge db records inserted since last update.
        # ObjectIds embed their creation time, so this matches documents
        # inserted after last_update.
        oid = ObjectId.from_datetime(last_update)
        last_update = now()
        _filter = {'_id':{'$gte':oid}}
    else:
        # Else query/merge all since timespan.
        span = span if span else timedelta(days=7)
        last_update = now()
        _filter = {'pair':{'$in':pairs}, 'close_time':{'$gte':now()-span}}

    batches = db.candles.find_raw_batches(_filter, projection)

    if batches.count() < 1:
        return dfc

    # 'dtype' is defined at module level (not visible in this block).
    try:
        ndarray = bsonnumpy.sequence_to_ndarray(
            batches,
            dtype,
            # NOTE(review): passes the TOTAL collection count rather than
            # the filtered match count -- confirm this is intended.
            db.candles.count()
        )
    except Exception as e:
        log.error(str(e))
        return dfc
        #raise

    df = pd.DataFrame(ndarray)
    df['open_time'] = pd.to_datetime(df['open_time'], unit='ms')
    df['freq'] = df['freq'].str.decode('utf-8')
    df['pair'] = df['pair'].str.decode('utf-8')

    # Map frequency strings to their period length in seconds so the
    # index sorts numerically.
    df['freq'] = df['freq'].replace('1m',60)
    df['freq'] = df['freq'].replace('5m',300)
    df['freq'] = df['freq'].replace('1h',3600)
    df['freq'] = df['freq'].replace('1d',86400)
    df = df.sort_values(by=['pair','freq','open_time'])

    # Rebuild as a (pair, freq, open_time) multi-index frame.
    df2 = pd.DataFrame(df[columns].values,
        index = pd.MultiIndex.from_arrays(
            [df['pair'], df['freq'], df['open_time']],
            names = ['pair','freq','open_time']),
        columns = columns
    ).sort_index()

    # Merge with the existing frame, dropping duplicate rows.
    df3 = pd.concat([dfc, df2]).drop_duplicates().sort_index()

    log.debug("{:,} records loaded into numpy. [{:,.1f} ms]".format(
        len(df3), t1))
    #print("Done in %s ms" % t1)
    return df3
Exemplo n.º 23
0
def run(e_pairs, e_kill):
    """Main trading loop thread. Consumes candle data from queue and
    manages/executes trades.
    TODO: add in code for tracking unclosed candle wicks prices:
        # Clear all partial candle data
        dfW = dfW.drop([(c['pair'], strtofreq(c['freqstr']))])

    @e_pairs: event signalling enabled-pair changes (unused here directly).
    @e_kill: event set to request thread shutdown.
    """
    from main import q
    db = app.get_db()
    t1 = Timer()
    tmr1 = Timer(name='pos', expire='every 1 clock min utc', quiet=True)
    tmr10 = Timer(name='earn', expire='every 10 clock min utc', quiet=True)

    reports.positions()
    reports.earnings()

    n = 0  # queue items processed since last throughput report
    while True:
        if e_kill.isSet():
            break
        ent_ids, ex_ids = [], []

        # Trading algo inner loop: drain the candle queue, updating the
        # global dataframe and evaluating entries/exits per candle.
        while q.empty() == False:
            c = q.get()
            candles.modify_dfc(c)
            ss = snapshot(c)
            query = {
                'pair': c['pair'],
                'freqstr': c['freqstr'],
                'status': 'open'
            }

            # Eval position entries/exits

            for trade in db.trades.find(query):
                update_stats(trade, ss)
                ex_ids += eval_exit(trade, c, ss)

            if c['closed'] and c['pair'] in get_pairs():
                ent_ids += eval_entry(c, ss)

            n += 1

        # Reporting outer loop.
        if tmr1.remain() == 0:
            reports.positions()
            tmr1.reset()
        if tmr10.remain() == 0:
            reports.earnings()
            tmr10.reset()
        if len(ent_ids) + len(ex_ids) > 0:
            reports.trades(ent_ids + ex_ids)
        if n > 75:
            lock.acquire()
            print('{} queue items processed. [{:,.0f} ms/item]'\
                .format(n, t1.elapsed()/n))
            lock.release()
            t1.reset()
            n = 0

        # Outer loop tail
        # NOTE: 'c' here is the last candle from the inner loop; ex_ids is
        # only non-empty when at least one candle was consumed, so 'c' is
        # bound whenever this branch runs.
        if len(ex_ids) > 0:
            if c['pair'] not in get_pairs():
                # TODO: check no other open positions hold this pair, safe
                # for disabling.
                set_pairs([c['pair']], 'DISABLED')
        update_spinner()
        time.sleep(0.1)

    print('Trade thread: Terminating...')
Exemplo n.º 24
0
# client
import curses, logging
from docs.conf import *
from app.common.timer import Timer
from app import set_db, get_db
from app.crunch import screen, views
from curses import KEY_UP, KEY_DOWN
log = logging.getLogger("client")

# Globals
scrollpos = scrollremain = 0  # scroll offset / rows left to scroll
scrollscr = None  # presumably the curses window being scrolled -- confirm
view = None  # currently active view callable/name
timer = Timer()  # shared timer for the client loop


#----------------------------------------------------------------------
def update_db(collection, data):
    """Insert or upsert documents into the given collection.

    @collection: collection name within the app DB.
    @data: iterable of documents, each carrying a unique 'symbol' key.
    On an empty collection each item is inserted (logged at info);
    otherwise each is upserted by symbol (logged at debug).
    """
    db = get_db()
    # Initialize if collection empty.
    # count_documents({}) replaces the deprecated find().count().
    if db[collection].count_documents({}) == 0:
        for item in data:
            db[collection].insert_one(item)
            log.info('initialized %s symbol %s', collection, item['symbol'])
    # Update collection
    else:
        for item in data:
            db[collection].replace_one({'symbol': item['symbol']},
                                       item,
                                       upsert=True)
            log.debug('updated %s symbol %s', collection, item['symbol'])
Exemplo n.º 25
0
def update(_freq_str):
    """Evaluate Binance market data and execute buy/sell trades.

    @_freq_str: candle frequency to evaluate, e.g. '5m'.
    Side effects: updates globals n_cycles, freq_str, freq; updates
    app.bot.dfc; writes trade documents via buy()/sell().
    """
    global n_cycles, freq_str, freq

    trade_ids = []
    freq_str = _freq_str
    # NOTE: strtofreq is used as a mapping here (subscripted, not called).
    freq = strtofreq[freq_str]
    t1 = Timer()
    db = get_db()

    # Update candles updated by websocket
    app.bot.dfc = candles.merge_new(app.bot.dfc, pairs, span=None)

    tradelog('*' * 80)
    duration = to_relative_str(now() - start)
    # Right-pad the duration so the header line is 80 chars wide.
    hdr = "Cycle #{}, Period {} {:>%s}" % (61 - len(str(n_cycles)))
    tradelog(hdr.format(n_cycles, freq_str, duration))
    tradelog('*' * 80)

    # Output candle signals to siglog
    if freq_str in siglog_freq:
        siglog('-' * 80)
        for pair in pairs:
            printer.candle_sig(candles.newest(pair, freq_str, df=app.bot.dfc))

    # Evaluate existing positions
    active = list(db.trades.find({'status': 'open', 'freq': freq_str}))

    for trade in active:
        candle = candles.newest(trade['pair'], freq_str, df=app.bot.dfc)
        result = strategy.update(candle, trade)

        print('{} {} {}'.format(candle['pair'], candle['freq'],
                                result['snapshot']['details']))

        # Sell on strategy signal; otherwise append the snapshot to the
        # open trade's history.
        if result['action'] == 'SELL':
            trade_ids += [sell(trade, candle, criteria=result)]
        else:
            db.trades.update_one({"_id": trade["_id"]},
                                 {"$push": {
                                     "snapshots": result['snapshot']
                                 }})

    # Inverse active list and evaluate opening new positions
    inactive = sorted(list(set(pairs) - set([n['pair'] for n in active])))

    for pair in inactive:
        candle = candles.newest(pair, freq_str, df=app.bot.dfc)
        results = strategy.evaluate(candle)
        for res in results:
            print('{} {} {}'.format(candle['pair'], candle['freq'],
                                    res['snapshot']['details']))

            if res['action'] == 'BUY':
                trade_ids += [buy(candle, criteria=res)]

    # Summary output: new trades, then open and closed positions.
    tradelog('-' * 80)
    printer.new_trades([n for n in trade_ids if n])
    tradelog('-' * 80)
    printer.positions('open')
    tradelog('-' * 80)
    printer.positions('closed')

    n_cycles += 1
Exemplo n.º 26
0
def agg_describe(pair, freqstr, n_periods, pdfreqstr=None):
    """Describe aggregate macd positive/negative oscilator phases in timespan.

    @pair: trading pair symbol.
    @freqstr: candle frequency string (mapped to seconds via strtofreq).
    @n_periods: number of most-recent periods to analyze.
    @pdfreqstr: pandas frequency string for asfreq() resampling.
    Returns dict with 'summary' (text), 'stats', 'phases', 'elapsed_ms'.
    """
    t1 = Timer()
    from app.common.utils import to_relative_str as relative
    # NOTE: strtofreq is used as a mapping here (subscripted, not called).
    freq = strtofreq[freqstr]

    # MACD series for the requested window of this pair/freq slice.
    df_macd = generate(
        app.bot.dfc.loc[pair, freq]
    ).dropna().tail(n_periods).asfreq(pdfreqstr)

    # Walk the series, collecting consecutive same-sign oscillator phases.
    phases=[]
    last_iloc = 0

    while last_iloc <= len(df_macd) - 1:
        phase = _get_phase(df_macd, last_iloc+1)
        if not phase:
            break
        phases.append(phase)
        last_iloc = phase['iloc'][1]

    stats = {
        'pair':pair,
        'freqstr':freqstr,
        'periods':len(df_macd['macd_diff']),
        'phases': len(phases)
    }

    #"{} MACD Phase Analysis\n"\
    summary = "\n"\
        "Freq: {}, Periods: {}, Total Phases: {}\n"\
        .format(
            #pair,
            freqstr, len(df_macd['macd_diff']), len(phases))

    # Aggregate per-sign statistics (area, length, duration, price move).
    for sign in ['POSITIVE', 'NEGATIVE']:
        grp = [ n for n in phases if n['sign'] == sign ]

        # Empty groups fall back to a single zero so .sum()/.mean() are
        # defined below.
        area = np.array([ n['area'] for n in grp ])
        if len(area) == 0:
            area = np.array([0])

        periods = np.array([ n['length'] for n in grp ])
        if len(periods) == 0:
            periods = np.array([0])

        duration = np.array([ n['seconds'] for n in grp])
        if len(duration) == 0:
            duration = np.array([0])

        # Close-to-close % change across each phase.
        price_diff = np.array([
            pct_diff(
                n['df'].iloc[0]['close'],
                n['df'].iloc[-1]['close']
            ) for n in grp
        ])

        summary += \
            "\t{} Phases: {}\n"\
            "\tPrice: {:+.2f}% (mean: {:+.2f}%)\n"\
            "\tArea: {:.2f} (mean: {:.2f})\n"\
            "\tPeriods: {:} (mean: {:.2f})\n"\
            "\tDuration: {:} (mean: {})\n"\
            .format(
                sign.title(),
                len(grp),
                price_diff.sum(), price_diff.mean(),
                abs(area.sum()), abs(area.mean()),
                periods.sum(), periods.mean(),
                relative(delta(seconds=int(duration.sum()))),
                relative(delta(seconds=int(duration.mean())))
            )

        stats[sign] = {
            'n_phases': len(grp),
            'price_diff': pd.DataFrame(price_diff).describe().to_dict(),
            'area': pd.DataFrame(area).describe().to_dict(),
            'periods': pd.DataFrame(periods).describe().to_dict(),
            'duration': pd.DataFrame(duration).describe().to_dict()
        }

    return {
        'summary':summary,
        'stats':stats,
        'phases':phases,
        'elapsed_ms':t1.elapsed()
    }