def query_api(pair, freqstr, startstr=None, endstr=None):
    """Get Historical Klines (candles) from Binance.

    @pair: trade pair symbol, e.g. 'BTCUSDT'.
    @freqstr: Binance kline interval string: 1m, 3m, 5m, 15m, 30m, 1h, etc.
    @startstr: parseable date string; defaults to DEF_KLINE_HIST_LEN.
    @endstr: parseable date string; defaults to "now utc".
    Returns: list of raw kline rows exactly as returned by the API.
    """
    client = app.bot.client
    t1 = Timer()
    ms_period = strtofreq(freqstr) * 1000  # Interval length in milliseconds.
    end = strtoms(endstr or "now utc")
    start = strtoms(startstr or DEF_KLINE_HIST_LEN)
    results = []
    fails = 0

    while start < end:
        try:
            data = client.get_klines(
                symbol=pair,
                interval=freqstr,
                limit=BINANCE_REST_QUERY_LIMIT,
                startTime=start, endTime=end)
        except Exception as e:
            # BUGFIX: the original `continue` retried forever without
            # advancing `start`, spinning in a tight loop on any
            # persistent API error. Give up after 3 consecutive failures.
            log.exception("Binance API request error. e=%s", str(e))
            fails += 1
            if fails >= 3:
                break
            continue
        fails = 0

        if len(data) == 0:
            # Empty window: skip ahead one interval.
            start += ms_period
        else:
            results += data
            # Resume just after the last candle's open time.
            start = data[-1][0] + ms_period

    log.debug('%s %s %s queried [%ss].',
        len(results), freqstr, pair, t1.elapsed(unit='s'))
    return results
def init(evnt_pairs):
    """Initialize app.bot module state.

    Authenticates the Binance API client, upserts exchange trade-pair
    metadata into mongo, and loads candle data via set_pairs().

    @evnt_pairs: event object stored in module global `e_pairs`;
        presumably used to signal trade-pair changes to other threads —
        TODO confirm against consumers.
    """
    from app.common.timer import Timer
    from app.common.timeutils import strtofreq
    from . import candles, scanner
    global client, dfc, e_pairs

    e_pairs = evnt_pairs
    t1 = Timer()
    db = app.get_db()

    # Auth Binance client.
    # NOTE(review): assumes at least one api_keys document exists —
    # an empty collection raises IndexError here.
    cred = list(db.api_keys.find())[0]
    client = Client(cred['key'], cred['secret'])

    # Get available exchange trade pairs
    # Upsert each symbol's metadata so the assets collection stays current.
    info = client.get_exchange_info()
    ops = [
        UpdateOne({'symbol': n['symbol']}, {'$set': n}, upsert=True)
        for n in info['symbols']
    ]
    db.assets.bulk_write(ops)
    #print("{} active pairs retrieved from api.".format(len(ops)))

    # Load pair/candle state; query_temp=True presumably pulls stored
    # temp pair config — TODO confirm against set_pairs().
    set_pairs([], 'DISABLED', query_temp=True)

    #print("{:,} historic candles loaded.".format(len(dfc)))
    print('{} trading algorithms.'.format(len(TRD_ALGOS)))
    print('app.bot initialized in {:,.0f} ms.'.format(t1.elapsed()))
def update(pairs, freq, start=None, force=False):
    """Query kline data for given pairs and persist candle docs to mongo.

    @pairs: iterable of trade pair symbols.
    @freq: Binance kline interval string (1m, 5m, 1h, ...).
    @start: optional start date string, forwarded to query_api().
    @force: if True, re-query already-stored records and upsert them;
        if False, rely on query_api() to skip stored data and insert only.
    Returns: list of candle documents built (and written, if any).
    """
    t1 = Timer()
    candles = []

    for pair in pairs:
        data = query_api(pair, freq, start=start, force=force)
        if len(data) == 0:
            continue

        # Convert each raw kline row into a typed dict keyed by
        # BINANCE['KLINE_FIELDS'], tagged with pair/freq.
        for i in range(0, len(data)):
            x = data[i]
            x = [
                pd.to_datetime(int(x[0]), unit='ms', utc=True),
                float(x[1]), float(x[2]), float(x[3]), float(x[4]),
                float(x[5]),
                pd.to_datetime(int(x[6]), unit='ms', utc=True),
                float(x[7]), int(x[8]), float(x[9]), float(x[10]),
                None
            ]
            d = dict(zip(BINANCE['KLINE_FIELDS'], x))
            d.update({'pair': pair, 'freq': freq})
            # Guard divide-by-zero on zero-volume candles.
            if d['volume'] > 0:
                d['buy_ratio'] = round(d['buy_vol'] / d['volume'], 4)
            else:
                d['buy_ratio'] = 0.0
            data[i] = d
        candles += data

    if len(candles) > 0:
        db = app.get_db()
        if force:
            # Upsert on (close_time, pair, freq) to avoid duplicate-key
            # errors when re-querying stored ranges.
            ops = [
                ReplaceOne(
                    {"close_time": candle["close_time"],
                     "pair": candle["pair"],
                     "freq": candle["freq"]},
                    candle,
                    upsert=True
                ) for candle in candles
            ]
            result = db.candles.bulk_write(ops)
        else:
            # Should not create any duplicates because of force==False
            # check in query_api()
            result = db.candles.insert_many(candles)

    log.info("%s %s records queried/stored. [%ss]",
        len(candles), freq, t1.elapsed(unit='s'))
    return candles
def query_api(pair, freq, start=None, end=None, force=False):
    """Get Historical Klines (candles) from Binance.

    @freq: Binance kline frequency: 1m, 3m, 5m, 15m, 30m, 1h, 2h, 4h, 6h,
        8h, 12h, 1d, 3d, 1w, 1M]
        m -> minutes; h -> hours; d -> days; w -> weeks; M -> months
    @force: if False, only query unstored data (faster). If True, query all.
    Return: list of OHLCV value
    """
    t1 = Timer()
    limit = 500  # Max klines per REST request.
    results = []
    periodlen = intrvl_to_ms(freq)
    end_ts = datestr_to_ms(end) if end else dt_to_ms(now())
    # Default window: 20 periods back from end.
    start_ts = datestr_to_ms(start) if start else end_ts - (periodlen * 20)

    # Skip queries for records already stored
    if not force:
        query = {"pair": pair, "freq": freq}
        if start:
            query["open_time"] = {"$gt": datestr_to_dt(start)}
        newer = app.get_db().candles.find(query).sort('open_time', -1).limit(1)
        if newer.count() > 0:
            dt = list(newer)[0]['open_time']
            # Resume one period after the newest stored candle.
            start_ts = int(dt.timestamp() * 1000 + periodlen)
            if start_ts > end_ts:
                log.debug("All records for %s already stored.", pair)
                return []

    client = Client("", "")
    fails = 0

    while start_ts < end_ts:
        try:
            data = client.get_klines(symbol=pair, interval=freq,
                limit=limit, startTime=start_ts, endTime=end_ts)
        except Exception as e:
            # BUGFIX: the original looped forever on a persistent API
            # error since `start_ts` never advanced. Cap consecutive
            # failures, then give up with whatever was collected.
            log.exception("Binance API request error. e=%s", str(e))
            fails += 1
            if fails >= 3:
                break
            continue
        fails = 0

        if len(data) == 0:
            start_ts += periodlen
        else:
            # Don't want candles that aren't closed yet
            if data[-1][6] >= dt_to_ms(now()):
                results += data[:-1]
                break
            results += data
            start_ts = data[-1][0] + periodlen

    log.debug('%s %s %s queried [%ss].',
        len(results), freq, pair, t1.elapsed(unit='s'))
    return results
def init():
    """Load historic candle records from mongoDB into the global dataframe.

    Performance: ~3,000ms/100k records
    """
    global client

    timer = Timer()
    log.info('Preloading historic data...')

    # Pull the trailing 7-day window of candles for all tracked pairs
    # into the shared module-level dataframe.
    app.bot.dfc = candles.merge_new(
        pd.DataFrame(), pairs, span=delta(days=7))

    # Unauthenticated client — public endpoints only.
    client = Client("", "")

    log.info('{:,} records loaded in {:,.1f}s.'.format(
        len(app.bot.dfc), timer.elapsed(unit='s')))
def agg_describe(pair, freqstr, n_periods, pdfreqstr=None):
    """Describe aggregate macd positive/negative oscilator phases in timespan.

    @pair: trade pair symbol; indexes into app.bot.dfc with `freq`.
    @freqstr: frequency key into the strtofreq mapping.
    @n_periods: number of trailing periods to analyze.
    @pdfreqstr: pandas frequency string passed to asfreq() — presumably
        must match freqstr's period; TODO confirm against callers.
    Returns: dict with keys 'summary' (text report), 'stats' (per-sign
        describe() dicts), 'phases' (raw phase dicts), 'elapsed_ms'.
    """
    t1 = Timer()
    from app.common.utils import to_relative_str as relative
    freq = strtofreq[freqstr]

    df_macd = generate(
        app.bot.dfc.loc[pair, freq]
    ).dropna().tail(n_periods).asfreq(pdfreqstr)

    # Split the series into consecutive +/- MACD oscillator phases;
    # each phase dict carries its iloc span so the scan resumes where
    # the previous phase ended.
    phases=[]
    last_iloc = 0
    while last_iloc <= len(df_macd) - 1:
        phase = _get_phase(df_macd, last_iloc+1)
        if not phase:
            break
        phases.append(phase)
        last_iloc = phase['iloc'][1]

    stats = {
        'pair':pair,
        'freqstr':freqstr,
        'periods':len(df_macd['macd_diff']),
        'phases': len(phases)
    }

    #"{} MACD Phase Analysis\n"\
    summary = "\n"\
        "Freq: {}, Periods: {}, Total Phases: {}\n"\
        .format(
            #pair,
            freqstr, len(df_macd['macd_diff']), len(phases))

    for sign in ['POSITIVE', 'NEGATIVE']:
        grp = [ n for n in phases if n['sign'] == sign ]

        # Fall back to a single zero element so .sum()/.mean() stay
        # defined when a sign has no phases. NOTE(review): price_diff
        # has no such fallback — .mean() on an empty grp yields NaN.
        area = np.array([ n['area'] for n in grp ])
        if len(area) == 0:
            area = np.array([0])
        periods = np.array([ n['length'] for n in grp ])
        if len(periods) == 0:
            periods = np.array([0])
        duration = np.array([ n['seconds'] for n in grp])
        if len(duration) == 0:
            duration = np.array([0])
        # Percent close-price change across each phase's span.
        price_diff = np.array([
            pct_diff(
                n['df'].iloc[0]['close'],
                n['df'].iloc[-1]['close']
            ) for n in grp
        ])

        summary += \
            "\t{} Phases: {}\n"\
            "\tPrice: {:+.2f}% (mean: {:+.2f}%)\n"\
            "\tArea: {:.2f} (mean: {:.2f})\n"\
            "\tPeriods: {:} (mean: {:.2f})\n"\
            "\tDuration: {:} (mean: {})\n"\
            .format(
                sign.title(), len(grp),
                price_diff.sum(), price_diff.mean(),
                abs(area.sum()), abs(area.mean()),
                periods.sum(), periods.mean(),
                relative(delta(seconds=int(duration.sum()))),
                relative(delta(seconds=int(duration.mean())))
            )

        stats[sign] = {
            'n_phases': len(grp),
            'price_diff': pd.DataFrame(price_diff).describe().to_dict(),
            'area': pd.DataFrame(area).describe().to_dict(),
            'periods': pd.DataFrame(periods).describe().to_dict(),
            'duration': pd.DataFrame(duration).describe().to_dict()
        }

    return {
        'summary':summary,
        'stats':stats,
        'phases':phases,
        'elapsed_ms':t1.elapsed()
    }
def run(e_pairs, e_kill):
    """Main trading loop thread. Consumes candle data from queue and
    manages/executes trades.

    @e_pairs: event signalling trade-pair changes (currently unread here).
    @e_kill: threading event; the loop terminates when it is set.

    TODO: add in code for tracking unclosed candle wicks prices:
        # Clear all partial candle data
        dfW = dfW.drop([(c['pair'], strtofreq(c['freqstr']))])
    """
    from main import q
    db = app.get_db()
    t1 = Timer()
    tmr1 = Timer(name='pos', expire='every 1 clock min utc', quiet=True)
    tmr10 = Timer(name='earn', expire='every 10 clock min utc', quiet=True)

    reports.positions()
    reports.earnings()
    n = 0  # Queue items processed since last throughput report.

    while True:
        # is_set() replaces the deprecated isSet() camelCase alias.
        if e_kill.is_set():
            break
        ent_ids, ex_ids = [], []

        # Trading algo inner loop: drain every queued candle.
        while not q.empty():
            c = q.get()
            candles.modify_dfc(c)
            ss = snapshot(c)
            query = {
                'pair': c['pair'],
                'freqstr': c['freqstr'],
                'status': 'open'
            }

            # Eval position entries/exits
            for trade in db.trades.find(query):
                update_stats(trade, ss)
                ex_ids += eval_exit(trade, c, ss)

            if c['closed'] and c['pair'] in get_pairs():
                ent_ids += eval_entry(c, ss)
            n += 1

        # Reporting outer loop.
        if tmr1.remain() == 0:
            reports.positions()
            tmr1.reset()
        if tmr10.remain() == 0:
            reports.earnings()
            tmr10.reset()
        if len(ent_ids) + len(ex_ids) > 0:
            reports.trades(ent_ids + ex_ids)
        if n > 75:
            # Context manager guarantees release even if print raises.
            with lock:
                print('{} queue items processed. [{:,.0f} ms/item]'\
                    .format(n, t1.elapsed()/n))
            t1.reset()
            n = 0

        # Outer loop tail. `c` is bound whenever ex_ids is non-empty
        # (the inner loop must have run at least once to fill it).
        if len(ex_ids) > 0:
            if c['pair'] not in get_pairs():
                # TODO: check no other open positions hold this pair, safe
                # for disabling.
                set_pairs([c['pair']], 'DISABLED')

        update_spinner()
        time.sleep(0.1)

    print('Trade thread: Terminating...')