def get_future_coin_liquidation_orders(self, start_str=None, end_str=None, autoCloseType=None, symbol=None, limit=50):
    """Fetch the account's COIN-M futures forced (liquidation) orders.

    If ``autoCloseType`` is not sent, orders with both of the types will
    be returned.  If ``startTime`` is not sent, data within 200 days
    before ``endTime`` can be queried.

    :param start_str: optional - start date string or timestamp in ms
    :param end_str: optional - end date string or timestamp in ms
        (defaults to the current time when omitted)
    :param autoCloseType: optional - auto-close type filter
    :param symbol: optional - symbol to filter on
    :param limit: number of records to return (default 50)
    :return: whatever ``_futures_coin_liquidation_orders`` returns
    """
    # Convert the start date (string or ms timestamp) to milliseconds.
    start_ts = None
    if isinstance(start_str, int):
        start_ts = start_str
    elif isinstance(start_str, str):
        start_ts = int(date_to_milliseconds(start_str))

    # Convert the end date too; default to "now" in epoch milliseconds.
    if end_str:
        if isinstance(end_str, int):
            end_ts = end_str
        else:
            end_ts = int(date_to_milliseconds(end_str))
    else:
        end_ts = int(time.time() * 1000)

    return self._futures_coin_liquidation_orders(
        startTime=start_ts, endTime=end_ts, autoCloseType=autoCloseType,
        symbol=symbol, limit=limit)
def get_historical_candles(self, symbol, start_str,
                           interval=KLINE_INTERVAL_15MINUTE, end_str=None,
                           limit=500):
    """Fetch klines for *symbol* between two dates, paging past the
    per-request limit.

    :param symbol: market symbol, e.g. "BTCUSDT"
    :param start_str: start date string or timestamp in milliseconds
    :param interval: kline interval constant (default 15 minutes)
    :param end_str: optional - end date string or ms timestamp; when
        omitted the endpoint is queried with ``endTime=None``
    :param limit: max candles per request (default 500)
    :return: list of kline rows as returned by ``get_candles``
    """
    output_data = []
    timeframe = bhelp.interval_to_milliseconds(interval)

    if isinstance(start_str, int):
        start_ts = start_str
    else:
        start_ts = bhelp.date_to_milliseconds(start_str)

    # Never ask for data older than the first candle the exchange has.
    first_valid_ts = self._get_earliest_valid_timestamp(symbol, interval)
    start_ts = max(start_ts, first_valid_ts)

    end_ts = None
    if end_str:
        if isinstance(end_str, int):
            end_ts = end_str
        else:
            end_ts = bhelp.date_to_milliseconds(end_str)

    params = {
        "symbol": symbol,
        "interval": interval,
        "limit": limit,
        "startTime": start_ts,
        "endTime": end_ts
    }
    idx = 0
    while True:
        temp = self.get_candles(**params)
        if not temp:
            break
        output_data += temp
        # Resume from the open time of the last candle received.
        params["startTime"] = temp[-1][0]
        idx += 1
        # Fewer rows than the limit means we reached the end of data.
        if len(temp) < limit:
            break
        params["startTime"] += timeframe
        # Be kind to the API: pause after every third request.
        if idx % 3 == 0:
            time.sleep(1)
    return output_data
def __init__(self, binance_client: Client, symbol: str, from_date: str, to_date=None):
    """Store the API client, symbol and query window on the instance.

    ``from_date`` is converted to a millisecond timestamp immediately;
    ``to_date`` is kept exactly as supplied.
    """
    self.symbol = symbol
    self.client = binance_client
    # Normalise the start date to epoch milliseconds up front.
    self.from_date = date_to_milliseconds(from_date)
    self.to_date = to_date
def aggregate_trade_iter(self, symbol, start_str=None, last_id=None):
    """Iterate over aggregate trades for *symbol*, oldest first.

    :param symbol: market symbol, e.g. "BTCUSDT"
    :param start_str: optional - start date string or ms timestamp;
        trades are searched hour-by-hour from this point
    :param last_id: optional - aggregate trade id to resume from
    :raises ValueError: if both ``start_str`` and ``last_id`` are given
    :return: generator yielding aggregate trade dicts
    """
    # You can only specify one of the two.
    if start_str is not None and last_id is not None:
        raise ValueError(
            "start_time and last_id may not be simultaneously specified.")

    if last_id is None:
        if start_str is None:
            # Without a last_id, we actually need the first trade.
            params = {"symbol": symbol, "fromId": 0}
            trades = self.get_aggregate_trades(**params)
        else:
            if isinstance(start_str, int):
                start_ts = start_str
            else:
                start_ts = bhelp.date_to_milliseconds(start_str)
            while True:
                # Probe one hour at a time until trades are found.
                end_ts = start_ts + (60 * 60 * 1000)
                params = {
                    "symbol": symbol,
                    # BUGFIX: the REST parameter is "startTime";
                    # "StartTime" was silently ignored by the endpoint.
                    "startTime": start_ts,
                    "endTime": end_ts
                }
                trades = self.get_aggregate_trades(**params)
                if len(trades) > 0:
                    break
                # The probed window is already in the future and still
                # empty: nothing to yield at all.
                if end_ts > int(time.time() * 1000):
                    return
                start_ts = end_ts
        for trade in trades:
            yield trade
        last_id = trades[-1][self.AGG_ID]

    params = {"symbol": symbol, "fromId": last_id}
    while True:
        trades = self.get_aggregate_trades(**params)
        # fromId=n returns a set starting with id n, which has already
        # been yielded; a single-element result means we are caught up.
        if len(trades) == 1:
            return
        # BUGFIX: drop the duplicate first trade instead of re-yielding
        # it on every page.
        trades.pop(0)
        for trade in trades:
            yield trade
        params["fromId"] = trades[-1][self.AGG_ID]
def update_csv(filename, pair='BTCUSDT', interval='1m'):
    """Append to *filename* any klines missed since its last saved row.

    Reads the last row of the CSV via the reverse-line generator, derives
    the open time of the first missing kline, and fetches everything from
    there up to now from the API.

    :param filename: path of the kline CSV to update
    :param pair: market symbol, e.g. "BTCUSDT"
    :param interval: kline interval string (default '1m')
    """
    print("Updating CSV")
    # Last saved row; field 7 holds its date string.
    last_line = next(reverse_readline(filename)).split(
        ',')  # use the reverse generator
    # BUGFIX: this value used to be bound to the name `time`, shadowing
    # the stdlib `time` module for the rest of the function.
    start_ms = date_to_milliseconds(last_line[7]) + interval_to_milliseconds(
        interval)  # get last missed kline open time
    # BUGFIX: fetch with the requested interval instead of a hard-coded
    # '1m' (identical behaviour for the default argument).
    klines_diff = client.get_historical_klines(
        pair, interval, start_ms)  # fetch difference from api
    #print(klines_diff)
    for kline in klines_diff:  # write new klines to file
        print(kline)
        kline = convert_format(kline, pair)
        write_to_csv([kline], filename)
def update_panels(pair='BTCUSDT'):
    """Regenerate the prerendered CSV panels (30-, 7- and 1-day windows)
    for *pair* from the stored kline data."""
    print('on pair', pair)
    for days in [30, 7, 1]:
        # Window start: `days` days before the current UTC time,
        # rounded down to the minute.
        now_s = date_to_milliseconds('now') // (1000)
        #print(now_s)
        window_start = datetime.utcfromtimestamp(
            now_s - days * 86400).strftime('%Y/%m/%d %H:%M:00')
        #print(window_start)
        print('On interval days:', days)
        rows = get_from_csv(pair, window_start, 'now', True)
        print('writing to file')
        target = 'prerendered/pre' + pair + str(days) + '.csv'
        write_to_csv(rows, target, append=False)
    print('Finished updating prerendered files of ', pair)
def get_historical_candles_generator(self, symbol, start_str, interval=KLINE_INTERVAL_15MINUTE, end_str=None):
    """Yield klines for *symbol* one at a time, paging through history.

    :param symbol: market symbol, e.g. "BTCUSDT"
    :param start_str: start time; NOTE(review): unlike
        ``get_historical_candles`` this is passed straight to ``int()``,
        so it must already be a millisecond timestamp — confirm callers
    :param interval: kline interval constant (default 15 minutes)
    :param end_str: optional - end date string or ms timestamp
    """
    limit = 1000
    timeframe = bhelp.interval_to_milliseconds(interval)
    # Start one full interval after the supplied time.
    start_ts = int(start_str) + timeframe
    # Never start before the first candle the exchange actually has.
    first_valid_ts = self._get_earliest_valid_timestamp(symbol, interval)
    start_ts = max(start_ts, first_valid_ts)
    end_ts = None
    if end_str:
        if type(end_str) == int:
            end_ts = end_str
        else:
            end_ts = bhelp.date_to_milliseconds(end_str)
    params = {
        "symbol": symbol,
        "interval": interval,
        "limit": limit,
        "startTime": start_ts,
        "endTime": end_ts
    }
    while True:
        output_data = np.array(self.get_candles(**params))
        # 0 rows: nothing left; 1 row: only the (presumably still-open)
        # current candle remains — stop either way.
        if len(output_data) == 0 or len(output_data) == 1:
            break
        if len(output_data) < limit:
            # Short page: drop the final row before yielding
            # (presumably the unfinished live candle — TODO confirm).
            output_data = np.delete(output_data, -1, axis=0)
        for output in output_data:
            yield output
        # Resume from the open time of the last candle yielded.
        params["startTime"] = int(output_data[-1, 0])
        if len(output_data) < limit:
            # Short page (length re-checked after the delete above)
            # means history is exhausted.
            break
        params["startTime"] += timeframe
def _start_ts_over_expiry(self, start_ts, symbol):
    """Return True when *start_ts* (ms) is past the contract's expiry.

    *symbol* is expected to carry a YYMMDD expiry after its first
    underscore (e.g. "BTCUSD_210625").  One day of slack is added so the
    expiry day itself can still be fetched.

    :param start_ts: candidate start timestamp in milliseconds
    :param symbol: delivery-contract symbol containing the expiry date
    :return: bool
    """
    expiry = symbol.split('_', 1)[1]
    expiry = '20' + expiry  # YYMMDD -> YYYYMMDD
    # Add 1 day to date of expiry of contract
    date_obj_expiry = datetime.strptime(expiry, '%Y%m%d') + timedelta(days=1)
    date_str_expiry = date_obj_expiry.strftime('%Y-%m-%d')
    date_ts_expiry = date_to_milliseconds(date_str_expiry)
    # Collapsed the `if: return True / else: return False` ladder.
    return start_ts > date_ts_expiry
def get_average_price(self, order_book, token_pair):
    """Summarise the order book for *token_pair* and act on imbalances.

    Computes average buy/sell prices and quantities from the book, may
    place a buy or sell via *token_pair* when one side outweighs the
    other by more than 30%, and cancels pending orders older than 10
    minutes.

    :param order_book: dict with 'bids' and 'asks' lists of [price, ...]
    :param token_pair: trading-pair object with buy/sell/cancel helpers
    :return: (buy_average_price, sell_average_price, buy_quantity,
        sell_quantity)
    """
    # buy orders
    buy_orders = order_book['bids']
    # sell orders
    sell_orders = order_book['asks']
    sell_average_price, sell_quantity = self.get_selling_summary(
        sell_orders)
    buy_average_price, buy_quantity = self.get_buying_summary(
        buy_orders, sell_quantity)
    sell_average_price = float(sell_average_price)
    buy_average_price = float(buy_average_price)

    # check and print out profit that we've made so far
    self.check_profit(token_pair, buy_orders, sell_orders)

    # >30% imbalance between the sides decides the market direction.
    market_status = "stable"
    if buy_quantity > sell_quantity * 1.3:
        market_status = "increase"
    if sell_quantity > buy_quantity * 1.3:
        market_status = "decrease"

    if market_status == "increase":
        # BUGFIX: integer division — `len(...) / 4` is a float in
        # Python 3 and raised TypeError as a list index.
        buy_order = buy_orders[len(buy_orders) // 4]
        if float(
                buy_order[0]
        ) * 1.05 < token_pair.last_sell_price and token_pair.last_action != "buy":
            token_pair.buy(float(buy_order[0]))
    if market_status == "decrease":
        # BUGFIX: same float-index defect on the sell side.
        sell_order = sell_orders[len(sell_orders) // 4]
        if float(
                sell_order[0]
        ) > token_pair.last_buy_price * 1.05 and token_pair.last_action != "sell":
            token_pair.sell(float(sell_order[0]))
    print("%s market state: %s" % (token_pair.name, market_status))

    # delete pending orders that live too long
    pending_orders = token_pair.get_pending_orders()
    for pending_order in pending_orders:
        if pending_order['time'] < date_to_milliseconds(
                "10 minute ago UTC"):
            token_pair.cancel_order(pending_order['orderId'])

    return buy_average_price, sell_average_price, buy_quantity, sell_quantity
# =====================================================================
# Download historical klines for one symbol/window and dump them to a
# JSON file named after the requested start/end timestamps.
# =====================================================================
start = "1 Dec, 2017"
end = "1 Mar, 2018"
symbol = "BNBBTC"
interval = Client.KLINE_INTERVAL_1MINUTE

if __name__ == '__main__':
    # SECURITY: API credentials are hard-coded; move them to environment
    # variables or a config file before committing/sharing this script.
    client = Client("iPImn0wZ0QfRrMF1oVdHZts2KljM446S4l8K5rhpmT3Ja93d2ZtZeBviCRLO2ZXR",
                    "g4U49CoSQGLJk5hKo0gGtfjXtLBwrGQqO81tVp3vKPBzaYllBmWvwZvfbJH0xAvB")

    # BUGFIX: fetch the same window the output filename advertises
    # (previously this fetched "1 day ago UTC" regardless of start/end).
    klines = client.get_historical_klines(symbol, interval, start, end)
    print(len(klines))

    with open(
        "Binance_{}_{}_{}-{}.json".format(
            symbol,
            interval,
            date_to_milliseconds(start),
            date_to_milliseconds(end)
        ),
        'w'  # set file write mode
    ) as f:
        f.write(json.dumps(klines))
    #np.savetxt('data.csv', klines, delimiter=',')
import json
from binance.client import Client
from binance.helpers import date_to_milliseconds
import time
import os.path

#start = "14 hours ago UTC"
start = "11 March 2018 12:41 pm EST"
end = "now"
interval = Client.KLINE_INTERVAL_2HOUR
TWOHOURS_MS = 7200000
s = date_to_milliseconds(start) - (TWOHOURS_MS*30)  # need to start 30 intervals earlier because it needs historical data to work on
e = date_to_milliseconds(end)

# NOTE(review): empty API keys — public (unsigned) endpoints only.
client = Client("", "")
info = client.get_exchange_info()
# Walk every BTC-quoted symbol and download its 2h kline history,
# skipping pairs whose dump file already exists on disk.
for d in info["symbols"]:
    if "BTC" in d["symbol"]:
        if os.path.exists("Binance_{}_{}_{}-{}.json".format(
            d["symbol"], interval, s, e
        )):
            print("Data for " + d["symbol"] + " exists. Skipping...")
            continue
        print(d["symbol"])
        klines = client.get_historical_klines(d["symbol"], interval, str(s), str(e))
        if len(klines) == 0:
            print("Data for " + d["symbol"] + " doesn't exist for this period. Skipping...")
            # NOTE(review): snippet appears truncated here — the fetched
            # klines are never written to the file whose existence is
            # checked above; confirm the rest of the loop body.
def _future_coin_historical_klines(
    self,
    symbol,
    interval,
    start_str,
    end_str=None,
    limit=500,
):
    """Get historical klines from the Binance COIN-M futures endpoint.

    See dateparser docs for valid start and end string formats
    http://dateparser.readthedocs.io/en/latest/

    If using offset strings for dates add "UTC" to date string
    e.g. "now UTC", "11 hours ago UTC"

    :param symbol: Name of symbol pair e.g BNBBTC
    :type symbol: str
    :param interval: Binance Kline interval
    :type interval: str
    :param start_str: Start date string in UTC format or timestamp in milliseconds
    :type start_str: str|int
    :param end_str: optional - end date string in UTC format or timestamp
        in milliseconds (default requests ``limit`` intervals from start,
        then keeps paging until server time)
    :type end_str: str|int
    :param limit: Default 500; max 1000.
    :type limit: int

    :return: list of OHLCV values
    """
    # init our list
    output_data = []

    # convert interval to useful value in milliseconds
    timeframe = int(interval_to_milliseconds(interval))

    # convert our date strings to milliseconds
    if isinstance(start_str, int):
        start_ts = start_str
    else:
        start_ts = int(date_to_milliseconds(start_str))

    # if an end time was passed convert it; otherwise request one full
    # page (limit * interval) from the start
    if end_str:
        if isinstance(end_str, int):
            end_ts = end_str
        else:
            end_ts = int(date_to_milliseconds(end_str))
    else:
        end_ts = start_ts + limit * timeframe

    # never query past the exchange's own clock
    serv_time = self.get_server_time()['serverTime']
    end_ts = int(min(serv_time, end_ts))

    idx = 0
    empty_count = 0
    while True:
        temp_data = self.futures_coin_klines(symbol=symbol,
                                             interval=interval,
                                             limit=limit,
                                             startTime=start_ts,
                                             endTime=end_ts)

        if not len(temp_data):
            if not len(temp_data) and not len(output_data):
                pass
            if not len(output_data):
                # no data at all yet: probe the next day
                start_ts = int(
                    start_ts +
                    interval_to_milliseconds(self.KLINE_INTERVAL_1DAY))
            else:
                # data stopped coming: count consecutive empty pages
                empty_count += 1
        else:
            # next window starts one interval after the last open time
            start_ts = temp_data[-1][0]
            start_ts += timeframe
            # reset empty count because we received data
            empty_count = 0

        # append this loop's data to our output data
        output_data += temp_data
        idx += 1

        # slide the end of the window, again clamped to server time
        end_ts = start_ts + limit * timeframe
        serv_time = self.get_server_time()['serverTime']
        end_ts = int(min(serv_time, end_ts))

        if start_ts >= serv_time:
            # caught up with the present: exit the while loop
            break
        if '_2' in symbol:
            # delivery contract (expiry-dated symbol): stop past expiry
            if self._start_ts_over_expiry(start_ts, symbol):
                break
        # sleep after every 3rd call to be kind to the API
        if idx % 3 == 0:
            time.sleep(0.5)
        if empty_count == 3:
            # if 3 consecutive calls give no data, then stop
            print('3 consecutive empty response')
            break

    return output_data
def rawtab(filename='rawtab_BTCUSDT.csv', pair='BTCUSDT'):
    """ Method that is run within a thread. Maintains a data structure (list of dictionaries) in plain old memory. """
    # indicator: latest buy (True) / sell (False) signal from the
    # 10-SMA / 20-SMA crossover; starts as "sell" until first computed.
    indicator = False
    count = 0
    interval_seconds = 60
    #with lock:
    update_csv(filename, pair)
    from prerendr import update_panels
    print('Updating Preprocessed CSVs', threading.current_thread().name,
          os.getpid())
    update_panels(pair)
    # Seed the in-memory table with the last 1140 s (19 min) of data.
    date_now = date_to_milliseconds('now') // 1000
    date = datetime.utcfromtimestamp(date_now -
                                     1140).strftime('%Y/%m/%d %H:%M:00')
    supported_pairs[pair] = get_from_csv(pair, date, 'now')
    while True:
        count = count + 1
        #check status of binance server
        if check_status():
            supported_pairs[pair].append(
                {'Error': 'binance_server_maintenance'})
            break
        #fetch kline from binance and convert format to ours
        klines = client.get_klines(symbol=pair, interval='1m', limit=1)
        for kline in klines:  # list of list only has one list
            entry = convert_format(kline, pair)
            # add entry to rawtab
            supported_pairs[pair].append(entry)
        print('csv', threading.current_thread().name, os.getpid())
        if len(supported_pairs[pair]) > 20:
            entry = supported_pairs[pair].pop(
                0)  # keep table at 20 entries (might remove this)
        if count >= 20:
            # save new klines to file every count seconds
            count = 0
            update_csv(filename, pair)
            tenSMA, twentySMA = calcMovAvg(
                supported_pairs[pair])  # do some data calc sheit
            if tenSMA > twentySMA:
                indicator = True  # buy
            else:
                indicator = False  # sell
            #creates a new csv file with just these values
            newdict = {
                'Open_Time': supported_pairs[pair][-1]['Open_Time'],
                'Close_time': supported_pairs[pair][-1]['Close_time'],
                'Open_Price': supported_pairs[pair][-1]['Open_Price'],
                'indicator': indicator,
                '10-SMA': tenSMA,
                '20-SMA': twentySMA
            }
            write_to_csv([newdict], filename.split('.')[0] + '_indicator2.csv')
            # this looks horrendous lol, but we add it to the current entry
            supported_pairs[pair][-1]['indicator'] = str(indicator)
            supported_pairs[pair][-1]['10-SMA'] = str(tenSMA)
            supported_pairs[pair][-1]['20-SMA'] = str(twentySMA)
        else:
            # Between saves: carry the previous signal, zero the SMAs.
            supported_pairs[pair][-1]['indicator'] = str(indicator)
            supported_pairs[pair][-1]['10-SMA'] = 0
            supported_pairs[pair][-1]['20-SMA'] = 0
        time.sleep(interval_seconds)