def download_data(self, pair, start, end):
    """Download trade data and store as .csv file.

    Args:
        pair (str): Currency pair.
        start (int): Start UNIX of trade data to download.
        end (int): End UNIX of trade data to download.
    """
    dataio = DataIO(savedir=self._savedir, fieldnames=self.FIELDNAMES)
    if dataio.csv_check(pair):
        # resume from the last row already stored on disk
        last_row = dataio.csv_get_last(pair)
        newest_id = int(last_row['trade_id']) + 1
        newest_t = int(last_row['time'])
    else:
        newest_id = self.__find_start_trade_id(pair, start)
        newest_t = 0

    while newest_t < end:
        # fetch the next slice of trades starting at newest_id
        r = self.__get_slice(pair, newest_id)

        # old -> new: map raw API fields onto the CSV schema,
        # adding a unix timestamp
        new_r = []
        for row in r:
            row['time'] = row['T'] // 1000  # API time is ms; store seconds
            row['date'] = timeutil.unix_to_iso(row['time'])
            row['price'] = row['p']
            row['size'] = row['q']
            # 'm' flags the buyer as maker, i.e. a market sell
            row['side'] = 'sell' if row['m'] else 'buy'
            row['best_price_match'] = row['M']
            row['trade_id'] = row['a']
            # drop the raw single-letter API keys now that they are mapped
            for key in ('a', 'p', 'q', 'f', 'l', 'T', 'm', 'M'):
                row.pop(key, None)
            new_r.append(row)

        # save to file
        dataio.csv_append(pair, new_r)

        # a short slice means the newest available trade was reached
        if len(r) < self.__MAX_LIMIT:
            break

        # prepare next iteration
        newest_id = new_r[-1]['trade_id'] + 1
        newest_t = new_r[-1]['time']
        print('Binance\t| {} : {}'.format(
            timeutil.unix_to_iso(newest_t), pair))

    print('Binance\t| Download complete : {}'.format(pair))
def download_data(self, pair, start, end):
    """Download trade data and store as .csv file.

    Args:
        pair (str): Currency pair.
        start (int): Start UNIX of trade data to download.
        end (int): End UNIX of trade data to download.
    """
    dataio = DataIO(savedir=self._savedir, fieldnames=self.FIELDNAMES)

    # resume from an existing CSV if present, otherwise locate the
    # first trade at/after `start`
    if not dataio.csv_check(pair):
        newest_t = self.__find_start_trade_time(pair, start)
    else:
        newest_t = float(dataio.csv_get_last(pair)['time'])

    while newest_t < end:
        # fetch the next slice (old -> new), nudged just past the
        # newest stored trade time, and normalize it to dicts
        batch = self.__to_dict(self.__get_slice(pair, newest_t + 1e-4))

        # persist this slice
        dataio.csv_append(pair, batch)

        # a partial slice means no newer data is available
        if len(batch) < self.__MAX_LIMIT:
            break

        # advance the cursor to the newest trade just written
        newest_t = float(batch[-1]['time'])
        print('Kraken\t| {} : {}'.format(
            timeutil.unix_to_iso(newest_t), pair))

    print('Kraken\t| Download complete : {}'.format(pair))
def download_data(self, pair, start, end):
    """Download trade data and store as .csv file.

    Args:
        pair (str): Currency pair.
        start (int): Start UNIX of trade data to download.
        end (int): End UNIX of trade data to download.
    """
    dataio = DataIO(savedir=self._savedir, fieldnames=self.FIELDNAMES)
    if dataio.csv_check(pair):
        # resume from the last row already stored on disk
        last_row = dataio.csv_get_last(pair)
        newest_id = int(last_row['trade_id']) + 1
        newest_t = int(last_row['time'])
    else:
        newest_id = self.__find_start_trade_id(pair, start)
        newest_t = 0
    # the newest trade ID currently on the exchange; used as the
    # terminal break condition below
    last_trade_id = self.__find_last_trade_id(pair)
    while newest_t < end:
        # API returns trades new -> old; request the page ending at
        # newest_id + MAX_LIMIT so it covers the IDs we still need
        r = self.__get_slice(pair, newest_id + self.__MAX_LIMIT)
        # break condition flag, set while scanning the slice
        to_break = False
        # iterate old -> new; keep only rows newer than what we have,
        # and convert the ISO 'time' field into a unix timestamp
        new_r = []
        for row in reversed(r):
            if row['trade_id'] > newest_id:
                row['date'] = row['time']
                row['time'] = timeutil.iso_to_unix(row['time'])
                new_r.append(row)
            if row['trade_id'] == last_trade_id:
                # slice contains the exchange's newest trade: finish
                # after saving this batch
                to_break = True
        # save to file
        dataio.csv_append(pair, new_r)
        # break condition
        if to_break:
            break
        # prepare next iteration
        # NOTE(review): if a slice yields no rows newer than newest_id,
        # new_r is empty and new_r[-1] raises IndexError — confirm the
        # API guarantees progress on every page
        newest_id = new_r[-1]['trade_id']
        newest_t = new_r[-1]['time']
        print('GDAX\t| {} : {}'.format(timeutil.unix_to_iso(newest_t), pair))
    print('GDAX\t| Download complete : {}'.format(pair))
def download_data(self, pair, start, end):
    """Download trade data and store as .csv file.

    Args:
        pair (str): Currency pair.
        start (int): Start UNIX of trade data to download.
        end (int): End UNIX of trade data to download.
    """
    dataio = DataIO(savedir=self._savedir, fieldnames=self.FIELDNAMES)

    # resume from an existing CSV if present
    if dataio.csv_check(pair):
        last_row = dataio.csv_get_last(pair)
        newest_id = int(last_row['trade_id']) + 1
        newest_t = int(last_row['time'])
    else:
        newest_id = self.__find_start_trade_id(pair, start)
        newest_t = 0

    while newest_t < end:
        # fetch the next slice of trades starting at newest_id
        raw_rows = self.__get_slice(pair, newest_id)

        # normalize each raw row (old -> new): unix time, sequential
        # trade IDs, lowercase side; drop fields the CSV doesn't keep
        formatted = []
        for offset, row in enumerate(raw_rows):
            row['time'] = timeutil.iso_to_unix(row['timestamp'])
            row['date'] = row['timestamp']
            row['trade_id'] = newest_id + offset
            row['side'] = row['side'].lower()
            row.pop('timestamp', None)
            row.pop('symbol', None)
            formatted.append(row)

        # persist this slice
        dataio.csv_append(pair, formatted)

        # a partial slice means the newest trade was reached
        if len(raw_rows) < self.__MAX_LIMIT:
            break

        # advance the cursor past the newest trade just written
        newest_id = formatted[-1]['trade_id'] + 1
        newest_t = formatted[-1]['time']
        print('Bitmex\t| {} : {}'.format(
            timeutil.unix_to_iso(newest_t), pair))

    print('Bitmex\t| Download complete : {}'.format(pair))
def download_data(self, pair, start, end):
    """Download trade data and store as .csv file.

    Args:
        pair (str): Currency pair.
        start (int): Start UNIX of trade data to download.
        end (int): End UNIX of trade data to download.
    """
    dataio = DataIO(savedir=self._savedir, fieldnames=self.FIELDNAMES)
    # last_row doubles as the dedup cursor: it always holds the newest
    # row known to be written (from disk, or from a previous batch)
    last_row = None
    if dataio.csv_check(pair):
        last_row = dataio.csv_get_last(pair)
        newest_t = int(last_row['time'])
    else:
        # -1 so the first window includes the very first trade
        newest_t = self.__find_start_trade_time(pair, start) - 1
    # break condition
    last_trade_time = self.__find_last_trade_time(pair)
    while newest_t < end:
        # new -> old
        r = self.__get_slice(pair, newest_t)
        # old -> new; remove duplicate data by trade ID
        new_r = []
        for row in reversed(r):
            if last_row is not None:
                # NOTE(review): assumes row['tradeID'] is numeric while
                # the CSV-loaded value needs int() — confirm API type
                if int(last_row['tradeID']) >= row['tradeID']:
                    continue  # remove duplicates
            last_row = row
            row['time'] = timeutil.iso_to_unix(row['date'])
            new_r.append(row)
        # NOTE(review): breaking before the append discards new_r for
        # this window; presumably the window past last_trade_time holds
        # only duplicates already filtered out — confirm
        if newest_t > last_trade_time:
            break
        # save to file
        dataio.csv_append(pair, new_r)
        # prepare next iteration: windows advance by a fixed time range
        newest_t += self.__MAX_RANGE
        print('Poloniex| {} : {}'.format(
            timeutil.unix_to_iso(newest_t), pair))
    print('Poloniex| Download complete : {}'.format(pair))
def __to_dict(self, data):
    """Convert API trade data response into a list of dict.

    Args:
        data (list of list): API trade data response.

    Returns:
        List of python dict.
    """
    # positional layout of each raw row: [price, size, time, side, type];
    # index access (not unpacking) tolerates any trailing extra fields
    return [
        {
            'date': timeutil.unix_to_iso(row[2]),
            'time': row[2],
            'size': row[1],
            'price': row[0],
            'side': row[3],
            'order_type': row[4]
        }
        for row in data
    ]
def plot(ax, r, plot_title, show_legend=False):
    """Draw an OHLC candlestick chart with volume and indicator overlays.

    Args:
        ax: Matplotlib axes to draw on.
        r: Mapping of column name to sequence ('time', 'open', 'high',
            'low', 'close', 'volume', 'sell_volume',
            'sell_weighted_average', 'buy_weighted_average').
            NOTE(review): `ma + vol_std * k` below implies array-like
            (e.g. numpy) columns — confirm against callers.
        plot_title (str): Title for the axes.
        show_legend (bool): If True, draw a legend of all overlays.
    """
    # format OHLC data
    dates = [timeutil.unix_to_iso(unix) for unix in r['time']]
    opens = r['open']
    highs = r['high']
    lows = r['low']
    closes = r['close']
    volumes = r['volume']
    sell_volumes = r['sell_volume']
    sell_weighted_avgs = r['sell_weighted_average']
    buy_weighted_avgs = r['buy_weighted_average']
    # number of leading samples dropped from the plot (indicator warm-up)
    n_clip = 40
    ax.grid()
    ax.set_title(plot_title)
    # label every 10th visible candle, rotated for readability
    ax.set_xticks([x for x in range(len(dates) - n_clip)[::10]])
    ax.set_xticklabels(dates[n_clip::10])
    for tick in ax.get_xticklabels():
        tick.set_rotation(45)
    # plot chart OHLC
    candlestick2_ohlc(ax=ax, opens=opens[n_clip:], highs=highs[n_clip:],
                      lows=lows[n_clip:], closes=closes[n_clip:],
                      width=0.6, colorup='green', colordown='red')
    # legend: collect (line, label) pairs as overlays are drawn
    lines, labels = [], []
    # close
    l, = ax.plot(closes[n_clip:], color='black', linewidth=0.5)
    lines.append(l)
    labels.append('Close')
    # weighted averages
    l, = ax.plot(sell_weighted_avgs[n_clip:], color='pink', linewidth=1.5)
    lines.append(l)
    labels.append('Weighted Sell Average')
    l, = ax.plot(buy_weighted_avgs[n_clip:], color='cyan', linewidth=1.5)
    lines.append(l)
    labels.append('Weighted Buy Average')
    # volume: drawn on a twin y-axis so its scale is independent;
    # sell volume is over-plotted in red on the same bars
    ax_twin = ax.twinx()
    ax_twin.yaxis.set_visible(False)
    ind = [x for x in range(len(volumes) - n_clip)]
    ax_twin.bar(ind, volumes[n_clip:], alpha=0.2)
    ax_twin.bar(ind, sell_volumes[n_clip:], color='red', alpha=0.2)
    # SMA
    n = 20
    ma = datautil.ma(closes, n)
    l, = ax.plot(ma[n_clip:], color='blue', linewidth=1.5)
    lines.append(l)
    labels.append('SMA(n={})'.format(n))
    # BB: Bollinger bands at k standard deviations around the SMA above
    n = 20
    k = 2
    vol_std = datautil.vol(closes, n)
    std_upper = ma + vol_std * k
    std_lower = ma - vol_std * k
    # only the lower band's handle goes in the legend (one entry for both)
    ax.plot(std_upper[n_clip:], color='blue', linewidth=0.5)
    l, = ax.plot(std_lower[n_clip:], color='blue', linewidth=0.5)
    lines.append(l)
    labels.append('BB(k={},n={})'.format(k, n))
    # EMA
    n = 20
    ema = datautil.ema(closes, n)
    l, = ax.plot(ema[n_clip:], color='red')
    lines.append(l)
    labels.append('EMA(n={})'.format(n))
    # EMA (longer period, dashed to distinguish from the solid n=20 line)
    n = 30
    ema = datautil.ema(closes, n)
    l, = ax.plot(ema[n_clip:],
                 color='red', dashes=[3, 2])
    lines.append(l)
    labels.append('EMA(n={})'.format(n))
    # DEMA
    n = 20
    dema = datautil.dema(closes, n)
    l, = ax.plot(dema[n_clip:], color='green')
    lines.append(l)
    labels.append('DEMA(n={})'.format(n))
    # DEMA (longer period, dashed)
    n = 30
    dema = datautil.dema(closes, n)
    l, = ax.plot(dema[n_clip:], color='green', dashes=[3, 2])
    lines.append(l)
    labels.append('DEMA(n={})'.format(n))
    # TEMA
    n = 20
    tema = datautil.tema(closes, n)
    l, = ax.plot(tema[n_clip:], color='orange')
    lines.append(l)
    labels.append('TEMA(n={})'.format(n))
    # TEMA (longer period, dashed)
    n = 30
    tema = datautil.tema(closes, n)
    l, = ax.plot(tema[n_clip:], color='orange', dashes=[3, 2])
    lines.append(l)
    labels.append('TEMA(n={})'.format(n))
    # RSI: plotted on its own twin axis since its 0-100 range differs
    # from the price scale
    n = 30
    rsi = datautil.rsi(closes, n)
    ax_twin = ax.twinx()
    ax_twin.yaxis.set_visible(False)
    l, = ax_twin.plot(rsi[n_clip:], color='orange', alpha=0.5)
    lines.append(l)
    labels.append('RSI(n={})'.format(n))
    # draw legend
    if show_legend:
        ax.legend(lines, labels, loc=2)
import timeutil if __name__ == '__main__': unix = 1500000000 # unix to ... iso = timeutil.unix_to_iso(unix) utc_date = timeutil.unix_to_utc_date(unix) local_date = timeutil.unix_to_local_date(unix) print('unix conversion:', iso) print('unix conversion:', utc_date) print('unix conversion:', local_date) # iso to ... unix_1 = timeutil.iso_to_unix(iso) utc_date_1 = timeutil.iso_to_utc_date(iso) local_date_1 = timeutil.iso_to_local_date(iso) print('iso conversion:', unix_1) print('iso conversion:', utc_date_1) print('iso conversion:', local_date_1) # utc_date to ... unix_2 = timeutil.utc_date_to_unix(utc_date) iso_2 = timeutil.utc_date_to_iso(utc_date) local_date_2 = timeutil.utc_date_to_local_date(utc_date) print('utc_date conversion:', unix_2) print('utc_date conversion:', iso_2) print('utc_date conversion:', local_date_2) # local_date to ... unix_3 = timeutil.local_date_to_unix(local_date)