def __get_klines(self, symbol, interval, size, since):
    """Load up to *size* klines for *symbol*/*interval* from the local CSV dump.

    The CSV lives at ``data_dir/<symbol>_<interval>.csv`` and has a header
    row; column 1 holds the open time in nanoseconds, columns 2-5 the OHLC
    prices, column 6 the volume (column layout inferred from the parsing
    below -- confirm against whatever writes the dump).

    :param symbol:   market symbol, e.g. "eth_usdt"
    :param interval: kline interval constant understood by ``kl``
    :param size:     maximum number of klines to return
    :param since:    open-time lower bound in epoch milliseconds, or None
                     for "no lower bound" (the retired remote-API path,
                     visible in the old commented-out code, accepted None)
    :return: list of
             [open_time_ms, open, high, low, close, volume, close_time_ms]
    """
    interval_ms = kl.get_interval_timedelta(interval).total_seconds() * 1000
    data_file_name = data_dir + "/" + symbol + "_" + interval + ".csv"
    klines = []
    with open(data_file_name, "rt") as data_file:
        # Skip the header row; tolerate a completely empty file.
        next(data_file, None)
        for data_line in data_file:
            data_values = data_line.split(",")
            # Column 1 is nanoseconds since epoch; convert to milliseconds.
            open_time = int(
                (int(data_values[1]) / (1000 * 1000 * 1000)) * 1000)
            # BUG FIX: the old code compared against `since` unconditionally,
            # which raises TypeError when callers pass since=None.
            if since is not None and open_time < since:
                continue
            close_time = open_time + interval_ms - 1
            klines.append([
                open_time,
                float(data_values[2]),
                float(data_values[3]),
                float(data_values[4]),
                float(data_values[5]),
                int(data_values[6]),
                close_time
            ])
            if len(klines) == size:
                break
    return klines
def get_can_open_time(self, cfg):
    """Return the earliest time an order may be opened, per *cfg*.

    Two optional config keys each contribute a candidate time:
      - "n_o_t": the next scheduled open time (via kl.get_next_open_time)
      - "d_td":  a delay interval added to the current tick time
    The later of the available candidates wins; returns None when
    neither key is present.
    """
    scheduled = None
    if "n_o_t" in cfg:
        scheduled = kl.get_next_open_time(cfg["n_o_t"], self.now())

    delayed = None
    if "d_td" in cfg:
        delayed = self.now() + kl.get_interval_timedelta(cfg["d_td"])

    candidates = [t for t in (scheduled, delayed) if t]
    return max(candidates) if candidates else None
def setUp(self):
    """Build the fixture: a DBMD source frozen at a fixed tick time, plus
    kline data, column seats (positional) and keys (dict-style)."""
    exchange_name = ex.BINANCE_SPOT_EXCHANGE_NAME
    self.symbol = "eth_usdt"
    self.digits = 6
    self.interval = kl.KLINE_INTERVAL_1DAY
    self.display_count = 10

    # Market-data source pinned to a fixed tick time for reproducibility.
    self.md = DBMD(exchange_name)
    self.md.tick_time = datetime(2019, 1, 1, 8)

    # 150 warm-up klines plus the displayed window.
    total = 150 + self.display_count
    self.klines = self.md.get_klines(self.symbol, self.interval, total)
    self.klines_df = pd.DataFrame(
        self.klines, columns=self.md.get_kline_column_names())

    # Positional indices for list-style klines ...
    self.closeseat = self.md.get_kline_seat_close()
    self.highseat = self.md.get_kline_seat_high()
    self.lowseat = self.md.get_kline_seat_low()
    # ... and the matching dict-style keys for this exchange.
    self.closekey = ex.get_kline_key_close(exchange_name)
    self.highkey = ex.get_kline_key_high(exchange_name)
    self.lowkey = ex.get_kline_key_low(exchange_name)

    self.count = 5000
    self.steps = 1
    self.td = kl.get_interval_timedelta(
        kl.KLINE_INTERVAL_1MINUTE) * self.steps
def __get_klines_1min_cache1(self, symbol, interval, s_time, e_time):
    """Return the cached 1-minute klines of the interval starting at
    *s_time*, truncated so no kline opens after *e_time*.

    The full interval's worth of minute klines is fetched once and cached
    per interval; the cache is refreshed whenever the requested start time
    differs from the cached one.
    """
    # Cache miss: interval never seen, or cached for a different start time.
    # (Short-circuit `or` makes the second membership re-check of the
    # original code unnecessary.)
    if (interval not in self.k1ms_cache
            or self.k1ms_cache_s_time[interval] != s_time):
        interval_end = s_time + kl.get_interval_timedelta(interval)
        self.k1ms_cache[interval] = self.__get_klines_1min(
            symbol, s_time, interval_end)
        self.k1ms_cache_s_time[interval] = s_time

    cached = self.k1ms_cache[interval]
    count = int((e_time - s_time).total_seconds() / 60)
    if count >= len(cached):
        return cached

    # Walk backwards until the kline at `count` opens at or before e_time.
    e_timestamp = e_time.timestamp() * 1000
    while count > 0 and cached[count][self.kline_key_open_time] > e_timestamp:
        count -= 1
    return cached[:count]
class TestDBMD(unittest.TestCase):
    """Performance harness for DBMD kline access over a multi-year,
    minute-tick backtest range."""

    symbol = "eth_usdt"
    digits = 4
    interval = kl.KLINE_INTERVAL_1DAY
    interval_td = kl.get_interval_timedelta(interval)
    pre_count = 150
    display_count = 10
    tick_interval = kl.KLINE_INTERVAL_1MINUTE
    tick_collection = kl.get_kline_collection(symbol, tick_interval)
    tick_td = kl.get_interval_timedelta(tick_interval)
    start_time = datetime(2017, 9, 1)
    end_time = datetime(2020, 8, 1)
    md = DBMD("binance")

    def setUp(self):
        self.md.tick_time = self.start_time
        total_ticks = (self.end_time - self.start_time) / self.tick_td
        print("tick td=%s, tick=%d, time rang: %s ~ %s" %
              (self.tick_td, total_ticks, self.start_time, self.end_time))

    def tearDown(self):
        pass

    def perf_get_json_klines(self):
        # Walk the whole range one tick at a time, fetching JSON klines.
        window = self.pre_count + self.display_count
        while self.md.tick_time < self.end_time:
            _ = self.md.get_json_klines(self.symbol, self.interval, window)
            self.md.tick_time += self.tick_td

    def perf_get_klines(self):
        # Same walk as above via the plain get_klines accessor.
        window = self.pre_count + self.display_count
        while self.md.tick_time < self.end_time:
            _ = self.md.get_klines(self.symbol, self.interval, window)
            self.md.tick_time += self.tick_td

    def perf_get_klines_adv(self):
        total_interval_count = int(
            (self.end_time - self.start_time) / self.interval_td
        ) + self.pre_count
        print("total_interval_count: %s" % (total_interval_count))
        interval_collection = kl.get_kline_collection(self.symbol,
                                                      self.interval)
        # Pre-load pre_count warm-up intervals ahead of start_time.
        interval_klines = self.md.get_original_klines(
            interval_collection,
            self.start_time - self.interval_td * self.pre_count,
            self.end_time)
        # Renamed from `kl`, which shadowed the kline-utils module.
        sample_kl = interval_klines[0]
        print("open_time: %s" % (self.md.get_kline_open_time(sample_kl)))
        #print("json: %s" % (sample_kl))
        ti.EMA(interval_klines, "close", 13)
        ti.EMA(interval_klines, "close", 21)
        ti.EMA(interval_klines, "close", 55)
        ti.BIAS_EMA(interval_klines, 13, 21)
        ti.BIAS_EMA(interval_klines, 21, 55)
        ti.RSI(interval_klines, "close")
        # First kline whose open time falls inside the backtest range.
        for idx in range(self.pre_count + 1):
            if self.md.get_kline_open_time(
                    interval_klines[idx]) >= self.start_time:
                break
        interval_idx = idx
        sample_kl = interval_klines[interval_idx]
        print("interval_idx: %s" % (interval_idx))
        print("open time: %s" % (self.md.get_kline_open_time(sample_kl)))
        for i in range(interval_idx, len(interval_klines)):
            window_start = max(i - self.pre_count, 0)
            history_kls = interval_klines[window_start:i]
            interval_open_time = self.md.get_kline_open_time(
                interval_klines[i])
            tick_klines = self.md.get_original_klines(
                self.tick_collection, interval_open_time,
                interval_open_time + self.interval_td)
            # Rebuild the in-progress interval kline from its minute ticks.
            # NOTE(review): tick_klines[0] is mutated in place (no copy) --
            # as in the original; fine only if get_original_klines returns
            # fresh objects each call.
            new_interval_kl = tick_klines[0]
            for tick_kl in tick_klines[1:]:
                _ = self.md.get_kline_open_time(tick_kl)
                new_interval_kl["close"] = tick_kl["close"]
                new_interval_kl["close_time"] = tick_kl["close_time"]
                if new_interval_kl["high"] < tick_kl["high"]:
                    new_interval_kl["high"] = tick_kl["high"]
                if new_interval_kl["low"] > tick_kl["low"]:
                    new_interval_kl["low"] = tick_kl["low"]
                # Per-tick indicator cost, mirroring what a strategy's
                # on_tick would pay.
                cur_kls = history_kls + [new_interval_kl]
                ti.EMA(cur_kls, "close", 13)
                ti.EMA(cur_kls, "close", 21)
                ti.EMA(cur_kls, "close", 55)
                ti.BIAS_EMA(cur_kls, 13, 21)
                ti.BIAS_EMA(cur_kls, 21, 55)
                ti.RSI(cur_kls, "close")
if __name__ == "__main__":
    # Appears to be the entry point of a "fix" tool that scans a kline
    # collection for gaps -- TODO confirm against the rest of the file.
    parser = add_common_arguments('fix')
    args = parser.parse_args()
    # print(args)
    # -s symbol, -r time range, -k kline interval and -m mongo db name
    # are all mandatory.
    if not (args.s and args.r and args.k and args.m):
        parser.print_help()
        exit(1)
    start_time, end_time = split_time_range(args.r)
    interval = args.k
    collection = kl.get_kline_collection(args.s, interval)
    td = kl.get_interval_timedelta(interval)
    period = kl.get_interval_seconds(interval)
    # Snap to the first interval-aligned open time at or after start_time.
    tick_time = kl.get_open_time(interval, start_time)
    if tick_time < start_time:
        tick_time = kl.get_open_time(interval, start_time+td)
    db = md.MongoDB(mongo_user, mongo_pwd, args.m, db_url)
    # All klines inside [start_time, end_time), ascending by open_time (ms).
    klines = db.find_sort(collection, {"open_time": {
        "$gte": int(start_time.timestamp())*1000,
        "$lt": int(end_time.timestamp())*1000}}, 'open_time', 1)
    i = 0
    miss_count = 0
    # NOTE(review): i and miss_count look like loop state for a scan that
    # continues past this chunk -- verify against the full file.
    print(len(klines))
def run_2kls(engine, md, strategy, start_time, end_time, progress_disp=True):
    """Backtest a two-timeframe strategy by replaying 1-minute ticks.

    For every master-interval kline and every micro-interval kline inside
    it, the in-progress kline is rebuilt tick by tick and the strategy's
    on_tick(master_kls, micro_kls) is invoked once per 1-minute tick.

    :param engine:        logging host (log_info is called per tick)
    :param md:            market-data source (get_original_klines, tick_time)
    :param strategy:      strategy with .config and on_tick(master, micro);
                          before_backtest is called first if present
    :param start_time:    backtest range start (datetime)
    :param end_time:      backtest range end (datetime)
    :param progress_disp: write a progress line to stdout per master kline
    :return: number of ticks replayed
    """
    symbol = strategy.config["symbol"]
    tick_interval = kl.KLINE_INTERVAL_1MINUTE
    tick_collection = kl.get_kline_collection(symbol, tick_interval)
    tick_td = kl.get_interval_timedelta(tick_interval)
    kline_cfg = strategy.config["kline"]
    size = kline_cfg["size"]
    master_interval = kline_cfg["interval"]
    master_td = kl.get_interval_timedelta(master_interval)
    # Pre-load `size` extra master klines before start_time so the first
    # tick already has a full history window.
    master_original_kls = md.get_original_klines(
        kl.get_kline_collection(symbol, master_interval),
        start_time - master_td * size, end_time)
    micro_interval = kline_cfg["micro_interval"]
    micro_td = kl.get_interval_timedelta(micro_interval)
    micro_original_kls = md.get_original_klines(
        kl.get_kline_collection(symbol, micro_interval),
        start_time - micro_td * size, end_time)
    if hasattr(strategy, "before_backtest"):
        strategy.before_backtest(master_original_kls, micro_original_kls)
    # First master kline whose *close* (open + td) reaches start_time.
    for master_start_idx in range(size + 1):
        master_start_open_time = md.get_kline_open_time(
            master_original_kls[master_start_idx]) + master_td
        if master_start_open_time >= start_time:
            break
    # Align the micro index to the same moment.
    for micro_idx in range(size + 1 + int(master_td / micro_td)):
        micro_start_open_time = md.get_kline_open_time(
            micro_original_kls[micro_idx]) + micro_td
        if micro_start_open_time >= master_start_open_time:
            break
    pre_tick_cost_time = total_tick_cost_start = datetime.now()
    tick_count = 0
    tick_klines = []
    tick_idx = 0
    for master_idx in range(master_start_idx, len(master_original_kls)):
        pre_start_i = master_idx - size
        if pre_start_i < 0:
            pre_start_i = 0
        history_master_kls = master_original_kls[pre_start_i:master_idx + 1]
        # The kline being rebuilt is the one *after* the last history kline.
        new_master_open_time = md.get_kline_open_time(
            history_master_kls[-1]) + master_td
        new_master_close_time = new_master_open_time + master_td
        new_master_kl = None
        #print("new master open time: %s" % (new_master_open_time))
        # Refill the tick buffer in batches of 7 master intervals.
        if tick_idx >= len(tick_klines):
            tick_klines = md.get_original_klines(
                tick_collection, new_master_open_time,
                new_master_open_time + 7 * master_td)
            tick_idx = 0
        # Consume micro klines until we cross the master kline's close.
        while (micro_idx < len(micro_original_kls)):
            if md.get_kline_open_time(micro_original_kls[micro_idx]
                                      ) + micro_td >= new_master_close_time:
                break
            if micro_idx > size:
                history_micro_kls = micro_original_kls[(micro_idx -
                                                        size):micro_idx + 1]
            else:
                history_micro_kls = micro_original_kls[:micro_idx + 1]
            micro_idx += 1
            new_micro_open_time = md.get_kline_open_time(
                history_micro_kls[-1]) + micro_td
            new_micro_close_time = new_micro_open_time + micro_td
            new_micro_kl = None
            #print("new micro open time: %s" % (new_micro_open_time))
            # Replay the 1-minute ticks belonging to this micro kline.
            while (tick_idx < len(tick_klines)):
                tick_kl = tick_klines[tick_idx]
                tick_open_time = md.get_kline_open_time(tick_kl)
                if tick_open_time >= new_micro_close_time:
                    break
                tick_idx += 1
                engine.log_info("tick_time: %s" %
                                tick_open_time.strftime("%Y-%m-%d %H:%M:%S"))
                #print(tick_open_time)
                # Fold the tick into both in-progress klines (copied first,
                # so the source tick klines are never mutated).
                if not new_master_kl:
                    new_master_kl = copy.copy(tick_kl)
                else:
                    update_kl(md, new_master_kl, tick_kl)
                master_kls = history_master_kls + [new_master_kl]
                if not new_micro_kl:
                    new_micro_kl = copy.copy(tick_kl)
                else:
                    update_kl(md, new_micro_kl, tick_kl)
                micro_kls = history_micro_kls + [new_micro_kl]
                if md.kline_data_type == kl.KLINE_DATA_TYPE_LIST:
                    master_kls = kl.trans_from_json_to_list(
                        master_kls, md.kline_column_names)
                    micro_kls = kl.trans_from_json_to_list(
                        micro_kls, md.kline_column_names)
                # Advance the simulated clock past this tick, then run the
                # strategy.
                md.tick_time = tick_open_time + tick_td
                strategy.on_tick(master_kls, micro_kls)
                tick_cost_time = datetime.now()
                engine.log_info("tick cost: %s \n\n" %
                                (tick_cost_time - pre_tick_cost_time))
                pre_tick_cost_time = tick_cost_time
                tick_count += 1
        if progress_disp:
            # NOTE(review): tick_cost_time is set inside the tick loop; if a
            # master kline yields no ticks this could raise -- confirm data
            # guarantees.
            progress = (master_idx + 1 - master_start_idx) / (
                len(master_original_kls) - master_start_idx)
            sys.stdout.write(
                "%s progress: %d%%, cost: %s, open time: %s\r" % (
                    " " * 36,
                    progress * 100,
                    tick_cost_time - total_tick_cost_start,
                    new_master_open_time.strftime("%Y-%m-%d %H:%M:%S"),
                ))
            sys.stdout.flush()
    return tick_count
def run_1kls(engine, md, strategy, start_time, end_time, progress_disp=True):
    """Backtest a single-timeframe strategy by replaying 1-minute ticks.

    For every master-interval kline in the range, the in-progress kline is
    rebuilt tick by tick and strategy.on_tick(kls) is invoked once per
    1-minute tick with `size` history klines plus the in-progress one.

    :param engine:        logging host (log_info is called per tick)
    :param md:            market-data source (get_original_klines, tick_time)
    :param strategy:      strategy with .config and on_tick(kls);
                          before_backtest is called first if present
    :param start_time:    backtest range start (datetime)
    :param end_time:      backtest range end (datetime)
    :param progress_disp: write a progress line to stdout per master kline
    :return: number of ticks replayed
    """
    symbol = strategy.config["symbol"]
    tick_interval = kl.KLINE_INTERVAL_1MINUTE
    tick_collection = kl.get_kline_collection(symbol, tick_interval)
    tick_td = kl.get_interval_timedelta(tick_interval)
    kline_cfg = strategy.config["kline"]
    size = kline_cfg["size"]
    master_interval = kline_cfg["interval"]
    master_td = kl.get_interval_timedelta(master_interval)
    # Pre-load `size` extra klines before start_time for the history window.
    master_kls = md.get_original_klines(
        kl.get_kline_collection(symbol, master_interval),
        start_time - master_td * size, end_time)
    if hasattr(strategy, "before_backtest"):
        strategy.before_backtest(master_kls)
    # First kline whose open time falls inside the backtest range.
    for i in range(size + 1):
        if md.get_kline_open_time(master_kls[i]) >= start_time:
            break
    master_idx = i
    pre_tick_cost_time = total_tick_cost_start = datetime.now()
    tick_count = 0
    for i in range(master_idx, len(master_kls)):
        start_i = i - size
        if start_i < 0:
            start_i = 0
        # History excludes the current (in-progress) kline at index i.
        history_master_kls = master_kls[start_i:i]
        master_open_time = md.get_kline_open_time(master_kls[i])
        tick_klines = md.get_original_klines(tick_collection,
                                             master_open_time,
                                             master_open_time + master_td)
        for j, tick_kl in enumerate(tick_klines):
            tick_open_time = md.get_kline_open_time(tick_kl)
            engine.log_info("tick_time: %s" %
                            tick_open_time.strftime("%Y-%m-%d %H:%M:%S"))
            #print(tick_open_time)
            if j == 0:
                # NOTE(review): the first tick kline is reused (and then
                # mutated by update_kl) with no copy, unlike run_2kls which
                # uses copy.copy -- fine only if get_original_klines returns
                # fresh objects each call; confirm.
                new_master_kl = tick_kl
            else:
                # Fold this tick into the in-progress master kline.
                update_kl(md, new_master_kl, tick_kl)
            kls = history_master_kls + [new_master_kl]
            if md.kline_data_type == kl.KLINE_DATA_TYPE_LIST:
                kls = kl.trans_from_json_to_list(kls, md.kline_column_names)
            # Advance the simulated clock past this tick before the
            # strategy runs.
            md.tick_time = tick_open_time + tick_td
            strategy.on_tick(kls)
            tick_cost_time = datetime.now()
            engine.log_info("tick cost: %s \n\n" %
                            (tick_cost_time - pre_tick_cost_time))
            pre_tick_cost_time = tick_cost_time
            tick_count += 1
        if progress_disp:
            progress = (i + 1 - master_idx) / (len(master_kls) - master_idx)
            sys.stdout.write(
                "%s progress: %d%%, cost: %s, next open time: %s\r" % (
                    " " * 36,
                    progress * 100,
                    tick_cost_time - total_tick_cost_start,
                    (master_open_time +
                     master_td).strftime("%Y-%m-%d %H:%M:%S"),
                ))
            sys.stdout.flush()
    return tick_count
def download_from_exchange(exchange, db, symbol, kline_type, time_range):
    """Download klines from *exchange* and store them in *db*.

    When *time_range* is given, it bounds the download; otherwise the
    download resumes from the last record already in the collection (or the
    exchange's earliest time) up to the most recent complete interval.
    Klines are fetched in batches of the exchange's maximum page size, and
    any kline whose interval has not yet closed is dropped before insert.
    """
    print('%12s %6s ' % (' ', kline_type), end='')
    collection = kl.get_kline_collection(symbol, kline_type)
    open_time_key = exchange.kline_key_open_time
    # Unique index on open time makes re-downloads idempotent.
    db.ensure_index(collection, [(open_time_key, 1)], unique=True)
    interval = kl.get_interval_timedelta(kline_type)
    if time_range:
        start_time, end_time = split_time_range(time_range)
    else:
        # Resume from the last record in the db, up to (before) today.
        klines = db.find_sort(collection, {}, open_time_key, -1, 1)
        if len(klines) > 0:
            start_time = (
                exchange.get_time_from_data_ts(klines[0][open_time_key]) +
                interval)
        else:
            start_time = exchange.start_time
        end_time = datetime.now()
        #print(kl.get_open_time(kline_type, end_time))
        """ if start_time.hour != exchange.start_time.hour: print("open time(%s) hour error! %s open time hour: %s" % (start_time, exchange.name, exchange.start_time.hour)) exit(1) if end_time.hour < exchange.start_time.hour: end_time -= timedelta(days=1) end_time = end_time.replace(hour=exchange.start_time.hour, minute=0, second=0, microsecond=0) """
        # Snap the end to the last interval-aligned open time.
        end_time = end_time.replace(minute=0, second=0, microsecond=0)
        end_time = kl.get_open_time(kline_type, end_time)
    print("time range: %s ~ %s " % (start_time, end_time))
    size = exchange.max_count_of_single_download_kl
    tmp_time = start_time
    while tmp_time < end_time:
        # Shrink the final batch so we never request past end_time.
        size_interval = size * interval
        if (tmp_time + size_interval) > end_time:
            batch = int((end_time - tmp_time) / interval)
        else:
            batch = size
        # print(batch)
        if batch == 0:
            break
        klines = exchange.get_klines(
            symbol,
            kline_type,
            size=batch,
            since=exchange.get_data_ts_from_time(tmp_time))
        klines_df = pd.DataFrame(klines, columns=exchange.kline_column_names)
        klen = len(klines)
        print(" %20s start time: %s %s" % (' ', tmp_time, klen))
        # Drop trailing klines whose interval has not yet fully closed.
        for i in range(klen - 1, -1, -1):
            last_open_time = exchange.get_time_from_data_ts(
                klines_df[open_time_key].values[i])
            if last_open_time + interval <= end_time:
                break
            klines_df = klines_df.drop([i])
        db_datalines = klines_df.to_dict('records')
        if len(db_datalines) == 0:
            break
        # Bulk insert; on failure (e.g. duplicate key) fall back to
        # one-by-one inserts so the non-conflicting rows still land.
        if not db.insert_many(collection, db_datalines):
            for item in db_datalines:
                db.insert_one(collection, item)
        # Advance past the last kline actually received -- the exchange may
        # return gaps, so the effective batch can exceed the requested one.
        last_time = exchange.get_time_from_data_ts(
            klines_df[open_time_key].values[-1]) + interval
        if last_time > tmp_time + batch * interval:
            batch = int((last_time - tmp_time) / interval)
        tmp_time += batch * interval
def run2(engine, md, strategy, start_time, end_time, progress_disp=True):
    """Backtest a strategy by replaying 1-minute ticks (advanced runner).

    Dict-keyed variant of the kline replay: for every interval kline in the
    range, the in-progress kline is rebuilt tick by tick and
    strategy.on_tick(kls) is invoked once per 1-minute tick.

    :param engine:        logging host (log_info is called per tick)
    :param md:            market-data source (get_original_klines, tick_time)
    :param strategy:      strategy with .config and on_tick(kls)
    :param start_time:    backtest range start (datetime)
    :param end_time:      backtest range end (datetime)
    :param progress_disp: write a progress line to stdout per interval kline
    :return: number of ticks replayed
    """
    # Clamp the strategy's tick period to at least one minute: 1-minute
    # klines are the finest data available.
    secs = strategy.config["sec"]
    if secs < 60:
        secs = 60
    # NOTE(review): td_secs is unused in this function -- looks vestigial;
    # kept so the "sec" config key is still read/validated.
    td_secs = timedelta(seconds=secs)
    symbol = strategy.config["symbol"]
    tick_interval = kl.KLINE_INTERVAL_1MINUTE
    tick_collection = kl.get_kline_collection(symbol, tick_interval)
    tick_td = kl.get_interval_timedelta(tick_interval)
    interval = strategy.config["kline"]["interval"]
    interval_collection = kl.get_kline_collection(symbol, interval)
    interval_td = kl.get_interval_timedelta(interval)
    size = strategy.config["kline"]["size"]
    # Pre-load `size` extra klines before start_time for the history window.
    interval_klines = md.get_original_klines(
        interval_collection, start_time - interval_td * size, end_time)
    kl_key_open_time = md.kline_key_open_time
    # First kline whose open time falls inside the backtest range.
    for i in range(size + 1):
        if datetime.fromtimestamp(
                interval_klines[i][kl_key_open_time] / 1000) >= start_time:
            break
    interval_idx = i
    pre_tick_cost_time = total_tick_cost_start = datetime.now()
    tick_count = 0
    for i in range(interval_idx, len(interval_klines)):
        start_i = i - size
        if start_i < 0:
            start_i = 0
        # History excludes the current (in-progress) kline at index i.
        history_kls = interval_klines[start_i:i]
        interval_open_ts = interval_klines[i][kl_key_open_time]
        # FIX: interval_open_time used to be computed twice from the same
        # timestamp at the top of this loop; the dead first assignment was
        # removed.
        interval_open_time = datetime.fromtimestamp(interval_open_ts / 1000)
        tick_klines = md.get_original_klines(
            tick_collection, interval_open_time,
            interval_open_time + interval_td)
        for j, tick_kl in enumerate(tick_klines):
            tick_open_time = datetime.fromtimestamp(
                tick_kl[kl_key_open_time] / 1000)
            engine.log_info("tick_time: %s" %
                            tick_open_time.strftime("%Y-%m-%d %H:%M:%S"))
            #print(tick_open_time)
            if j == 0:
                # NOTE(review): the first tick kline is reused (and mutated)
                # as the in-progress interval kline -- no copy is taken, as
                # in the original code.
                new_interval_kl = tick_kl
            else:
                # Fold this tick into the in-progress interval kline.
                new_interval_kl[md.kline_key_close] = tick_kl[
                    md.kline_key_close]
                new_interval_kl[md.kline_key_close_time] = tick_kl[
                    md.kline_key_close_time]
                if new_interval_kl[md.kline_key_high] < tick_kl[
                        md.kline_key_high]:
                    new_interval_kl[md.kline_key_high] = tick_kl[
                        md.kline_key_high]
                if new_interval_kl[md.kline_key_low] > tick_kl[
                        md.kline_key_low]:
                    new_interval_kl[md.kline_key_low] = tick_kl[
                        md.kline_key_low]
            kls = history_kls + [new_interval_kl]
            if md.kline_data_type == kl.KLINE_DATA_TYPE_LIST:
                kls = kl.trans_from_json_to_list(kls, md.kline_column_names)
            # Advance the simulated clock past this tick before the
            # strategy runs.
            md.tick_time = tick_open_time + tick_td
            strategy.on_tick(kls)
            tick_cost_time = datetime.now()
            engine.log_info("tick cost: %s \n\n" %
                            (tick_cost_time - pre_tick_cost_time))
            pre_tick_cost_time = tick_cost_time
            tick_count += 1
        if progress_disp:
            progress = (i + 1 - interval_idx) / (len(interval_klines) -
                                                 interval_idx)
            sys.stdout.write(
                "%s progress: %d%%, cost: %s, next open time: %s\r" % (
                    " " * 36,
                    progress * 100,
                    tick_cost_time - total_tick_cost_start,
                    (interval_open_time +
                     interval_td).strftime("%Y-%m-%d %H:%M:%S"),
                ))
            sys.stdout.flush()
    return tick_count