def __on_kline_data(self, data):
    """Handle a websocket kline message: emit a Candle signal and persist closed bars."""
    if data.get('e', '') != 'kline':
        return

    kline = data['k']
    symbol = kline['s']
    tf = self.REV_TF_MAP[kline['i']]

    o, h, l, c = (float(kline['o']), float(kline['h']),
                  float(kline['l']), float(kline['c']))

    # only price, no spread: bid and ofr sides carry the same OHLC
    candle = Candle(kline['t'] * 0.001, tf)
    candle.set_bid_ohlc(o, h, l, c)
    candle.set_ofr_ohlc(o, h, l, c)
    candle.set_volume(float(kline['v']))
    candle.set_consolidated(kline['x'])

    self.service.notify(Signal.SIGNAL_CANDLE_DATA, self.name, (symbol, candle))

    if kline['x'] and not self._read_only:
        # write only consolidated candles. values are string its perfect
        Database.inst().store_market_ohlc((
            self.name, symbol, int(kline['t']), tf,
            kline['o'], kline['h'], kline['l'], kline['c'],
            kline['o'], kline['h'], kline['l'], kline['c'],
            kline['v']))
def query(self, timeframe, from_date, to_date, limit_or_last_n, auto_close=True):
    """ Query ohlcs for a timeframe.
    @param timeframe Timeframe of the OHLCs to fetch.
    @param from_date Optional datetime lower bound.
    @param to_date Optional datetime upper bound.
    @param limit_or_last_n Optional row limit / last N rows.
    @param auto_close Close the DB connection once done (default True).
    @return list of Candle (empty list on error).
    """
    cursor = self._db.cursor()

    try:
        if from_date and to_date:
            self.query_from_to(cursor, timeframe, from_date, to_date)
        elif from_date:
            self.query_from_limit(cursor, timeframe, from_date, limit_or_last_n)
        elif to_date:
            # NOTE(review): original omitted the limit argument here — assumed it
            # must be forwarded like the from_date branch; confirm the signature.
            self.query_from_limit(cursor, timeframe, to_date, limit_or_last_n)
        elif limit_or_last_n:  # fixed: was an undefined name 'limit'
            self.query_last(cursor, timeframe, limit_or_last_n)
        else:
            self.query_all(cursor, timeframe)
    except Exception as e:
        logger.error(repr(e))
        self.close()
        return []

    rows = cursor.fetchall()
    ohlcs = []

    for row in rows:
        timestamp = float(row[0]) * 0.001  # ms to float second timestamp

        ohlc = Candle(timestamp, timeframe)
        ohlc.set_bid_ohlc(float(row[1]), float(row[2]), float(row[3]), float(row[4]))
        ohlc.set_ofr_ohlc(float(row[5]), float(row[6]), float(row[7]), float(row[8]))
        ohlc.set_volume(float(row[9]))

        ohlcs.append(ohlc)

    if auto_close:
        self.close()

    # fixed: was 'return data' (undefined name)
    return ohlcs
def process_ohlc(self):
    """Process pending OHLC work: bulk selects, bulk inserts, liquidation
    inserts and periodic cleanup of old OHLCs.

    Pending work is popped under the mutex and executed outside of it; on a
    database error it is pushed back so it is retried on the next call.
    """
    #
    # select market ohlcs
    #

    with self._mutex:
        mks = self._pending_ohlc_select
        self._pending_ohlc_select = []

    if mks:
        try:
            cursor = self._db.cursor()

            for mk in mks:
                # mk: (notifiable, broker_id, market_id, timeframe, from_ts, to_ts, last_n)
                # NOTE(review): SQL is built by %-interpolation; values come from
                # internal queues, but parameterized queries would be safer.
                if mk[6]:
                    # last n
                    cursor.execute("""SELECT COUNT(*) FROM ohlc
                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s""" % (mk[1], mk[2], mk[3]))

                    count = int(cursor.fetchone()[0])
                    offset = max(0, count - mk[6])

                    # LIMIT should not be necessary then
                    cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s
                        ORDER BY timestamp ASC LIMIT %i OFFSET %i""" % (
                            mk[1], mk[2], mk[3], mk[6], offset))
                elif mk[4] and mk[5]:
                    # from to
                    cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i AND timestamp <= %i
                        ORDER BY timestamp ASC""" % (
                            mk[1], mk[2], mk[3], mk[4], mk[5]))
                elif mk[4]:
                    # from to now
                    cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i
                        ORDER BY timestamp ASC""" % (
                            mk[1], mk[2], mk[3], mk[4]))
                elif mk[5]:
                    # to now
                    cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp <= %i
                        ORDER BY timestamp ASC""" % (
                            mk[1], mk[2], mk[3], mk[5]))
                else:
                    # all
                    cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s
                        ORDER BY timestamp ASC""" % (
                            mk[1], mk[2], mk[3]))

                rows = cursor.fetchall()
                ohlcs = []

                for row in rows:
                    timestamp = float(row[0]) * 0.001  # to float second timestamp

                    ohlc = Candle(timestamp, mk[3])
                    ohlc.set_bid_ohlc(float(row[1]), float(row[2]), float(row[3]), float(row[4]))
                    ohlc.set_ofr_ohlc(float(row[5]), float(row[6]), float(row[7]), float(row[8]))
                    # zero-volume ohlcs are kept: some markets (e.g. indices)
                    # legitimately lack volume information
                    ohlc.set_volume(float(row[9]))

                    if ohlc.timestamp >= Instrument.basetime(mk[3], time.time()):
                        ohlc.set_consolidated(False)  # current

                    ohlcs.append(ohlc)

                # notify
                mk[0].notify(Signal.SIGNAL_CANDLE_DATA_BULK, mk[1], (mk[2], mk[3], ohlcs))
        except self.psycopg2.OperationalError as e:
            self.try_reconnect(e)

            # retry the next time
            with self._mutex:
                self._pending_ohlc_select = mks + self._pending_ohlc_select
        except Exception as e:
            self.on_error(e)

            # retry the next time
            with self._mutex:
                self._pending_ohlc_select = mks + self._pending_ohlc_select

    #
    # insert market ohlcs
    #

    if time.time() - self._last_ohlc_flush >= 60 or len(self._pending_ohlc_insert) > 500:
        with self._mutex:
            mkd = self._pending_ohlc_insert
            self._pending_ohlc_insert = []

        if mkd:
            try:
                cursor = self._db.cursor()

                elts = []
                data = set()

                # deduplicate on (broker_id, market_id, timestamp, timeframe);
                # first occurrence wins
                for mk in mkd:
                    if (mk[0], mk[1], mk[2], mk[3]) not in data:
                        elts.append("('%s', '%s', %i, %i, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (
                            mk[0], mk[1], mk[2], mk[3], mk[4], mk[5], mk[6], mk[7], mk[8], mk[9], mk[10], mk[11], mk[12]))

                        data.add((mk[0], mk[1], mk[2], mk[3]))

                query = ' '.join((
                    "INSERT INTO ohlc(broker_id, market_id, timestamp, timeframe, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume) VALUES",
                    ','.join(elts),
                    "ON CONFLICT (broker_id, market_id, timestamp, timeframe) DO UPDATE SET bid_open = EXCLUDED.bid_open, bid_high = EXCLUDED.bid_high, bid_low = EXCLUDED.bid_low, bid_close = EXCLUDED.bid_close, ask_open = EXCLUDED.ask_open, ask_high = EXCLUDED.ask_high, ask_low = EXCLUDED.ask_low, ask_close = EXCLUDED.ask_close, volume = EXCLUDED.volume"))

                cursor.execute(query)
                self._db.commit()
            except self.psycopg2.OperationalError as e:
                self.try_reconnect(e)

                # retry the next time
                with self._mutex:
                    self._pending_ohlc_insert = mkd + self._pending_ohlc_insert
            except Exception as e:
                self.on_error(e)

                # retry the next time
                with self._mutex:
                    self._pending_ohlc_insert = mkd + self._pending_ohlc_insert

        self._last_ohlc_flush = time.time()

    #
    # insert market liquidation
    #

    with self._mutex:
        mkd = self._pending_liquidation_insert
        self._pending_liquidation_insert = []

    if mkd:
        try:
            cursor = self._db.cursor()

            elts = []

            for mk in mkd:
                elts.append("('%s', '%s', %i, %i, '%s', '%s')" % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5]))

            # no conflict handling: duplicated liquidation rows are accepted
            query = ' '.join(("INSERT INTO liquidation(broker_id, market_id, timestamp, direction, price, quantity) VALUES",
                              ','.join(elts)))

            cursor.execute(query)
            self._db.commit()
        # fixed: was bare 'psycopg2.OperationalError' — the module is held on
        # self (self.psycopg2) everywhere else in this method
        except self.psycopg2.OperationalError as e:
            self.try_reconnect(e)

            # retry the next time
            with self._mutex:
                self._pending_liquidation_insert = mkd + self._pending_liquidation_insert
        except Exception as e:
            self.on_error(e)

            # retry the next time
            with self._mutex:
                self._pending_liquidation_insert = mkd + self._pending_liquidation_insert

    #
    # clean older ohlcs
    #

    if self._autocleanup:
        if time.time() - self._last_ohlc_clean >= OhlcStorage.CLEANUP_DELAY:
            try:
                now = time.time()
                cursor = self._db.cursor()

                for timeframe, timestamp in OhlcStorage.CLEANERS:
                    ts = int(now - timestamp) * 1000
                    # @todo make a count before
                    cursor.execute("DELETE FROM ohlc WHERE timeframe <= %i AND timestamp < %i" % (timeframe, ts))

                self._db.commit()
            # fixed: was bare 'psycopg2.OperationalError' (see liquidation handler)
            except self.psycopg2.OperationalError as e:
                self.try_reconnect(e)
            except Exception as e:
                self.on_error(e)

            self._last_ohlc_clean = time.time()
def fetch_and_generate(self, market_id, timeframe, from_date=None, to_date=None, n_last=1000,
                       fetch_option="", cascaded=None):
    """Fetch trades (timeframe 0) or candles and store them, optionally
    generating higher timeframes up to 'cascaded'.

    @param market_id Market identifier.
    @param timeframe Timeframe to fetch; 0 means trades/ticks, else must be in GENERATED_TF.
    @param from_date Optional datetime lower bound (computed from n_last when omitted).
    @param to_date Optional datetime upper bound (defaults to now + one timeframe).
    @param n_last Number of last bars used to derive from_date when from_date is omitted.
    @param fetch_option Extra fetch option (currently unused here).
    @param cascaded Optional max timeframe for cascaded candle generation.
    """
    if timeframe > 0 and timeframe not in self.GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe, ))
        return

    def _shift_months(dt, months):
        # fixed: datetime.timedelta has no 'months' argument (raised TypeError).
        # Do calendar arithmetic instead, clamping the day to the month length.
        import calendar
        total = dt.month - 1 + months
        year = dt.year + total // 12
        month = total % 12 + 1
        day = min(dt.day, calendar.monthrange(year, month)[1])
        return dt.replace(year=year, month=month, day=day)

    generators = []
    from_tf = timeframe

    self._last_ticks = []
    self._last_ohlcs = {}

    if not from_date and n_last:
        # compute a from date
        today = datetime.now().astimezone(UTC())

        if timeframe >= Instrument.TF_MONTH:
            from_date = _shift_months(
                today, -int(timeframe / Instrument.TF_MONTH) * n_last).replace(
                    day=1, hour=0, minute=0, second=0)
        elif timeframe >= Instrument.TF_1D:
            from_date = (today - timedelta(
                days=int(timeframe / Instrument.TF_1D) * n_last)).replace(
                    hour=0, minute=0, second=0)
        elif timeframe >= Instrument.TF_1H:
            from_date = (today - timedelta(
                hours=int(timeframe / Instrument.TF_1H) * n_last)).replace(
                    minute=0, second=0)
        elif timeframe >= Instrument.TF_1M:
            from_date = (today - timedelta(
                minutes=int(timeframe / Instrument.TF_1M) * n_last)).replace(second=0)
        elif timeframe >= Instrument.TF_1S:
            from_date = today - timedelta(seconds=int(timeframe / Instrument.TF_1S) * n_last)

        from_date = from_date.replace(microsecond=0)

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            to_date = _shift_months(today, 1)
        else:
            to_date = today + timedelta(seconds=timeframe)

        to_date = to_date.replace(microsecond=0)

    # cascaded generation of candles
    if cascaded:
        for tf in Fetcher.GENERATED_TF:
            if tf > timeframe:
                # from timeframe greater than initial
                if tf <= cascaded:
                    # until max cascaded timeframe
                    generators.append(CandleGenerator(from_tf, tf))
                    from_tf = tf

                    # store for generation
                    self._last_ohlcs[tf] = []
            else:
                from_tf = tf

    if timeframe > 0:
        self._last_ohlcs[timeframe] = []

    n = 0
    t = 0

    if timeframe == 0:
        for data in self.fetch_trades(market_id, from_date, to_date, None):
            # store (int timestamp in ms, str bid, str ofr, str volume)
            Database.inst().store_market_trade(
                (self.name, market_id, data[0], data[1], data[2], data[3]))

            if generators:
                self._last_ticks.append(
                    (float(data[0]) * 0.001, float(data[1]), float(data[2]), float(data[3])))

            # generate higher candles
            for generator in generators:
                if generator.from_tf == 0:
                    candles = generator.generate_from_ticks(self._last_ticks)

                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf] += candles

                    # remove consumed ticks
                    self._last_ticks = []
                else:
                    candles = generator.generate_from_candles(self._last_ohlcs[generator.from_tf])

                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf] += candles

                    # remove consumed candles
                    self._last_ohlcs[generator.from_tf] = []

            n += 1
            t += 1

            if n == 10000:
                n = 0
                Terminal.inst().info("%i trades for %s..." % (t, market_id))

                # calm down the storage of tick, if parsing is faster
                while Database.inst().num_pending_ticks_storage() > Fetcher.MAX_PENDING_TICK:
                    time.sleep(Fetcher.TICK_STORAGE_DELAY)  # wait a little before continue

        logger.info("Fetched %i trades for %s" % (t, market_id))

    elif timeframe > 0:
        for data in self.fetch_candles(market_id, timeframe, from_date, to_date, None):
            # store (int timestamp ms, str open bid, high bid, low bid, close bid,
            # open ofr, high ofr, low ofr, close ofr, volume)
            Database.inst().store_market_ohlc((
                self.name, market_id, data[0], int(timeframe),
                data[1], data[2], data[3], data[4],
                data[5], data[6], data[7], data[8],
                data[9]))

            if generators:
                candle = Candle(float(data[0]) * 0.001, timeframe)

                candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]), float(data[4]))
                candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]), float(data[8]))

                candle.set_volume(float(data[9]))
                candle.set_consolidated(True)

                self._last_ohlcs[timeframe].append(candle)

                # generate higher candles
                for generator in generators:
                    candles = generator.generate_from_candles(self._last_ohlcs[generator.from_tf])

                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf].extend(candles)

                    # remove consumed candles
                    self._last_ohlcs[generator.from_tf] = []

            n += 1
            t += 1

            if n == 1000:
                n = 0
                Terminal.inst().info("%i candles for %s in %s..." % (
                    t, market_id, timeframe_to_str(timeframe)))

        logger.info("Fetched %i candles for %s in %s" % (t, market_id, timeframe_to_str(timeframe)))
def process_ohlc(self):
    """Process pending OHLC selects, batched inserts and periodic cleanup.

    Pending work is popped under the lock and executed outside of it; on
    error it is pushed back so it is retried on the next call.
    """
    #
    # select market ohlcs
    #

    self.lock()
    mks = self._pending_ohlc_select
    self._pending_ohlc_select = []
    self.unlock()

    if mks:
        try:
            cursor = self._db.cursor()

            # mk: (notifiable, broker_id, market_id, timeframe, from_ts, to_ts, last_n)
            # NOTE(review): SQL is built with %-interpolation; values come from
            # internal queues, but parameterized queries would be safer.
            for mk in mks:
                if mk[6]:
                    # last n: count first, then offset to keep only the tail
                    cursor.execute(
                        """SELECT COUNT(*) FROM ohlc WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s"""
                        % (mk[1], mk[2], mk[3]))

                    count = int(cursor.fetchone()[0])
                    offset = max(0, count - mk[6])

                    # LIMIT should not be necessary then
                    cursor.execute(
                        """SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                            WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s ORDER BY timestamp ASC LIMIT %i OFFSET %i"""
                        % (mk[1], mk[2], mk[3], mk[6], offset))
                elif mk[4] and mk[5]:
                    # from to
                    cursor.execute(
                        """SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                            WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i AND timestamp <= %i ORDER BY timestamp ASC"""
                        % (mk[1], mk[2], mk[3], mk[4], mk[5]))
                elif mk[4]:
                    # from to now
                    cursor.execute(
                        """SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                            WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i ORDER BY timestamp ASC"""
                        % (mk[1], mk[2], mk[3], mk[4]))
                elif mk[5]:
                    # to now
                    cursor.execute(
                        """SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                            WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp <= %i ORDER BY timestamp ASC"""
                        % (mk[1], mk[2], mk[3], mk[5]))
                else:
                    # all
                    cursor.execute(
                        """SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                            WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s ORDER BY timestamp ASC"""
                        % (mk[1], mk[2], mk[3]))

                rows = cursor.fetchall()
                ohlcs = []

                for row in rows:
                    timestamp = float(row[0]) * 0.001  # to float second timestamp

                    ohlc = Candle(timestamp, mk[3])

                    ohlc.set_bid_ohlc(float(row[1]), float(row[2]), float(row[3]), float(row[4]))
                    ohlc.set_ofr_ohlc(float(row[5]), float(row[6]), float(row[7]), float(row[8]))

                    # if float(row[9]) <= 0:
                    #     # prefer to ignore empty volume ohlc because it can broke volume signal and it is a no way but it could be
                    #     # a lack of this information like on SPX500 of ig.com. So how to manage that cases...
                    #     continue

                    ohlc.set_volume(float(row[9]))

                    ohlcs.append(ohlc)

                # notify
                mk[0].notify(Signal.SIGNAL_CANDLE_DATA_BULK, mk[1], (mk[2], mk[3], ohlcs))
        except Exception as e:
            # check database for valid ohlc and volumes
            logger.error(repr(e))

            # retry the next time
            self.lock()
            self._pending_ohlc_select = mks + self._pending_ohlc_select
            self.unlock()

    #
    # insert market ohlcs
    #

    # flush at most once per minute, or earlier when the backlog grows past 500
    if time.time() - self._last_ohlc_flush >= 60 or len(self._pending_ohlc_insert) > 500:
        self.lock()
        mkd = self._pending_ohlc_insert
        self._pending_ohlc_insert = []
        self.unlock()

        if mkd:
            try:
                cursor = self._db.cursor()

                # single batched upsert; existing rows are overwritten on the
                # (broker_id, market_id, timestamp, timeframe) key
                query = ' '.join((
                    "INSERT INTO ohlc(broker_id, market_id, timestamp, timeframe, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume) VALUES",
                    ','.join([
                        "('%s', '%s', %i, %i, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')"
                        % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5], mk[6], mk[7], mk[8], mk[9], mk[10], mk[11], mk[12])
                        for mk in mkd
                    ]),
                    "ON CONFLICT (broker_id, market_id, timestamp, timeframe) DO UPDATE SET bid_open = EXCLUDED.bid_open, bid_high = EXCLUDED.bid_high, bid_low = EXCLUDED.bid_low, bid_close = EXCLUDED.bid_close, ask_open = EXCLUDED.ask_open, ask_high = EXCLUDED.ask_high, ask_low = EXCLUDED.ask_low, ask_close = EXCLUDED.ask_close, volume = EXCLUDED.volume"
                ))

                cursor.execute(query)
                self._db.commit()
            except Exception as e:
                logger.error(repr(e))

                # retry the next time
                self.lock()
                self._pending_ohlc_insert = mkd + self._pending_ohlc_insert
                self.unlock()

        self._last_ohlc_flush = time.time()

    #
    # clean older ohlcs
    #

    if time.time() - self._last_ohlc_clean >= OhlcStorage.CLEANUP_DELAY:
        try:
            now = time.time()
            cursor = self._db.cursor()

            # drop rows older than each cleaner's retention, per max timeframe
            for timeframe, timestamp in OhlcStorage.CLEANERS:
                ts = int(now - timestamp) * 1000
                # @todo make a count before
                cursor.execute(
                    "DELETE FROM ohlc WHERE timeframe <= %i AND timestamp < %i" % (timeframe, ts))

            self._db.commit()
        except Exception as e:
            logger.error(repr(e))

        self._last_ohlc_clean = time.time()
def fetch_and_generate(self, market_id, timeframe, n_last=1, cascaded=None):
    """ For initial fetching of the current OHLC.

    @param market_id Market identifier.
    @param timeframe Timeframe to fetch; when > 0 it must be in GENERATED_TF.
    @param n_last Number of last candles to fetch.
    @param cascaded Optional max timeframe for cascaded candle generation.
    """
    if timeframe > 0 and timeframe not in self.GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe, ))
        return

    generators = []
    from_tf = timeframe

    if market_id not in self._last_ohlc:
        self._last_ohlc[market_id] = {}

    # compute a from date
    today = datetime.now().astimezone(UTC())
    from_date = today - timedelta(seconds=timeframe * n_last)
    to_date = today

    last_ohlcs = {}

    # cascaded generation of candles
    if cascaded:
        for tf in Watcher.GENERATED_TF:
            if tf > timeframe:
                # from timeframe greater than initial
                if tf <= cascaded:
                    # until max cascaded timeframe
                    generators.append(CandleGenerator(from_tf, tf))
                    from_tf = tf

                    # store for generation
                    last_ohlcs[tf] = []
            else:
                from_tf = tf

    if timeframe > 0:
        last_ohlcs[timeframe] = []

    n = 0

    for data in self.fetch_candles(market_id, timeframe, from_date, to_date, None):
        # store (int timestamp ms, str open/high/low/close bid, open/high/low/close ofr, volume)
        if not self._read_only:
            # fixed: was store_market_trade with candle fields (timestamp, open,
            # high, low) passed as (timestamp, bid, ofr, volume) — store the
            # candle row as an OHLC, consistent with the fetcher implementations
            Database.inst().store_market_ohlc((
                self.name, market_id, data[0], int(timeframe),
                data[1], data[2], data[3], data[4],
                data[5], data[6], data[7], data[8],
                data[9]))

        candle = Candle(float(data[0]) * 0.001, timeframe)

        candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]), float(data[4]))
        candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]), float(data[8]))

        candle.set_volume(float(data[9]))

        if candle.timestamp >= Instrument.basetime(timeframe, time.time()):
            candle.set_consolidated(False)  # current

        last_ohlcs[timeframe].append(candle)

        # only the last
        self._last_ohlc[market_id][timeframe] = candle

        # generate higher candles
        for generator in generators:
            candles = generator.generate_from_candles(last_ohlcs[generator.from_tf], False)

            if candles:
                if not self._read_only:
                    for c in candles:
                        self.store_candle(market_id, generator.to_tf, c)

                last_ohlcs[generator.to_tf].extend(candles)

                # only the last as current
                self._last_ohlc[market_id][generator.to_tf] = candles[-1]
            elif generator.current:
                self._last_ohlc[market_id][generator.to_tf] = generator.current

            # remove consumed candles
            last_ohlcs[generator.from_tf] = []

        n += 1

    # the kept last candle of each timeframe is the in-progress (current) one
    for k, ohlc in self._last_ohlc[market_id].items():
        if ohlc:
            ohlc.set_consolidated(False)
def do_rebuilder(options):
    """Rebuild higher-timeframe OHLCs from stored ticks or candles.

    @param options dict with at least: identity, broker, market (comma-separated),
        and optionally timeframe, cascaded, from, to.
    """
    Terminal.inst().info("Starting SIIS rebuilder using %s identity..." % options['identity'])
    Terminal.inst().flush()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    timeframe = -1
    cascaded = None

    if not options.get('timeframe'):
        timeframe = 60  # default to 1min
    else:
        if options['timeframe'] in TIMEFRAME_FROM_STR_MAP:
            timeframe = TIMEFRAME_FROM_STR_MAP[options['timeframe']]
        else:
            try:
                timeframe = int(options['timeframe'])
            except (ValueError, TypeError):  # fixed: was a bare except
                pass

    if not options.get('cascaded'):
        cascaded = None
    else:
        if options['cascaded'] in TIMEFRAME_FROM_STR_MAP:
            cascaded = TIMEFRAME_FROM_STR_MAP[options['cascaded']]
        else:
            try:
                cascaded = int(options['cascaded'])
            except (ValueError, TypeError):  # fixed: was a bare except
                pass

    if timeframe < 0:
        logger.error("Invalid timeframe")
        sys.exit(-1)

    from_date = options.get('from')
    to_date = options.get('to')

    if not from_date:
        # from_date is required: it seeds the streamer timestamp below
        # (previously this crashed later with AttributeError on None)
        logger.error("Missing from date")
        sys.exit(-1)

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            # fixed: timedelta(months=1) raises TypeError — advance one calendar
            # month, clamping the day to the target month's length
            import calendar
            year = today.year + (1 if today.month == 12 else 0)
            month = 1 if today.month == 12 else today.month + 1
            day = min(today.day, calendar.monthrange(year, month)[1])
            to_date = today.replace(year=year, month=month, day=day)
        else:
            to_date = today + timedelta(seconds=timeframe)

        to_date = to_date.replace(microsecond=0)

    # fixed: removed "timeframe = options['timeframe']" which clobbered the
    # parsed integer timeframe with the raw option value just before this test
    if timeframe > 0 and timeframe not in GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe,))
        return

    for market in options['market'].split(','):
        if market.startswith('!') or market.startswith('*'):
            continue

        generators = []
        from_tf = timeframe

        last_ticks = []
        last_ohlcs = {}

        if timeframe == Instrument.TF_TICK:
            tick_streamer = Database.inst().create_tick_streamer(
                options['broker'], market, from_date=from_date, to_date=to_date)
        else:
            ohlc_streamer = Database.inst().create_ohlc_streamer(
                options['broker'], market, timeframe, from_date=from_date, to_date=to_date)

        # cascaded generation of candles
        if cascaded:
            for tf in GENERATED_TF:
                if tf > timeframe:
                    # from timeframe greater than initial
                    if tf <= cascaded:
                        # until max cascaded timeframe
                        generators.append(CandleGenerator(from_tf, tf))
                        from_tf = tf

                        # store for generation
                        last_ohlcs[tf] = []
                else:
                    from_tf = tf

        if timeframe > 0:
            last_ohlcs[timeframe] = []

        n = 0
        t = 0

        timestamp = from_date.timestamp() + Instrument.TF_1M

        if timeframe == 0:
            while not tick_streamer.finished():
                ticks = tick_streamer.next(timestamp)
                timestamp += Instrument.TF_1M  # by step of 1M

                for data in ticks:
                    if generators:
                        last_ticks.append(
                            (float(data[0]) * 0.001, float(data[1]), float(data[2]), float(data[3])))

                    # generate higher candles
                    for generator in generators:
                        if generator.from_tf == 0:
                            candles = generator.generate_from_ticks(last_ticks)

                            if candles:
                                for c in candles:
                                    store_ohlc(options['broker'], market, generator.to_tf, c)

                                last_ohlcs[generator.to_tf] += candles

                            # remove consumed ticks
                            last_ticks = []
                        else:
                            candles = generator.generate_from_candles(last_ohlcs[generator.from_tf])

                            if candles:
                                for c in candles:
                                    store_ohlc(options['broker'], market, generator.to_tf, c)

                                last_ohlcs[generator.to_tf] += candles

                            # remove consumed candles
                            last_ohlcs[generator.from_tf] = []

                    n += 1
                    t += 1

                    if n == 1000:
                        n = 0
                        Terminal.inst().info("%i..." % t)
                        Terminal.inst().flush()

                        # calm down the storage of tick, if parsing is faster
                        # NOTE(review): comparing a pending-tick COUNT against a
                        # DELAY constant looks wrong — a MAX_PENDING_TICK limit
                        # was probably intended; confirm against other tools.
                        while Database.inst().num_pending_ticks_storage() > TICK_STORAGE_DELAY:
                            time.sleep(TICK_STORAGE_DELAY)  # wait a little before continue

            logger.info("Read %i trades" % t)

        elif timeframe > 0:
            while not ohlc_streamer.finished():
                ohlcs = ohlc_streamer.next(timestamp)
                timestamp += Instrument.TF_1M  # by step of 1M

                for data in ohlcs:
                    if generators:
                        candle = Candle(float(data[0]) * 0.001, timeframe)

                        candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]), float(data[4]))
                        candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]), float(data[8]))

                        candle.set_volume(float(data[9]))
                        candle.set_consolidated(True)

                        last_ohlcs[timeframe].append(candle)

                    # generate higher candles
                    for generator in generators:
                        candles = generator.generate_from_candles(last_ohlcs[generator.from_tf])

                        if candles:
                            for c in candles:
                                store_ohlc(options['broker'], market, generator.to_tf, c)

                            last_ohlcs[generator.to_tf].extend(candles)

                        # remove consumed candles
                        last_ohlcs[generator.from_tf] = []

                    n += 1
                    t += 1

                    if n == 1000:
                        n = 0
                        Terminal.inst().info("%i..." % t)

            logger.info("Read %i candles" % t)

    Terminal.inst().info("Flushing database...")
    Terminal.inst().flush()

    Database.terminate()

    Terminal.inst().info("Rebuild done!")
    Terminal.inst().flush()

    Terminal.terminate()
    sys.exit(0)