Example #1
    def __on_kline_data(self, data):
        event_type = data.get('e', '')

        if event_type == 'kline':
            k = data['k']

            symbol = k['s']
            timestamp = k['t'] * 0.001

            tf = self.REV_TF_MAP[k['i']]

            candle = Candle(timestamp, tf)

            # kline provides a single price stream, no spread: use it for both bid and ofr
            candle.set_bid_ohlc(float(k['o']), float(k['h']), float(k['l']),
                                float(k['c']))

            candle.set_ofr_ohlc(float(k['o']), float(k['h']), float(k['l']),
                                float(k['c']))

            candle.set_volume(float(k['v']))
            candle.set_consolidated(k['x'])

            self.service.notify(Signal.SIGNAL_CANDLE_DATA, self.name,
                                (symbol, candle))

            if k['x'] and not self._read_only:
                # store only consolidated candles; the values are already strings, which is fine here
                Database.inst().store_market_ohlc(
                    (self.name, symbol, int(k['t']), tf, k['o'], k['h'],
                     k['l'], k['c'], k['o'], k['h'], k['l'], k['c'], k['v']))
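For reference, a minimal sketch of the kline websocket message this handler expects; the field names are those of the Binance kline stream that the code above actually reads, and the values are purely illustrative:

# illustrative payload only: e=event type, s=symbol, k=kline data, t=open time (ms),
# i=interval string (mapped through REV_TF_MAP), o/h/l/c=prices as strings,
# v=base volume, x=True once the kline is closed (consolidated)
sample_kline_event = {
    'e': 'kline',
    's': 'BTCUSDT',
    'k': {
        't': 1638316800000,
        's': 'BTCUSDT',
        'i': '1m',
        'o': '57000.00',
        'h': '57100.00',
        'l': '56950.00',
        'c': '57050.00',
        'v': '12.345',
        'x': False,
    },
}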
Example #2
    def update_ohlc(self, market_id, tf, ts, bid, ofr, volume):
        base_time = Instrument.basetime(ts, time.time())

        ended_ohlc = None
        ohlc = None

        # last ohlc per market id
        last_ohlc_by_timeframe = self._last_ohlc.get(market_id)
        if last_ohlc_by_timeframe is None:
            # not found for this market, insert it
            self._last_ohlc[market_id] = {tf: None}
            last_ohlc_by_timeframe = self._last_ohlc[market_id]

        if tf not in last_ohlc_by_timeframe:
            last_ohlc_by_timeframe[tf] = None
        else:
            ohlc = last_ohlc_by_timeframe[tf]

        if ohlc and (ohlc.timestamp + tf <= base_time):
            # late tick data (laggy feed?)
            if ts < base_time:
                # subsequent late ticks will be ignored...
                # not perfect for storage in laggy cases, but live we cannot deal with late data
                self.__update_ohlc(ohlc, bid, ofr, volume)

            # need to close the ohlc and to open a new one
            ohlc.set_consolidated(True)
            ended_ohlc = ohlc

            last_ohlc_by_timeframe[tf] = None
            ohlc = None

        if ohlc is None:
            # open a new one
            ohlc = Candle(base_time, tf)

            ohlc.set_consolidated(False)

            if bid:
                ohlc.set_bid_ohlc(bid, bid, bid, bid)
            if ofr:
                ohlc.set_ofr_ohlc(ofr, ofr, ofr, ofr)

            last_ohlc_by_timeframe[tf] = ohlc

        if ts >= ohlc.timestamp:
            self.__update_ohlc(ohlc, bid, ofr, volume)

        # stored timeframes only
        if ended_ohlc and (tf in self.STORED_TIMEFRAMES):
            Database.inst().store_market_ohlc(
                (self.name, market_id, int(ended_ohlc.timestamp * 1000), tf,
                 ended_ohlc.bid_open, ended_ohlc.bid_high, ended_ohlc.bid_low,
                 ended_ohlc.bid_close, ended_ohlc.ofr_open,
                 ended_ohlc.ofr_high, ended_ohlc.ofr_low, ended_ohlc.ofr_close,
                 ended_ohlc.volume))

        return ohlc
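Both versions of update_ohlc (here and in Example #7), as well as the generators below, manipulate a Candle object. A minimal sketch of the interface those examples assume, not the project's actual class:

# Minimal sketch of the Candle interface assumed by these examples; only the
# members used in this page are shown, attribute names are taken from the code above.
class Candle(object):

    def __init__(self, timestamp, timeframe):
        self.timestamp = timestamp    # aligned open time, in seconds
        self.timeframe = timeframe    # duration, in seconds
        self._bid_open = self._bid_high = self._bid_low = self._bid_close = 0.0
        self._ofr_open = self._ofr_high = self._ofr_low = self._ofr_close = 0.0
        self._volume = 0.0
        self._ended = True            # assumed default: consolidated

    def set_bid_ohlc(self, o, h, l, c):
        self._bid_open, self._bid_high, self._bid_low, self._bid_close = o, h, l, c

    def set_ofr_ohlc(self, o, h, l, c):
        self._ofr_open, self._ofr_high, self._ofr_low, self._ofr_close = o, h, l, c

    def set_bid(self, price):
        # initialize every bid price from a single tick price
        self.set_bid_ohlc(price, price, price, price)

    def set_ofr(self, price):
        # initialize every ofr (ask) price from a single tick price
        self.set_ofr_ohlc(price, price, price, price)

    def copy_bid(self, candle):
        self.set_bid_ohlc(candle._bid_open, candle._bid_high, candle._bid_low, candle._bid_close)

    def copy_ofr(self, candle):
        self.set_ofr_ohlc(candle._ofr_open, candle._ofr_high, candle._ofr_low, candle._ofr_close)

    def set_volume(self, volume):
        self._volume = volume

    def set_consolidated(self, consolidated):
        self._ended = consolidated

    @property
    def ended(self):
        return self._ended

    @property
    def volume(self):
        return self._volume

    @property
    def bid_open(self):
        return self._bid_open

    # read-only properties in the same pattern exist for bid_high/low/close
    # and for ofr_open/high/low/close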
Example #3
class CandleGenerator(object):

    __slots__ = '_from_tf', '_to_tf', '_candle', '_last_timestamp', '_last_consumed'

    def __init__(self, from_tf, to_tf):
        """
        @param from_tf Source timeframe (0 for ticks).
        @param to_tf Generated candle timeframe.
        """
        if from_tf and (int(to_tf) % int(from_tf) != 0):
            raise ValueError(
                "From timeframe %s must be an integral divisor of to timeframe %s"
                % (from_tf, to_tf))

        self._from_tf = float(from_tf)
        self._to_tf = float(to_tf)
        self._candle = None
        self._last_timestamp = 0
        self._last_consumed = 0

    @property
    def current(self):
        """
        Returns the current non-closed candle if it exists.
        """
        return self._candle

    @current.setter
    def current(self, candle):
        self._candle = candle

    @property
    def last_timestamp(self):
        return self._last_timestamp

    @property
    def last_consumed(self):
        return self._last_consumed

    @property
    def from_tf(self):
        return self._from_tf

    @property
    def to_tf(self):
        return self._to_tf

    def generate_from_candles(self, from_candles, ignore_non_ended=True):
        """
        Generate as many higher timeframe candles as possible from the given array of candles.
        @note Non-ended candles are ignored because they would falsify the volume.
        """
        to_candles = []
        self._last_consumed = 0

        for from_candle in from_candles:
            to_candle = self.update_from_candle(from_candle)
            if to_candle:
                to_candles.append(to_candle)

            self._last_consumed += 1

        return to_candles

    def generate_from_ticks(self, from_ticks):
        """
        Generate as many higher timeframe candles as possible from the given array of ticks.
        """
        to_candles = []
        self._last_consumed = 0

        for from_tick in from_ticks:
            to_candle = self.update_from_tick(from_tick)
            if to_candle:
                to_candles.append(to_candle)

            self._last_consumed += 1

        return to_candles

    def basetime(self, timestamp):
        if self._to_tf < 7 * 24 * 60 * 60:
            # simplest
            return int(timestamp / self._to_tf) * self._to_tf
        elif self._to_tf == 7 * 24 * 60 * 60:
            # must find the UTC first day of week
            dt = datetime.utcfromtimestamp(timestamp)
            dt = dt.replace(
                hour=0, minute=0, second=0, microsecond=0,
                tzinfo=UTC()) - timedelta(days=dt.weekday())
            return dt.timestamp()
        elif self._to_tf == 30 * 24 * 60 * 60:
            # replace by first day of month at 00h00 UTC
            dt = datetime.utcfromtimestamp(timestamp)
            dt = dt.replace(day=1,
                            hour=0,
                            minute=0,
                            second=0,
                            microsecond=0,
                            tzinfo=UTC())
            return dt.timestamp()

    def update_from_tick(self, from_tick):
        if from_tick is None:
            return None

        if from_tick[0] <= self._last_timestamp:
            # already processed (what if two consecutive ticks share the same timestamp?)
            return None

        # basetime can be slow, so only use it when creating a new candle
        # base_time = self.basetime(from_tick[0])
        ended_candle = None

        # if self._candle and self._candle.timestamp + self._to_tf <= base_time:
        if self._candle and from_tick[0] >= self._candle.timestamp + self._to_tf:
            # need to close the candle and to open a new one
            self._candle.set_consolidated(True)
            ended_candle = self._candle

            self._candle = None

        if self._candle is None:
            # open a new one
            base_time = self.basetime(from_tick[0])  # from_tick[0] directly ?
            self._candle = Candle(base_time, self._to_tf)

            self._candle.set_consolidated(False)

            # open/high/low/close all start from the initial tick price
            self._candle.set_bid(from_tick[1])
            self._candle.set_ofr(from_tick[2])

        # update volumes
        self._candle._volume += from_tick[3]

        # update bid prices

        # bid high/low
        self._candle._bid_high = max(self._candle._bid_high, from_tick[1])
        self._candle._bid_low = min(self._candle._bid_low, from_tick[1])

        # potential close
        self._candle._bid_close = from_tick[1]

        # update ofr prices

        # ofr high/low
        self._candle._ofr_high = max(self._candle._ofr_high, from_tick[2])
        self._candle._ofr_low = min(self._candle._ofr_low, from_tick[2])

        # potential close
        self._candle._ofr_close = from_tick[2]

        # keep last timestamp
        self._last_timestamp = from_tick[0]

        return ended_candle

    def update_from_candle(self, from_candle):
        """
        From a candle of a lesser timeframe, create or update the candle of a greater timeframe,
        which must be a multiple of the source timeframe.
        For example, create/update an hourly candle from 1-minute candles.

        Must be called each time a new candle of the lesser timeframe is appended.
        It only creates the last candle or updates the current one.

        A non-ended candle is ignored because it would falsify the volume.
        """
        if from_candle is None or not from_candle.ended:
            return None

        if self._from_tf != from_candle.timeframe:
            raise ValueError(
                "From candle must be of time unit %s but %s is provided" %
                (self._from_tf, from_candle.timeframe))

        if from_candle.timestamp <= self._last_timestamp:
            # already done
            return None

        base_time = self.basetime(from_candle.timestamp)
        ended_candle = None

        if self._candle and self._candle.timestamp + self._to_tf <= base_time:
            # need to close the candle and to open a new one
            self._candle.set_consolidated(True)
            ended_candle = self._candle

            self._candle = None

        if self._candle is None:
            # open a new one
            self._candle = Candle(base_time, self._to_tf)

            self._candle.set_consolidated(False)

            # open/high/low/close all start from the initial candle
            self._candle.copy_bid(from_candle)
            self._candle.copy_ofr(from_candle)

        # update volumes
        self._candle._volume += from_candle.volume

        # update bid prices
        self._candle._bid_high = max(self._candle._bid_high,
                                     from_candle._bid_high)
        self._candle._bid_low = min(self._candle._bid_low,
                                    from_candle._bid_low)

        # potential close
        self._candle._bid_close = from_candle._bid_close

        # update ofr prices
        self._candle._ofr_high = max(self._candle._ofr_high,
                                     from_candle._ofr_high)
        self._candle._ofr_low = min(self._candle._ofr_low,
                                    from_candle._ofr_low)

        # potential close
        self._candle._ofr_close = from_candle._ofr_close

        # keep last timestamp
        self._last_timestamp = from_candle.timestamp

        return ended_candle
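A rough usage sketch of the generator above, assuming a Candle like the one sketched after Example #2 and timeframes expressed in seconds; all values are illustrative:

# aggregate consolidated 1-minute candles into 5-minute candles (illustrative values)
gen = CandleGenerator(60.0, 300.0)

one_min_candles = []
for i in range(10):
    c = Candle(1638316800.0 + i * 60.0, 60.0)   # 2021-12-01 00:00 UTC onward
    c.set_bid_ohlc(100.0 + i, 101.0 + i, 99.0 + i, 100.5 + i)
    c.set_ofr_ohlc(100.1 + i, 101.1 + i, 99.1 + i, 100.6 + i)
    c.set_volume(1.0)
    c.set_consolidated(True)   # only ended candles are consumed
    one_min_candles.append(c)

five_min_candles = gen.generate_from_candles(one_min_candles)
# the first five 1m candles produce one closed 5m candle; gen.current still holds
# the open 5m candle covering the remaining ones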
Example #4
    def process_ohlc(self):
        #
        # select market ohlcs
        #

        with self._mutex:
            mks = self._pending_ohlc_select
            self._pending_ohlc_select = []

        if mks:
            try:
                cursor = self._db.cursor()

                for mk in mks:
                    if mk[6]:
                        # last n
                        cursor.execute("""SELECT COUNT(*) FROM ohlc WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s""" % (mk[1], mk[2], mk[3]))
                        count = int(cursor.fetchone()[0])
                        offset = max(0, count - mk[6])

                        # LIMIT should not be necessary then
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s ORDER BY timestamp ASC LIMIT %i OFFSET %i""" % (
                                            mk[1], mk[2], mk[3], mk[6], offset))
                    elif mk[4] and mk[5]:
                        # from to
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i AND timestamp <= %i ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3], mk[4], mk[5]))
                    elif mk[4]:
                        # from to now
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3], mk[4]))
                    elif mk[5]:
                        # to now
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp <= %i ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3], mk[5]))
                    else:
                        # all
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3]))

                    rows = cursor.fetchall()

                    ohlcs = []

                    for row in rows:
                        timestamp = float(row[0]) * 0.001  # convert ms to a float timestamp in seconds
                        ohlc = Candle(timestamp, mk[3])

                        ohlc.set_bid_ohlc(float(row[1]), float(row[2]), float(row[3]), float(row[4]))
                        ohlc.set_ofr_ohlc(float(row[5]), float(row[6]), float(row[7]), float(row[8]))

                        # if float(row[9]) <= 0:
                        #   # we may prefer to ignore zero-volume ohlc because they can break volume-based signals,
                        #   # but a zero volume can also mean the information is simply missing (e.g. SPX500 on ig.com),
                        #   # so how to manage those cases...
                        #   continue

                        ohlc.set_volume(float(row[9]))

                        if ohlc.timestamp >= Instrument.basetime(mk[3], time.time()):
                            ohlc.set_consolidated(False)  # current

                        ohlcs.append(ohlc)

                    # notify
                    mk[0].notify(Signal.SIGNAL_CANDLE_DATA_BULK, mk[1], (mk[2], mk[3], ohlcs))
            except self.psycopg2.OperationalError as e:
                self.try_reconnect(e)

                # retry the next time
                with self._mutex:
                    self._pending_ohlc_select = mks + self._pending_ohlc_select
            except Exception as e:
                self.on_error(e)

                # retry the next time
                with self._mutex:
                    self._pending_ohlc_select = mks + self._pending_ohlc_select

        #
        # insert market ohlcs
        #

        if time.time() - self._last_ohlc_flush >= 60 or len(self._pending_ohlc_insert) > 500:
            with self._mutex:
                mkd = self._pending_ohlc_insert
                self._pending_ohlc_insert = []

            if mkd:
                try:
                    cursor = self._db.cursor()

                    elts = []
                    data = set()

                    for mk in mkd:
                        if (mk[0], mk[1], mk[2], mk[3]) not in data:
                            elts.append("('%s', '%s', %i, %i, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5], mk[6], mk[7], mk[8], mk[9], mk[10], mk[11], mk[12]))
                            data.add((mk[0], mk[1], mk[2], mk[3]))

                    query = ' '.join(("INSERT INTO ohlc(broker_id, market_id, timestamp, timeframe, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume) VALUES",
                                ','.join(elts),
                                "ON CONFLICT (broker_id, market_id, timestamp, timeframe) DO UPDATE SET bid_open = EXCLUDED.bid_open, bid_high = EXCLUDED.bid_high, bid_low = EXCLUDED.bid_low, bid_close = EXCLUDED.bid_close, ask_open = EXCLUDED.ask_open, ask_high = EXCLUDED.ask_high, ask_low = EXCLUDED.ask_low, ask_close = EXCLUDED.ask_close, volume = EXCLUDED.volume"))

                    # query = ' '.join((
                    #     "INSERT INTO ohlc(broker_id, market_id, timestamp, timeframe, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume) VALUES",
                    #     ','.join(["('%s', '%s', %i, %i, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5], mk[6], mk[7], mk[8], mk[9], mk[10], mk[11], mk[12]) for mk in mkd]),
                    #     "ON CONFLICT (broker_id, market_id, timestamp, timeframe) DO UPDATE SET bid_open = EXCLUDED.bid_open, bid_high = EXCLUDED.bid_high, bid_low = EXCLUDED.bid_low, bid_close = EXCLUDED.bid_close, ask_open = EXCLUDED.ask_open, ask_high = EXCLUDED.ask_high, ask_low = EXCLUDED.ask_low, ask_close = EXCLUDED.ask_close, volume = EXCLUDED.volume"
                    # ))

                    cursor.execute(query)

                    self._db.commit()
                except self.psycopg2.OperationalError as e:
                    self.try_reconnect(e)

                    # retry the next time
                    with self._mutex:
                        self._pending_ohlc_insert = mkd + self._pending_ohlc_insert
                except Exception as e:
                    self.on_error(e)

                    # retry the next time
                    with self._mutex:
                        self._pending_ohlc_insert = mkd + self._pending_ohlc_insert

                self._last_ohlc_flush = time.time()

        #
        # insert market liquidation
        #

        with self._mutex:
            mkd = self._pending_liquidation_insert
            self._pending_liquidation_insert = []

        if mkd:
            try:
                cursor = self._db.cursor()

                elts = []

                for mk in mkd:
                    elts.append("('%s', '%s', %i, %i, '%s', '%s')" % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5]))

                # query = ' '.join(("INSERT INTO liquidation(broker_id, market_id, timestamp, direction, price, quantity) VALUES",
                #             ','.join(elts),
                #             "ON CONFLICT (broker_id, market_id, timestamp) DO NOTHING"))
                query = ' '.join(("INSERT INTO liquidation(broker_id, market_id, timestamp, direction, price, quantity) VALUES", ','.join(elts)))

                cursor.execute(query)

                self._db.commit()
            except self.psycopg2.OperationalError as e:
                self.try_reconnect(e)

                # retry the next time
                with self._mutex:
                    self._pending_liquidation_insert = mkd + self._pending_liquidation_insert
            except Exception as e:
                self.on_error(e)

                # retry the next time
                with self._mutex:
                    self._pending_liquidation_insert = mkd + self._pending_liquidation_insert

        #
        # clean older ohlcs
        #

        if self._autocleanup:
            if time.time() - self._last_ohlc_clean >= OhlcStorage.CLEANUP_DELAY:
                try:
                    now = time.time()
                    cursor = self._db.cursor()

                    for timeframe, timestamp in OhlcStorage.CLEANERS:
                        ts = int(now - timestamp) * 1000
                        # @todo make a count before
                        cursor.execute("DELETE FROM ohlc WHERE timeframe <= %i AND timestamp < %i" % (timeframe, ts))

                    self._db.commit()
                except self.psycopg2.OperationalError as e:
                    self.try_reconnect(e)
                except Exception as e:
                    self.on_error(e)

                self._last_ohlc_clean = time.time()
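The selects and inserts above interpolate values directly into the SQL text with %. As a hedged alternative (not the project's code), the bulk OHLC upsert could go through psycopg2's parameter binding via psycopg2.extras.execute_values, which also takes care of quoting:

from psycopg2.extras import execute_values

def insert_ohlc_rows(db, rows):
    # rows: iterable of 13-tuples in the same order as the pending inserts above:
    # (broker_id, market_id, timestamp, timeframe, bid_open, bid_high, bid_low, bid_close,
    #  ask_open, ask_high, ask_low, ask_close, volume)
    cursor = db.cursor()
    execute_values(
        cursor,
        """INSERT INTO ohlc(broker_id, market_id, timestamp, timeframe,
                            bid_open, bid_high, bid_low, bid_close,
                            ask_open, ask_high, ask_low, ask_close, volume)
           VALUES %s
           ON CONFLICT (broker_id, market_id, timestamp, timeframe) DO UPDATE SET
               bid_open = EXCLUDED.bid_open, bid_high = EXCLUDED.bid_high,
               bid_low = EXCLUDED.bid_low, bid_close = EXCLUDED.bid_close,
               ask_open = EXCLUDED.ask_open, ask_high = EXCLUDED.ask_high,
               ask_low = EXCLUDED.ask_low, ask_close = EXCLUDED.ask_close,
               volume = EXCLUDED.volume""",
        rows)
    db.commit()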
Example #5
    def fetch_and_generate(self,
                           market_id,
                           timeframe,
                           from_date=None,
                           to_date=None,
                           n_last=1000,
                           fetch_option="",
                           cascaded=None):
        if timeframe > 0 and timeframe not in self.GENERATED_TF:
            logger.error("Timeframe %i is not allowed !" % (timeframe, ))
            return

        generators = []
        from_tf = timeframe

        self._last_ticks = []
        self._last_ohlcs = {}

        if not from_date and n_last:
            # compute a from date
            today = datetime.now().astimezone(UTC())

            if timeframe >= Instrument.TF_MONTH:
                # timedelta has no 'months' argument, approximate a month as 30 days
                from_date = (today - timedelta(
                    days=30 * int(timeframe / Instrument.TF_MONTH) * n_last)).replace(
                        day=1, hour=0, minute=0, second=0)
            elif timeframe >= Instrument.TF_1D:
                from_date = (today - timedelta(
                    days=int(timeframe / Instrument.TF_1D) * n_last)).replace(
                        hour=0).replace(minute=0).replace(second=0)
            elif timeframe >= Instrument.TF_1H:
                from_date = (today - timedelta(
                    hours=int(timeframe / Instrument.TF_1H) * n_last)).replace(
                        minute=0).replace(second=0)
            elif timeframe >= Instrument.TF_1M:
                from_date = (
                    today -
                    timedelta(minutes=int(timeframe / Instrument.TF_1M) *
                              n_last)).replace(second=0)
            elif timeframe >= Instrument.TF_1S:
                from_date = (today - timedelta(
                    seconds=int(timeframe / Instrument.TF_1S) * n_last))

            from_date = from_date.replace(microsecond=0)

        if not to_date:
            today = datetime.now().astimezone(UTC())

            if timeframe == Instrument.TF_MONTH:
                # timedelta has no 'months' argument, approximate a month as 30 days
                to_date = today + timedelta(days=30)
            else:
                to_date = today + timedelta(seconds=timeframe)

            to_date = to_date.replace(microsecond=0)

        # cascaded generation of candles
        if cascaded:
            for tf in Fetcher.GENERATED_TF:
                if tf > timeframe:
                    # from timeframe greater than initial
                    if tf <= cascaded:
                        # until max cascaded timeframe
                        generators.append(CandleGenerator(from_tf, tf))
                        from_tf = tf

                        # store for generation
                        self._last_ohlcs[tf] = []
                else:
                    from_tf = tf

        if timeframe > 0:
            self._last_ohlcs[timeframe] = []

        n = 0
        t = 0

        if timeframe == 0:
            for data in self.fetch_trades(market_id, from_date, to_date, None):
                # store (int timestamp in ms, str bid, str ofr, str volume)
                Database.inst().store_market_trade(
                    (self.name, market_id, data[0], data[1], data[2], data[3]))

                if generators:
                    self._last_ticks.append(
                        (float(data[0]) * 0.001, float(data[1]),
                         float(data[2]), float(data[3])))

                # generate higher candles
                for generator in generators:
                    if generator.from_tf == 0:
                        candles = generator.generate_from_ticks(
                            self._last_ticks)

                        if candles:
                            for c in candles:
                                self.store_candle(market_id, generator.to_tf,
                                                  c)

                            self._last_ohlcs[generator.to_tf] += candles

                        # remove consumed ticks
                        self._last_ticks = []
                    else:
                        candles = generator.generate_from_candles(
                            self._last_ohlcs[generator.from_tf])

                        if candles:
                            for c in candles:
                                self.store_candle(market_id, generator.to_tf,
                                                  c)

                            self._last_ohlcs[generator.to_tf] += candles

                        # remove consumed candles
                        self._last_ohlcs[generator.from_tf] = []

                n += 1
                t += 1

                if n == 10000:
                    n = 0
                    Terminal.inst().info("%i trades for %s..." %
                                         (t, market_id))

                # slow down if tick storage lags behind the parsing
                while Database.inst().num_pending_ticks_storage() > Fetcher.MAX_PENDING_TICK:
                    time.sleep(Fetcher.TICK_STORAGE_DELAY)  # wait a little before continuing

            logger.info("Fetched %i trades for %s" % (t, market_id))

        elif timeframe > 0:
            for data in self.fetch_candles(market_id, timeframe, from_date,
                                           to_date, None):
                # store (int timestamp ms, str open bid, high bid, low bid, close bid, open ofr, high ofr, low ofr, close ofr, volume)
                Database.inst().store_market_ohlc(
                    (self.name, market_id, data[0], int(timeframe), data[1],
                     data[2], data[3], data[4], data[5], data[6], data[7],
                     data[8], data[9]))

                if generators:
                    candle = Candle(float(data[0]) * 0.001, timeframe)

                    candle.set_bid_ohlc(float(data[1]), float(data[2]),
                                        float(data[3]), float(data[4]))
                    candle.set_ofr_ohlc(float(data[5]), float(data[6]),
                                        float(data[7]), float(data[8]))

                    candle.set_volume(float(data[9]))
                    candle.set_consolidated(True)

                    self._last_ohlcs[timeframe].append(candle)

                # generate higher candles
                for generator in generators:
                    candles = generator.generate_from_candles(
                        self._last_ohlcs[generator.from_tf])
                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf].extend(candles)

                    # remove consumed candles
                    self._last_ohlcs[generator.from_tf] = []

                n += 1
                t += 1

                if n == 1000:
                    n = 0
                    Terminal.inst().info(
                        "%i candles for %s in %s..." %
                        (t, market_id, timeframe_to_str(timeframe)))

            logger.info("Fetched %i candles for %s in %s" %
                        (t, market_id, timeframe_to_str(timeframe)))
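The cascaded block above builds one CandleGenerator per generated timeframe, each one consuming what the previous one produced. A condensed sketch of that chaining, with a hypothetical GENERATED_TF list (seconds, ascending):

# hypothetical GENERATED_TF values; the real list belongs to the fetcher/watcher class
GENERATED_TF = [60, 300, 900, 3600, 14400, 86400]

def build_generators(base_tf, cascaded_tf):
    # return the chain of CandleGenerator needed to go from base_tf (0 for ticks)
    # up to cascaded_tf; each generator consumes the output of the previous one
    generators = []
    from_tf = base_tf
    for tf in GENERATED_TF:
        if base_tf < tf <= cascaded_tf:
            generators.append(CandleGenerator(from_tf, tf))
            from_tf = tf
    return generators

# e.g. from 1-minute candles up to 1-hour candles: 1m->5m, 5m->15m, 15m->1h
chain = build_generators(60, 3600)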
Example #6
    def fetch_and_generate(self,
                           market_id,
                           timeframe,
                           n_last=1,
                           cascaded=None):
        """
        For initial fetching of the current OHLC.
        """
        if timeframe > 0 and timeframe not in self.GENERATED_TF:
            logger.error("Timeframe %i is not allowed !" % (timeframe, ))
            return

        generators = []
        from_tf = timeframe

        if market_id not in self._last_ohlc:
            self._last_ohlc[market_id] = {}

        # compute a from date
        today = datetime.now().astimezone(UTC())
        from_date = today - timedelta(seconds=timeframe * n_last)
        to_date = today

        last_ohlcs = {}

        # cascaded generation of candles
        if cascaded:
            for tf in Watcher.GENERATED_TF:
                if tf > timeframe:
                    # from timeframe greater than initial
                    if tf <= cascaded:
                        # until max cascaded timeframe
                        generators.append(CandleGenerator(from_tf, tf))
                        from_tf = tf

                        # store for generation
                        last_ohlcs[tf] = []
                else:
                    from_tf = tf

        if timeframe > 0:
            last_ohlcs[timeframe] = []

        n = 0

        for data in self.fetch_candles(market_id, timeframe, from_date,
                                       to_date, None):
            # store (int timestamp in ms, str bid, str ofr, str volume)
            if not self._read_only:
                Database.inst().store_market_trade(
                    (self.name, market_id, data[0], data[1], data[2], data[3]))

            candle = Candle(float(data[0]) * 0.001, timeframe)

            candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]),
                                float(data[4]))
            candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]),
                                float(data[8]))

            candle.set_volume(float(data[9]))

            if candle.timestamp >= Instrument.basetime(timeframe, time.time()):
                candle.set_consolidated(False)  # current

            last_ohlcs[timeframe].append(candle)

            # only the last
            self._last_ohlc[market_id][timeframe] = candle

            # generate higher candles
            for generator in generators:
                candles = generator.generate_from_candles(
                    last_ohlcs[generator.from_tf], False)
                if candles:
                    if not self._read_only:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                    last_ohlcs[generator.to_tf].extend(candles)

                    # only the last as current
                    self._last_ohlc[market_id][generator.to_tf] = candles[-1]

                elif generator.current:
                    self._last_ohlc[market_id][
                        generator.to_tf] = generator.current

                # remove consumed candles
                last_ohlcs[generator.from_tf] = []

            n += 1

        for k, ohlc in self._last_ohlc[market_id].items():
            if ohlc:
                ohlc.set_consolidated(False)
Example #7
    def update_ohlc(self, market_id, tf, ts, bid, ofr, volume):
        """
        Update the current OHLC or create a new one, and save them.
        @param market_id str Unique market identifier
        @param tf float Timeframe (normalized timeframe in seconds)
        @param ts float Timestamp of the update or of the tick/trade
        @param bid float Bid price.
        @param ofr float Offer/ask price.
        @param volume float Volume transacted or 0 if unspecified.
        """
        ended_ohlc = None
        ohlc = None

        # last ohlc per market id
        last_ohlc_by_timeframe = self._last_ohlc.get(market_id)
        if last_ohlc_by_timeframe is None:
            # not found for this market, insert it
            self._last_ohlc[market_id] = {tf: None}
            last_ohlc_by_timeframe = self._last_ohlc[market_id]

        if tf not in last_ohlc_by_timeframe:
            last_ohlc_by_timeframe[tf] = None
        else:
            ohlc = last_ohlc_by_timeframe[tf]

        if ohlc and ts >= ohlc.timestamp + tf:
            # need to close the current ohlc
            ohlc.set_consolidated(True)
            ended_ohlc = ohlc

            last_ohlc_by_timeframe[tf] = None
            ohlc = None

        if ohlc is None:
            # open a new one if necessary
            base_time = Instrument.basetime(tf, ts)
            ohlc = Candle(base_time, tf)

            ohlc.set_consolidated(False)

            if bid:
                ohlc.set_bid(bid)
            if ofr:
                ohlc.set_ofr(ofr)

            last_ohlc_by_timeframe[tf] = ohlc

        if ts >= ohlc.timestamp:
            # update the current OHLC
            if volume:
                ohlc._volume += volume

            if bid:
                if not ohlc._bid_open:
                    ohlc.set_bid(bid)

                # update bid prices
                ohlc._bid_high = max(ohlc._bid_high, bid)
                ohlc._bid_low = min(ohlc._bid_low, bid)

                # potential close
                ohlc._bid_close = bid

            if ofr:
                if not ohlc.ofr_open:
                    ohlc.set_ofr(ofr)

                # update ofr prices
                ohlc._ofr_high = max(ohlc._ofr_high, ofr)
                ohlc._ofr_low = min(ohlc._ofr_low, ofr)

                # potential close
                ohlc._ofr_close = ofr

        # stored timeframes only
        if ended_ohlc and (tf in self.STORED_TIMEFRAMES):
            Database.inst().store_market_ohlc(
                (self.name, market_id, int(ended_ohlc.timestamp * 1000), tf,
                 ended_ohlc.bid_open, ended_ohlc.bid_high, ended_ohlc.bid_low,
                 ended_ohlc.bid_close, ended_ohlc.ofr_open,
                 ended_ohlc.ofr_high, ended_ohlc.ofr_low, ended_ohlc.ofr_close,
                 ended_ohlc.volume))

        return ohlc
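This variant, like Examples #4 and #6, relies on Instrument.basetime(tf, ts) to floor a timestamp onto its timeframe boundary. A minimal sketch of that flooring for intraday timeframes, as an assumption about the behaviour rather than the project's implementation (weekly and monthly alignment needs calendar logic, as in CandleGenerator.basetime of Example #3):

def basetime(timeframe, timestamp):
    # floor a POSIX timestamp (seconds) onto the open time of its timeframe bucket;
    # intraday timeframes only in this sketch
    return int(timestamp / timeframe) * timeframe

# e.g. a tick at 00:34:56 UTC belongs to the 5-minute candle opened at 00:30:00 UTC
assert basetime(300, 1638318896) == 1638318600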
Example #8
File: rebuilder.py Project: rptrk/siis
def do_rebuilder(options):
    Terminal.inst().info("Starting SIIS rebuilder using %s identity..." % options['identity'])
    Terminal.inst().flush()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    timeframe = -1
    cascaded = None

    if not options.get('timeframe'):
        timeframe = 60  # default to 1min
    else:
        if options['timeframe'] in TIMEFRAME_FROM_STR_MAP:
            timeframe = TIMEFRAME_FROM_STR_MAP[options['timeframe']]
        else:
            try:
                timeframe = int(options['timeframe'])
            except ValueError:
                pass

    if not options.get('cascaded'):
        cascaded = None
    else:
        if options['cascaded'] in TIMEFRAME_FROM_STR_MAP:
            cascaded = TIMEFRAME_FROM_STR_MAP[options['cascaded']]
        else:
            try:
                cascaded = int(options['cascaded'])
            except ValueError:
                pass

    if timeframe < 0:
        logger.error("Invalid timeframe")
        sys.exit(-1)

    from_date = options.get('from')
    to_date = options.get('to')

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            # timedelta has no 'months' argument, approximate a month as 30 days
            to_date = today + timedelta(days=30)
        else:
            to_date = today + timedelta(seconds=timeframe)

        to_date = to_date.replace(microsecond=0)


    if timeframe > 0 and timeframe not in GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe,))
        return

    for market in options['market'].split(','):
        if market.startswith('!') or market.startswith('*'):
            continue

        generators = []
        from_tf = timeframe

        last_ticks = []
        last_ohlcs = {}

        if timeframe == Instrument.TF_TICK:
            tick_streamer = Database.inst().create_tick_streamer(options['broker'], market, from_date=from_date, to_date=to_date)
        else:
            ohlc_streamer = Database.inst().create_ohlc_streamer(options['broker'], market, timeframe, from_date=from_date, to_date=to_date)
    
        # cascaded generation of candles
        if cascaded:
            for tf in GENERATED_TF:
                if tf > timeframe:
                    # from timeframe greater than initial
                    if tf <= cascaded:
                        # until max cascaded timeframe
                        generators.append(CandleGenerator(from_tf, tf))
                        from_tf = tf

                        # store for generation
                        last_ohlcs[tf] = []
                else:
                    from_tf = tf

        if timeframe > 0:
            last_ohlcs[timeframe] = []

        n = 0
        t = 0

        timestamp = from_date.timestamp() + Instrument.TF_1M

        if timeframe == 0:
            while not tick_streamer.finished():
                ticks = tick_streamer.next(timestamp)
                timestamp += Instrument.TF_1M  # advance by steps of 1 minute

                for data in ticks:
                    if generators:
                        last_ticks.append((float(data[0]) * 0.001, float(data[1]), float(data[2]), float(data[3])))

                    # generate higher candles
                    for generator in generators:
                        if generator.from_tf == 0:
                            candles = generator.generate_from_ticks(last_ticks)

                            if candles:
                                for c in candles:
                                    store_ohlc(options['broker'], market, generator.to_tf, c)

                                last_ohlcs[generator.to_tf] += candles

                            # remove consumed ticks
                            last_ticks = []
                        else:
                            candles = generator.generate_from_candles(last_ohlcs[generator.from_tf])

                            if candles:
                                for c in candles:
                                    store_ohlc(options['broker'], market, generator.to_tf, c)

                                last_ohlcs[generator.to_tf] += candles

                            # remove consumed candles
                            last_ohlcs[generator.from_tf] = []

                    n += 1
                    t += 1

                    if n == 1000:
                        n = 0
                        Terminal.inst().info("%i..." % t)
                        Terminal.inst().flush()

                        # slow down if tick storage lags behind the parsing
                        while Database.inst().num_pending_ticks_storage() > TICK_STORAGE_DELAY:
                            time.sleep(TICK_STORAGE_DELAY)  # wait a little before continuing

            logger.info("Read %i trades" % t)

        elif timeframe > 0:
            while not ohlc_streamer.finished():
                ohlcs = ohlc_streamer.next(timestamp)
                timestamp += Instrument.TF_1M  # advance by steps of 1 minute

                for data in ohlcs:
                    if generators:
                        candle = Candle(float(data[0]) * 0.001, timeframe)

                        candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]), float(data[4]))
                        candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]), float(data[8]))

                        candle.set_volume(float(data[9]))
                        candle.set_consolidated(True)

                        last_ohlcs[timeframe].append(candle)

                    # generate higher candles
                    for generator in generators:
                        candles = generator.generate_from_candles(last_ohlcs[generator.from_tf])
                        if candles:
                            for c in candles:
                                store_ohlc(options['broker'], market, generator.to_tf, c)

                            last_ohlcs[generator.to_tf].extend(candles)

                        # remove consumed candles
                        last_ohlcs[generator.from_tf] = []

                    n += 1
                    t += 1

                    if n == 1000:
                        n = 0
                        Terminal.inst().info("%i..." % t)

            logger.info("Read %i candles" % t)

    Terminal.inst().info("Flushing database...")
    Terminal.inst().flush() 

    Database.terminate()

    Terminal.inst().info("Rebuild done!")
    Terminal.inst().flush()

    Terminal.terminate()
    sys.exit(0)