Example #1
    def update_ohlc(self, market_id, tf, ts, bid, ofr, volume):
        base_time = Instrument.basetime(tf, time.time())

        ended_ohlc = None
        ohlc = None

        # last ohlc per market id
        last_ohlc_by_timeframe = self._last_ohlc.get(market_id)
        if last_ohlc_by_timeframe is None:
            # not found for this market, insert it
            self._last_ohlc[market_id] = {tf: None}
            last_ohlc_by_timeframe = self._last_ohlc[market_id]

        if tf not in last_ohlc_by_timeframe:
            last_ohlc_by_timeframe[tf] = None
        else:
            ohlc = last_ohlc_by_timeframe[tf]

        if ohlc and (ohlc.timestamp + tf <= base_time):
            # late tick data (laggy feed?)
            if ts < base_time:
                # any further late data will then be ignored...
                # not perfect for storage in laggy cases, but in live we cannot deal with later data
                self.__update_ohlc(ohlc, bid, ofr, volume)

            # need to close the ohlc and to open a new one
            ohlc.set_consolidated(True)
            ended_ohlc = ohlc

            last_ohlc_by_timeframe[tf] = None
            ohlc = None

        if ohlc is None:
            # open a new one
            ohlc = Candle(base_time, tf)

            ohlc.set_consolidated(False)

            if bid:
                ohlc.set_bid_ohlc(bid, bid, bid, bid)
            if ofr:
                ohlc.set_ofr_ohlc(ofr, ofr, ofr, ofr)

            last_ohlc_by_timeframe[tf] = ohlc

        if ts >= ohlc.timestamp:
            self.__update_ohlc(ohlc, bid, ofr, volume)

        # stored timeframes only
        if ended_ohlc and (tf in self.STORED_TIMEFRAMES):
            Database.inst().store_market_ohlc(
                (self.name, market_id, int(ended_ohlc.timestamp * 1000), tf,
                 ended_ohlc.bid_open, ended_ohlc.bid_high, ended_ohlc.bid_low,
                 ended_ohlc.bid_close, ended_ohlc.ofr_open,
                 ended_ohlc.ofr_high, ended_ohlc.ofr_low, ended_ohlc.ofr_close,
                 ended_ohlc.volume))

        return ohlc
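
A minimal usage sketch, assuming "watcher" is an instance of the class exposing update_ohlc above; the market id, timeframe and prices are illustrative, not taken from the example:

    import time

    # feed one tick/trade into the 1-minute OHLC of a hypothetical market
    tf = 60.0                        # timeframe in seconds
    ts = time.time()                 # tick timestamp
    bid, ofr, volume = 100.0, 100.2, 1.5

    ohlc = watcher.update_ohlc("EURUSD", tf, ts, bid, ofr, volume)
    if ohlc:
        print(ohlc.timestamp, ohlc.bid_close, ohlc.ofr_close)
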
Example #2
    def update_from_tick(self):
        """
        During update processing, close OHLCs if not tick data arrive before.
        Then notify a signal if a ohlc is generated (and closed).
        """
        now = time.time()

        for tf in self.GENERATED_TF:
            # only if current base time is greater than the previous
            base_time = Instrument.basetime(tf, now)
            if base_time > self._last_update_times[tf]:
                self._last_update_times[tf] = base_time

                for market_id, last_ohlc_by_timeframe in self._last_ohlc.items():
                    if last_ohlc_by_timeframe:
                        ohlc = self.close_ohlc(market_id,
                                               last_ohlc_by_timeframe, tf, now)
                        if ohlc:
                            self.service.notify(Signal.SIGNAL_CANDLE_DATA,
                                                self.name, (market_id, ohlc))
Example #3
    def process_ohlc(self):
        #
        # select market ohlcs
        #

        with self._mutex:
            mks = self._pending_ohlc_select
            self._pending_ohlc_select = []

        if mks:
            try:
                cursor = self._db.cursor()

                for mk in mks:
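                    # each pending 'mk' entry is assumed, from its usage below, to be
                    # (notifier, broker_id, market_id, timeframe, from_ts, to_ts, last_n)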
                    if mk[6]:
                        # last n
                        cursor.execute("""SELECT COUNT(*) FROM ohlc WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s""" % (mk[1], mk[2], mk[3]))
                        count = int(cursor.fetchone()[0])
                        offset = max(0, count - mk[6])

                        # with the computed offset, LIMIT should not be strictly necessary
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s ORDER BY timestamp ASC LIMIT %i OFFSET %i""" % (
                                            mk[1], mk[2], mk[3], mk[6], offset))
                    elif mk[4] and mk[5]:
                        # from to
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i AND timestamp <= %i ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3], mk[4], mk[5]))
                    elif mk[4]:
                        # from to now
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp >= %i ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3], mk[4]))
                    elif mk[5]:
                        # to now
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s AND timestamp <= %i ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3], mk[5]))
                    else:
                        # all
                        cursor.execute("""SELECT timestamp, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume FROM ohlc
                                        WHERE broker_id = '%s' AND market_id = '%s' AND timeframe = %s ORDER BY timestamp ASC""" % (
                                            mk[1], mk[2], mk[3]))

                    rows = cursor.fetchall()

                    ohlcs = []

                    for row in rows:
                        timestamp = float(row[0]) * 0.001  # convert from ms to a float timestamp in seconds
                        ohlc = Candle(timestamp, mk[3])

                        ohlc.set_bid_ohlc(float(row[1]), float(row[2]), float(row[3]), float(row[4]))
                        ohlc.set_ofr_ohlc(float(row[5]), float(row[6]), float(row[7]), float(row[8]))

                        # if float(row[9]) <= 0:
                        #   # we would prefer to ignore zero-volume OHLCs because they can break volume signals,
                        #   # but a zero can also mean the information is simply missing (like SPX500 on ig.com),
                        #   # so it is unclear how to handle those cases...
                        #   continue

                        ohlc.set_volume(float(row[9]))

                        if ohlc.timestamp >= Instrument.basetime(mk[3], time.time()):
                            ohlc.set_consolidated(False)  # current

                        ohlcs.append(ohlc)

                    # notify
                    mk[0].notify(Signal.SIGNAL_CANDLE_DATA_BULK, mk[1], (mk[2], mk[3], ohlcs))
            except self.psycopg2.OperationalError as e:
                self.try_reconnect(e)

                # retry the next time
                with self._mutex:
                    self._pending_ohlc_select = mks + self._pending_ohlc_select
            except Exception as e:
                self.on_error(e)

                # retry the next time
                with self._mutex:
                    self._pending_ohlc_select = mks + self._pending_ohlc_select

        #
        # insert market ohlcs
        #

        if time.time() - self._last_ohlc_flush >= 60 or len(self._pending_ohlc_insert) > 500:
            with self._mutex:
                mkd = self._pending_ohlc_insert
                self._pending_ohlc_insert = []

            if mkd:
                try:
                    cursor = self._db.cursor()

                    elts = []
                    data = set()

                    for mk in mkd:
                        if (mk[0], mk[1], mk[2], mk[3]) not in data:
                            elts.append("('%s', '%s', %i, %i, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5], mk[6], mk[7], mk[8], mk[9], mk[10], mk[11], mk[12]))
                            data.add((mk[0], mk[1], mk[2], mk[3]))

                    query = ' '.join(("INSERT INTO ohlc(broker_id, market_id, timestamp, timeframe, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume) VALUES",
                                ','.join(elts),
                                "ON CONFLICT (broker_id, market_id, timestamp, timeframe) DO UPDATE SET bid_open = EXCLUDED.bid_open, bid_high = EXCLUDED.bid_high, bid_low = EXCLUDED.bid_low, bid_close = EXCLUDED.bid_close, ask_open = EXCLUDED.ask_open, ask_high = EXCLUDED.ask_high, ask_low = EXCLUDED.ask_low, ask_close = EXCLUDED.ask_close, volume = EXCLUDED.volume"))

                    # query = ' '.join((
                    #     "INSERT INTO ohlc(broker_id, market_id, timestamp, timeframe, bid_open, bid_high, bid_low, bid_close, ask_open, ask_high, ask_low, ask_close, volume) VALUES",
                    #     ','.join(["('%s', '%s', %i, %i, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5], mk[6], mk[7], mk[8], mk[9], mk[10], mk[11], mk[12]) for mk in mkd]),
                    #     "ON CONFLICT (broker_id, market_id, timestamp, timeframe) DO UPDATE SET bid_open = EXCLUDED.bid_open, bid_high = EXCLUDED.bid_high, bid_low = EXCLUDED.bid_low, bid_close = EXCLUDED.bid_close, ask_open = EXCLUDED.ask_open, ask_high = EXCLUDED.ask_high, ask_low = EXCLUDED.ask_low, ask_close = EXCLUDED.ask_close, volume = EXCLUDED.volume"
                    # ))

                    cursor.execute(query)

                    self._db.commit()
                except self.psycopg2.OperationalError as e:
                    self.try_reconnect(e)

                    # retry the next time
                    with self._mutex:
                        self._pending_ohlc_insert = mkd + self._pending_ohlc_insert
                except Exception as e:
                    self.on_error(e)

                    # retry the next time
                    with self._mutex:
                        self._pending_ohlc_insert = mkd + self._pending_ohlc_insert

                self._last_ohlc_flush = time.time()

        #
        # insert market liquidation
        #

        with self._mutex:
            mkd = self._pending_liquidation_insert
            self._pending_liquidation_insert = []

        if mkd:
            try:
                cursor = self._db.cursor()

                elts = []

                for mk in mkd:
                    elts.append("('%s', '%s', %i, %i, '%s', '%s')" % (mk[0], mk[1], mk[2], mk[3], mk[4], mk[5]))

                # query = ' '.join(("INSERT INTO liquidation(broker_id, market_id, timestamp, direction, price, quantity) VALUES",
                #             ','.join(elts),
                #             "ON CONFLICT (broker_id, market_id, timestamp) DO NOTHING"))
                query = ' '.join(("INSERT INTO liquidation(broker_id, market_id, timestamp, direction, price, quantity) VALUES", ','.join(elts)))

                cursor.execute(query)

                self._db.commit()
            except self.psycopg2.OperationalError as e:
                self.try_reconnect(e)

                # retry the next time
                with self._mutex:
                    self._pending_liquidation_insert = mkd + self._pending_liquidation_insert
            except Exception as e:
                self.on_error(e)

                # retry the next time
                with self._mutex:
                    self._pending_liquidation_insert = mkd + self._pending_liquidation_insert

        #
        # clean older ohlcs
        #

        if self._autocleanup:
            if time.time() - self._last_ohlc_clean >= OhlcStorage.CLEANUP_DELAY:
                try:
                    now = time.time()
                    cursor = self._db.cursor()

                    for timeframe, timestamp in OhlcStorage.CLEANERS:
                        ts = int(now - timestamp) * 1000
                        # @todo make a count before
                        cursor.execute("DELETE FROM ohlc WHERE timeframe <= %i AND timestamp < %i" % (timeframe, ts))

                    self._db.commit()
                except self.psycopg2.OperationalError as e:
                    self.try_reconnect(e)
                except Exception as e:
                    self.on_error(e)

                self._last_ohlc_clean = time.time()
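
The selects and inserts above build SQL with Python's % string interpolation. As an alternative sketch, psycopg2 can also bind the values server-side, which avoids quoting and injection issues; here is the "from/to" select rewritten that way (table and column names taken from the example, cursor and the mk tuple assumed to exist as above):

    cursor.execute(
        "SELECT timestamp, bid_open, bid_high, bid_low, bid_close, "
        "ask_open, ask_high, ask_low, ask_close, volume FROM ohlc "
        "WHERE broker_id = %s AND market_id = %s AND timeframe = %s "
        "AND timestamp >= %s AND timestamp <= %s ORDER BY timestamp ASC",
        (mk[1], mk[2], mk[3], mk[4], mk[5]))
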
Example #4
    def fetch_and_generate(self,
                           market_id,
                           timeframe,
                           n_last=1,
                           cascaded=None):
        """
        For initial fetching of the current OHLC.
        """
        if timeframe > 0 and timeframe not in self.GENERATED_TF:
            logger.error("Timeframe %i is not allowed !" % (timeframe, ))
            return

        generators = []
        from_tf = timeframe

        if market_id not in self._last_ohlc:
            self._last_ohlc[market_id] = {}

        # compute a from date
        today = datetime.now().astimezone(UTC())
        from_date = today - timedelta(seconds=timeframe * n_last)
        to_date = today

        last_ohlcs = {}

        # cascaded generation of candles
        if cascaded:
            for tf in Watcher.GENERATED_TF:
                if tf > timeframe:
                    # from timeframe greater than initial
                    if tf <= cascaded:
                        # until max cascaded timeframe
                        generators.append(CandleGenerator(from_tf, tf))
                        from_tf = tf

                        # store for generation
                        last_ohlcs[tf] = []
                else:
                    from_tf = tf

        if timeframe > 0:
            last_ohlcs[timeframe] = []

        n = 0

        for data in self.fetch_candles(market_id, timeframe, from_date,
                                       to_date, None):
            # store (int timestamp in ms, str bid, str ofr, str volume)
            if not self._read_only:
                Database.inst().store_market_trade(
                    (self.name, market_id, data[0], data[1], data[2], data[3]))

            candle = Candle(float(data[0]) * 0.001, timeframe)

            candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]),
                                float(data[4]))
            candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]),
                                float(data[8]))

            candle.set_volume(float(data[9]))

            if candle.timestamp >= Instrument.basetime(timeframe, time.time()):
                candle.set_consolidated(False)  # current

            last_ohlcs[timeframe].append(candle)

            # only the last
            self._last_ohlc[market_id][timeframe] = candle

            # generate higher candles
            for generator in generators:
                candles = generator.generate_from_candles(
                    last_ohlcs[generator.from_tf], False)
                if candles:
                    if not self._read_only:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                    last_ohlcs[generator.to_tf].extend(candles)

                    # only the last as current
                    self._last_ohlc[market_id][generator.to_tf] = candles[-1]

                elif generator.current:
                    self._last_ohlc[market_id][
                        generator.to_tf] = generator.current

                # remove consumed candles
                last_ohlcs[generator.from_tf] = []

            n += 1

        for k, ohlc in self._last_ohlc[market_id].items():
            if ohlc:
                ohlc.set_consolidated(False)
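
A hypothetical call sketch: fetch the last 100 one-minute candles and let the generators cascade them up to one hour. The instance name and the timeframe values (expressed in seconds) are assumptions, not taken from the example:

    watcher.fetch_and_generate("EURUSD", 60, n_last=100, cascaded=60 * 60)
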
Example #5
    def update_ohlc(self, market_id, tf, ts, bid, ofr, volume):
        """
        Update the current OHLC or create a new one, and save them.
        @param market_id str Unique market identifier
        @param tf float Timeframe (normalized timeframe at second)
        @param ts float Timestamp of the update or of the tick/trade
        @param bid float Bid price.
        @param ofr float Offer/ask price.
        @param volume float Volume transacted or 0 if unspecified.
        """
        ended_ohlc = None
        ohlc = None

        # last ohlc per market id
        last_ohlc_by_timeframe = self._last_ohlc.get(market_id)
        if last_ohlc_by_timeframe is None:
            # not found for this market, insert it
            self._last_ohlc[market_id] = {tf: None}
            last_ohlc_by_timeframe = self._last_ohlc[market_id]

        if tf not in last_ohlc_by_timeframe:
            last_ohlc_by_timeframe[tf] = None
        else:
            ohlc = last_ohlc_by_timeframe[tf]

        if ohlc and ts >= ohlc.timestamp + tf:
            # need to close the current ohlc
            ohlc.set_consolidated(True)
            ended_ohlc = ohlc

            last_ohlc_by_timeframe[tf] = None
            ohlc = None

        if ohlc is None:
            # open a new one if necessary
            base_time = Instrument.basetime(tf, ts)
            ohlc = Candle(base_time, tf)

            ohlc.set_consolidated(False)

            if bid:
                ohlc.set_bid(bid)
            if ofr:
                ohlc.set_ofr(ofr)

            last_ohlc_by_timeframe[tf] = ohlc

        if ts >= ohlc.timestamp:
            # update the current OHLC
            if volume:
                ohlc._volume += volume

            if bid:
                if not ohlc._bid_open:
                    ohlc.set_bid(bid)

                # update bid prices
                ohlc._bid_high = max(ohlc._bid_high, bid)
                ohlc._bid_low = min(ohlc._bid_low, bid)

                # potential close
                ohlc._bid_close = bid

            if ofr:
                if not ohlc.ofr_open:
                    ohlc.set_ofr(ofr)

                # update ofr prices
                ohlc._ofr_high = max(ohlc._ofr_high, ofr)
                ohlc._ofr_low = min(ohlc._ofr_low, ofr)

                # potential close
                ohlc._ofr_close = ofr

        # stored timeframes only
        if ended_ohlc and (tf in self.STORED_TIMEFRAMES):
            Database.inst().store_market_ohlc(
                (self.name, market_id, int(ended_ohlc.timestamp * 1000), tf,
                 ended_ohlc.bid_open, ended_ohlc.bid_high, ended_ohlc.bid_low,
                 ended_ohlc.bid_close, ended_ohlc.ofr_open,
                 ended_ohlc.ofr_high, ended_ohlc.ofr_low, ended_ohlc.ofr_close,
                 ended_ohlc.volume))

        return ohlc
Example #6
    def basetime(self):
        """
        Base time of the related candle for the timestamp of the signal.
        """
        return Instrument.basetime(self.timeframe, self.ts)
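
For reference, a minimal sketch of what Instrument.basetime is assumed to do in these examples: align a timestamp to the opening time of its timeframe bucket (calendar-based timeframes such as months would need special handling that this sketch ignores):

    def basetime(timeframe, timestamp):
        # floor the timestamp to the start of its candle
        if timeframe <= 0:
            return timestamp
        return int(timestamp / timeframe) * timeframe
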
Example #7
    def compute(self, timestamp, timestamps, high, low, close):
        size = len(close)

        basis = ta_SMA(close, self._length)
        atrs = ta_ATR(high, low, close, timeperiod=self._length)

        dev = atrs * self._coeff
        upper = basis + dev
        lower = basis - dev

        with np.errstate(divide='ignore', invalid='ignore'):
            # replace by 0 when divisor is 0
            bbr = (close - lower) / (upper - lower)
            bbr[bbr == np.inf] = 0.0

        bbe = ta_EMA(bbr, self._length_MA)

        if len(self._tup) != size:
            self._tup = np.zeros(size)
            self._tdn = np.zeros(size)

            self._tup[0] = self._tup[1] = np.NaN
            self._tdn[0] = self._tdn[1] = np.NaN

        for i in range(2, size):
            if bbe[i - 1] > bbe[i] and bbe[i - 2] < bbe[i - 1]:
                self._tup[i] = bbe[i]
            else:
                self._tup[i] = np.NaN

        for i in range(2, size):
            if bbe[i - 1] < bbe[i] and bbe[i - 2] > bbe[i - 1]:
                self._tdn[i] = bbe[i]
            else:
                self._tdn[i] = np.NaN

        highest = ta_MAX(high, 3)
        lowest = ta_MIN(low, 3)

        last_up = 0.0
        last_dn = 0.0

        for i in range(2, size):
            if not np.isnan(self._tup[i]):
                last_up = self._tup[i] = highest[i]
            elif last_up > 0.0:
                self._tup[i] = last_up

            if not np.isnan(self._tdn[i]):
                last_dn = self._tdn[i] = lowest[i]
            elif last_dn > 0.0:
                self._tdn[i] = last_dn

        # logger.debug("%s %s %s" % (self._tdn[-3], self._tdn[-2], self._tdn[-1]))

        # logger.debug("%s - %s %s / %s %s %s /  %s %s %s / %s %s %s / %s %s %s" % (
        #     datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S'),
        #     datetime.utcfromtimestamp(timestamps[-2]).strftime('%Y-%m-%d %H:%M:%S'),
        #     datetime.utcfromtimestamp(timestamps[-1]).strftime('%Y-%m-%d %H:%M:%S'),
        #     self._tdn[-3], self._tdn[-2], self._tdn[-1], close[-3], close[-2], close[-1], low[-3], low[-2], low[-1], lowest[-3], lowest[-2], lowest[-1]))

        # compact the timeseries
        from_timestamp = Instrument.basetime(self.timeframe,
                                             self._last_timestamp)  # inclusive
        to_timestamp = Instrument.basetime(self.timeframe,
                                           timestamp)  # exclusive

        delta = min(
            int((to_timestamp - from_timestamp) / self._timeframe) + 1,
            len(timestamps))

        # base index
        num = len(timestamps)

        last_up = self._up[-1] if len(self._up) else np.NaN
        last_dn = self._down[-1] if len(self._down) else np.NaN

        # if len(self._tdn) and not np.isnan(self._tdn[-1]) and self._tdn[-1] != last_dn:
        #     self._down.append(self._tdn[-1])
        #     last_dn = self._tdn[-1]
        #     logger.info("> %s %s" % (datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S'), last_dn))

        # if len(self._tup) and not np.isnan(self._tup[-1]) and self._tup[-1] != last_up:
        #     self._up.append(self._tup[-1])

        for b in range(num - delta, num):
            # only most recent and complete
            if from_timestamp <= timestamps[b] < to_timestamp:
                if not np.isnan(
                        self._tup[b]
                ) and self._tup[b] != last_up and self._tup[b] > 0.0:
                    last_up = self._tup[b]

                    self._up.append(last_up)
                    self._both.append(last_up)

                    if len(self._up) > self._max_history:
                        self._up.pop(0)

                    if len(self._both) > 2 * self._max_history:
                        self._both.pop(0)

                if not np.isnan(
                        self._tdn[b]
                ) and self._tdn[b] != last_dn and self._tdn[b] > 0.0:
                    last_dn = self._tdn[b]
                    # logger.info("%s %s" % (datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S'), last_dn))

                    self._down.append(last_dn)
                    self._both.append(last_dn)

                    if len(self._down) > self._max_history:
                        self._down.pop(0)

                    if len(self._both) > 2 * self._max_history:
                        self._both.pop(0)

        # if self.timeframe == 60:
        #    logger.info("%s %s" % (self._tup, self._tdn))
        #    logger.info("%s %s" % (self._up, self._down))
        # if self.timeframe == 60*60*24:
        #     logger.info("%s" % (self._both))

        if len(atrs):
            # retain the last ATR value
            self._last_atr = atrs[-1]

        self._last_timestamp = timestamp

        return self._up, self._down
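
A self-contained sketch of the band ratio computed at the top of compute(), using plain numpy in place of the ta_* helpers (a trailing simple moving average stands in for SMA/ATR, purely illustrative):

    import numpy as np

    def sma(x, n):
        # trailing simple moving average, NaN-padded to keep the input length
        out = np.full(len(x), np.nan)
        if len(x) >= n:
            out[n - 1:] = np.convolve(x, np.ones(n) / n, mode='valid')
        return out

    close = np.array([10.0, 10.5, 10.2, 10.8, 11.0, 10.9, 11.2])
    high, low = close + 0.2, close - 0.2

    length, coeff = 3, 2.0
    basis = sma(close, length)
    dev = sma(high - low, length) * coeff   # crude stand-in for ATR * coeff
    upper, lower = basis + dev, basis - dev

    with np.errstate(divide='ignore', invalid='ignore'):
        bbr = (close - lower) / (upper - lower)
        bbr[bbr == np.inf] = 0.0

    print(bbr)  # relative position of the close inside the band, per bar
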