Exemplo n.º 1
0
async def main():
    """Round a fixed series of floats, accumulate them with Scan, and print each running value."""
    scheduler = RealtimeNetworkScheduler()
    network = scheduler.get_network()
    source = From(scheduler, [0.0, 3.2, 2.1, 2.9, 8.3, 5.7])
    # round is already a unary callable; no lambda wrapper needed
    rounded = Map(network, source, round)
    running = Scan(network, rounded)
    Do(network, running, lambda: print(f"{running.get_value()}"))
Exemplo n.º 2
0
async def main():
    """Print each tick from an Interval source alongside a rolling five-element window over it."""
    scheduler = RealtimeNetworkScheduler()
    network = scheduler.get_network()
    ticks = Interval(scheduler)
    Do(network, ticks, lambda: print(f"input values: {ticks.get_value()}"))

    windowed = WindowWithCount(network, ticks, count=5)
    Do(network, windowed, lambda: print(f"window values: {windowed.get_value()}"))
Exemplo n.º 3
0
async def main():
    """Demonstrate count-based vs. time-based buffering of an Interval source."""
    scheduler = RealtimeNetworkScheduler()
    network = scheduler.get_network()
    ticks = Interval(scheduler)
    Do(network, ticks, lambda: print(f"input values: {ticks.get_value()}"))

    # emit every 2 elements
    by_count = BufferWithCount(network, ticks, count=2)
    Do(network, by_count,
       lambda: print(f"buffer1 values: {by_count.get_value()}"))

    # emit every 5 seconds
    by_time = BufferWithTime(scheduler, ticks, timedelta(seconds=5))
    Do(network, by_time,
       lambda: print(f"buffer2 values: {by_time.get_value()}"))
Exemplo n.º 4
0
async def main():
    """Compute min/max/mean/stddev over a fixed series and print a one-line summary on each update."""
    scheduler = RealtimeNetworkScheduler()
    network = scheduler.get_network()
    samples = From(scheduler, [0.0, 3.2, 2.1, 2.9, 8.3, 5.7])

    hi = Max(network, samples)
    lo = Min(network, samples)
    mean = Mean(network, samples)
    sd = Stddev(network, samples)

    # noinspection PyUnusedLocal
    def print_stats(params):
        # params (the list of updated signals) is unused; read each stat directly
        print(
            f"min = {lo.get_value()}; max = {hi.get_value()}; "
            f"avg = {mean.get_value():.2f}; stddev = {sd.get_value():.2f}")

    Lambda(network, [lo, hi, mean, sd], print_stats)
Exemplo n.º 5
0
def ws_fh_main(create_fh, uri_scheme: str, instance_id: str, journal_path: str, db: str, journal_books: bool = True,
               include_symbol: str = '*'):
    """Run a websocket feedhandler and journal its trade prints (and,
    optionally, level-1 order book updates) to per-instrument journals.

    :param create_fh: factory called as create_fh(scheduler, instr_cache,
        include_symbol, instance_id) to construct the feedhandler
    :param uri_scheme: URI scheme used to look up feeds in the registry
    :param instance_id: feedhandler instance identifier
    :param journal_path: base directory for journal files
    :param db: name prefix for the _TRADES / _BOOKS journal directories
    :param journal_books: if True, also journal order book updates
    :param include_symbol: journal only this symbol, or '*' for all
    """
    init_logging()
    logger = logging.getLogger(__name__)

    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()

    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    scheduler = RealtimeNetworkScheduler()
    registry = FeedHandlerRegistry()
    fh = create_fh(scheduler, instr_cache, include_symbol, instance_id)
    registry.register(fh)

    for instrument in fh.get_instruments():
        symbol = instrument.get_exchange_instrument_code()
        if not (symbol == include_symbol or include_symbol == '*'):
            continue

        # subscribe to FeedState in advance so we know when the Feed is ready to subscribe trades
        class SubscribeTrades(Event):
            def __init__(self, trade_symbol):
                self.trade_symbol = trade_symbol
                # journal appender; created lazily once the feed goes LIVE
                self.appender = None

            def on_activate(self) -> bool:
                # once the feedhandler reports LIVE, open a journal for this
                # instrument and start journaling every trade print
                if fh.get_state().get_value() == FeedHandlerState.LIVE:
                    feed = registry.get_feed(f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                    instrument_code = feed.get_instrument().get_exchange_instrument_code()
                    journal = Journal(Path(f'{journal_path}/{db}_TRADES/{instrument_code}'))
                    self.appender = journal.create_appender()

                    trades = feed.get_trades()
                    Do(scheduler.get_network(), trades, lambda: self.on_trade_print(trades.get_value()))
                return False

            def on_trade_print(self, trade):
                # write one fixed-width journal record per trade print
                logger.info(trade)

                self.appender.write_double(datetime.utcnow().timestamp())
                # NOTE(review): trade_id is written twice in a row — looks
                # like a copy/paste slip; confirm the journal schema really
                # expects two identical long fields here
                self.appender.write_long(trade.get_trade_id())
                self.appender.write_long(trade.get_trade_id())
                self.appender.write_string(trade.get_instrument().get_exchange_instrument_code())
                self.appender.write_short(1 if trade.get_side() == Side.BUY else 0)
                self.appender.write_double(trade.get_qty())
                self.appender.write_double(trade.get_price())

        if journal_books:
            class SubscribeOrderBook(Event):
                def __init__(self, trade_symbol):
                    self.trade_symbol = trade_symbol
                    # journal appender; created lazily once the feed goes LIVE
                    self.appender = None

                def on_activate(self) -> bool:
                    # mirror of SubscribeTrades.on_activate, but for books
                    if fh.get_state().get_value() == FeedHandlerState.LIVE:
                        feed = registry.get_feed(f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                        instrument_code = feed.get_instrument().get_exchange_instrument_code()
                        journal = Journal(Path(f'{journal_path}/{db}_BOOKS/{instrument_code}'))
                        self.appender = journal.create_appender()

                        books = feed.get_order_books()
                        Do(scheduler.get_network(), books, lambda: self.on_book_update(books.get_value()))
                    return False

                def on_book_update(self, book: OrderBook):
                    # journal best bid/ask qty+px, writing zeros when a side is empty
                    self.appender.write_double(datetime.utcnow().timestamp())
                    if len(book.get_bids()) > 0:
                        self.appender.write_long(book.get_best_bid().get_qty())
                        self.appender.write_double(book.get_best_bid().get_px())
                    else:
                        self.appender.write_long(0)
                        self.appender.write_double(0)

                    if len(book.get_asks()) > 0:
                        self.appender.write_long(book.get_best_ask().get_qty())
                        self.appender.write_double(book.get_best_ask().get_px())
                    else:
                        self.appender.write_long(0)
                        self.appender.write_double(0)

            scheduler.get_network().connect(fh.get_state(), SubscribeOrderBook(symbol))

        scheduler.get_network().connect(fh.get_state(), SubscribeTrades(symbol))

    # async start the feedhandler
    asyncio.ensure_future(fh.start())

    # crash out on any exception
    asyncio.get_event_loop().set_exception_handler(custom_asyncio_error_handler)

    # go!
    asyncio.get_event_loop().run_forever()
Exemplo n.º 6
0
async def main():
    """Feed Interval ticks through Scan and print the running accumulation."""
    scheduler = RealtimeNetworkScheduler()
    network = scheduler.get_network()
    ticks = Interval(scheduler)
    running = Scan(network, ticks)
    Do(network, running, lambda: print(f"{running.get_value()}"))
Exemplo n.º 7
0
async def main():
    """Print the running sum of a fixed series, formatted to two decimals."""
    scheduler = RealtimeNetworkScheduler()
    network = scheduler.get_network()
    series = From(scheduler, [0.0, 3.2, 2.1, 2.9, 8.3, 5.7])
    running_sum = RunningSum(network, series)
    # Lambda passes the list of updated signals; the sum is the first (only) one
    Lambda(network, running_sum, lambda x: print(f'{x[0].get_value():.2f}'))
Exemplo n.º 8
0
def ws_fh_main(create_fh,
               uri_scheme: str,
               instance_id: str,
               journal_path: str,
               db: str,
               journal_books: bool = True,
               include_symbol: str = '*'):
    """Run a websocket feedhandler, export Prometheus counters, and journal
    trade prints (and, optionally, level-1 book updates) as Cap'n Proto
    messages via per-instrument transaction logs.

    :param create_fh: factory called as create_fh(scheduler, instr_cache,
        include_symbol, instance_id) to construct the feedhandler
    :param uri_scheme: URI scheme used to look up feeds in the registry
    :param instance_id: feedhandler instance identifier
    :param journal_path: base directory for transaction log files
    :param db: name prefix for the _TRADES / _BOOKS log directories
    :param journal_books: if True, also journal order book updates
    :param include_symbol: journal only this symbol, or '*' for all
    """
    init_logging()
    logger = logging.getLogger(__name__)

    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()

    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    scheduler = RealtimeNetworkScheduler()
    registry = FeedHandlerRegistry()
    fh = create_fh(scheduler, instr_cache, include_symbol, instance_id)
    registry.register(fh)

    # register Prometheus metrics
    trade_counter = Counter('serenity_trade_counter',
                            'Number of trade prints received by feedhandler')
    book_update_counter = Counter(
        'serenity_book_update_counter',
        'Number of book updates received by feedhandler')

    for instrument in fh.get_instruments():
        symbol = instrument.get_exchange_instrument_code()
        if not (symbol == include_symbol or include_symbol == '*'):
            continue

        # subscribe to FeedState in advance so we know when the Feed is ready to subscribe trades
        class SubscribeTrades(Event):
            def __init__(self, trade_symbol):
                self.trade_symbol = trade_symbol
                # transaction log writer; created lazily once the feed goes LIVE
                self.tx_writer = None

            def on_activate(self) -> bool:
                # once the feedhandler reports LIVE, open a transaction log
                # for this instrument and start journaling trade prints
                if fh.get_state().get_value() == FeedHandlerState.LIVE:
                    feed = registry.get_feed(
                        f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                    instrument_code = feed.get_instrument(
                    ).get_exchange_instrument_code()
                    txlog = TransactionLog(
                        Path(f'{journal_path}/{db}_TRADES/{instrument_code}'))
                    self.tx_writer = txlog.create_writer()

                    trades = feed.get_trades()
                    Do(scheduler.get_network(), trades,
                       lambda: self.on_trade_print(trades.get_value()))
                return False

            def on_trade_print(self, trade):
                # bump the Prometheus counter and append one TradeMessage
                trade_counter.inc()
                logger.info(trade)

                trade_msg = capnp_def.TradeMessage.new_message()
                trade_msg.time = datetime.utcnow().timestamp()
                trade_msg.tradeId = trade.get_trade_id()
                trade_msg.side = capnp_def.Side.buy if trade.get_side(
                ) == Side.BUY else capnp_def.Side.sell
                trade_msg.size = trade.get_qty()
                trade_msg.price = trade.get_price()

                self.tx_writer.append_msg(trade_msg)

        if journal_books:

            class SubscribeOrderBook(Event):
                def __init__(self, trade_symbol):
                    self.trade_symbol = trade_symbol
                    # transaction log writer; created lazily once the feed goes LIVE
                    self.tx_writer = None

                def on_activate(self) -> bool:
                    # mirror of SubscribeTrades.on_activate, but for books
                    if fh.get_state().get_value() == FeedHandlerState.LIVE:
                        feed = registry.get_feed(
                            f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                        instrument_code = feed.get_instrument(
                        ).get_exchange_instrument_code()
                        txlog = TransactionLog(
                            Path(f'{journal_path}/{db}_BOOKS/{instrument_code}'
                                 ))
                        self.tx_writer = txlog.create_writer()

                        books = feed.get_order_books()
                        Do(scheduler.get_network(), books,
                           lambda: self.on_book_update(books.get_value()))
                    return False

                def on_book_update(self, book: OrderBook):
                    # append one Level1BookUpdateMessage with best bid/ask,
                    # writing zeros when a side of the book is empty
                    book_update_counter.inc()

                    book_msg = capnp_def.Level1BookUpdateMessage.new_message()
                    book_msg.time = datetime.utcnow().timestamp()
                    if len(book.get_bids()) > 0:
                        book_msg.bestBidQty = book.get_best_bid().get_qty()
                        book_msg.bestBidPx = book.get_best_bid().get_px()
                    else:
                        book_msg.bestBidQty = 0
                        book_msg.bestBidPx = 0

                    if len(book.get_asks()) > 0:
                        book_msg.bestAskQty = book.get_best_ask().get_qty()
                        book_msg.bestAskPx = book.get_best_ask().get_px()
                    else:
                        book_msg.bestAskQty = 0
                        book_msg.bestAskPx = 0

                    self.tx_writer.append_msg(book_msg)

            scheduler.get_network().connect(fh.get_state(),
                                            SubscribeOrderBook(symbol))

        scheduler.get_network().connect(fh.get_state(),
                                        SubscribeTrades(symbol))

    # launch the monitoring endpoint
    start_http_server(8000)

    # async start the feedhandler
    asyncio.ensure_future(fh.start())

    # crash out on any exception
    asyncio.get_event_loop().set_exception_handler(
        custom_asyncio_error_handler)

    # go!
    asyncio.get_event_loop().run_forever()
Exemplo n.º 9
0
    def __init__(self, config_path: str, strategy_dir: str):
        """Boot the engine from a YAML config: wire up feedhandlers, order
        placers and supporting services, then load the configured strategies.

        :param config_path: path to the engine's YAML configuration file
        :param strategy_dir: directory appended to sys.path so strategy
            modules can be imported by name
        :raises ValueError: on an unsupported API version, feedhandler
            exchange, or order placer exchange, or on missing Phemex
            credentials
        """
        sys.path.append(strategy_dir)

        with open(config_path, 'r') as config_yaml:
            logger = logging.getLogger(__name__)

            logger.info('Serenity starting up')
            config = yaml.safe_load(config_yaml)
            api_version = config['api-version']
            if api_version != 'v1Beta':
                raise ValueError(f'Unsupported API version: {api_version}')

            self.engine_env = Environment(config['environment'])
            instance_id = self.engine_env.getenv('EXCHANGE_INSTANCE', 'prod')
            self.fh_registry = FeedHandlerRegistry()

            account = self.engine_env.getenv('EXCHANGE_ACCOUNT', 'Main')

            logger.info('Connecting to Serenity database')
            conn = connect_serenity_db()
            conn.autocommit = True
            cur = conn.cursor()

            scheduler = RealtimeNetworkScheduler()
            instrument_cache = InstrumentCache(cur, TypeCodeCache(cur))
            if 'feedhandlers' in config:
                logger.info('Registering feedhandlers')
                for feedhandler in config['feedhandlers']:
                    fh_name = feedhandler['exchange']
                    include_symbol = feedhandler.get('include_symbol', '*')
                    if fh_name == 'Phemex':
                        self.fh_registry.register(
                            PhemexFeedHandler(scheduler, instrument_cache,
                                              include_symbol, instance_id))
                    elif fh_name == 'CoinbasePro':
                        self.fh_registry.register(
                            CoinbaseProFeedHandler(scheduler, instrument_cache,
                                                   instance_id))
                    else:
                        raise ValueError(
                            f'Unsupported feedhandler type: {fh_name}')

            oms = OrderManagerService(scheduler,
                                      TimescaleDbTradeBookingService())
            op_service = OrderPlacerService(scheduler, oms)
            md_service = FeedHandlerMarketdataService(scheduler,
                                                      self.fh_registry,
                                                      instance_id)
            mark_service = PhemexMarkService(scheduler, instrument_cache,
                                             instance_id)
            self.xps = None

            extra_outputs_txt = self.engine_env.getenv('EXTRA_OUTPUTS')
            if extra_outputs_txt is None:
                extra_outputs = []
            else:
                extra_outputs = extra_outputs_txt.split(',')
            self.dcs = HDF5DataCaptureService(Mode.LIVE, scheduler,
                                              extra_outputs)

            if 'order_placers' in config:
                logger.info('Registering OrderPlacers')
                for order_placer in config['order_placers']:
                    op_name = order_placer['exchange']
                    if op_name == 'Phemex':
                        api_key = self.engine_env.getenv('PHEMEX_API_KEY')
                        api_secret = self.engine_env.getenv(
                            'PHEMEX_API_SECRET')
                        if not api_key:
                            raise ValueError('missing PHEMEX_API_KEY')
                        if not api_secret:
                            raise ValueError('missing PHEMEX_API_SECRET')

                        credentials = AuthCredentials(api_key, api_secret)
                        op_service.register_order_placer(
                            f'phemex:{instance_id}',
                            PhemexOrderPlacer(credentials, scheduler, oms,
                                              account, instance_id))

                        self.xps = PhemexExchangePositionService(
                            credentials, scheduler, instrument_cache, account,
                            instance_id)
                        self.ps = PositionService(scheduler, oms)
                    else:
                        raise ValueError(
                            f'Unsupported order placer: {op_name}')

            # NOTE(review): self.ps is only assigned inside the Phemex
            # order-placer branch above, but is referenced unconditionally
            # when building StrategyContext below — confirm configs always
            # declare a Phemex order placer, or add a default.
            self.strategies = []
            self.strategy_names = []
            for strategy in config['strategies']:
                strategy_name = strategy['name']
                self.strategy_names.append(strategy_name)
                # bugfix: was self.logger, which is never assigned here;
                # use the local module logger created above
                logger.info(f'Loading strategy: {strategy_name}')
                module_name = strategy['module']
                strategy_class = strategy['strategy-class']
                env = Environment(strategy['environment'],
                                  parent=self.engine_env)

                # avoid rebinding the config string with the module object
                strategy_module = importlib.import_module(module_name)
                klass = getattr(strategy_module, strategy_class)
                strategy_instance = klass()
                ctx = StrategyContext(scheduler, instrument_cache, md_service,
                                      mark_service, op_service, self.ps,
                                      self.xps, self.dcs, env.values)
                self.strategies.append((strategy_instance, ctx))
Exemplo n.º 10
0
async def main():
    """Greet each value emitted by a single-element From source."""
    scheduler = RealtimeNetworkScheduler()
    greeting = From(scheduler, ["world"])
    Do(scheduler.get_network(), greeting,
       lambda: print(f"Hello, {greeting.get_value()}!"))
Exemplo n.º 11
0
async def main():
    """Keep only the non-negative values from a fixed series and print each one."""
    scheduler = RealtimeNetworkScheduler()
    network = scheduler.get_network()
    series = From(scheduler, [0.0, -3.2, 2.1, -2.9, 8.3, -5.7])
    non_negative = Filter(network, series, lambda v: v >= 0.0)
    Do(network, non_negative, lambda: print(f"{non_negative.get_value()}"))