Example #1
def main(config_path: str,
         start_time: str = None,
         end_time: str = None,
         strategy_dir: str = '.'):
    init_logging()
    config = BacktestConfig.load(Path(config_path), Path(strategy_dir),
                                 start_time, end_time)
    engine = AlgoBacktester(config)
    engine.run()
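
Example #8 below imports the fire library, so entry points like this one are presumably exposed as CLIs by a module footer. A minimal sketch, assuming fire drives it (the file name and flag values are illustrative):

if __name__ == '__main__':
    import fire
    fire.Fire(main)

fire then maps keyword arguments to flags, e.g. python backtest.py --config_path=backtest.toml --start_time=2020-01-01T00:00:00.
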
Example #2
def ws_fh_main(create_fh, uri_scheme: str, instance_id: str, journal_path: str, db: str):
    init_logging()
    logger = logging.getLogger(__name__)

    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()

    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    scheduler = NetworkScheduler()
    registry = FeedHandlerRegistry()
    fh = create_fh(scheduler, instr_cache, instance_id)
    registry.register(fh)

    for instrument in fh.get_instruments():
        symbol = instrument.get_exchange_instrument_code()

        # subscribe to FeedState in advance so we know when the Feed is ready to subscribe trades
        class SubscribeTrades(Event):
            def __init__(self, trade_symbol):
                self.trade_symbol = trade_symbol
                self.appender = None

            def on_activate(self) -> bool:
                if fh.get_state().get_value() == FeedHandlerState.LIVE:
                    feed = registry.get_feed(f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                    instrument_code = feed.get_instrument().get_exchange_instrument_code()
                    journal = Journal(Path(f'{journal_path}/{db}/{instrument_code}'))
                    self.appender = journal.create_appender()

                    trades = feed.get_trades()
                    Do(scheduler.get_network(), trades, lambda: self.on_trade_print(trades.get_value()))
                return False

            def on_trade_print(self, trade):
                logger.info(trade)

                # fixed journal record layout, read back in the same order by
                # the uploader in Example #5: timestamp, sequence, trade ID,
                # symbol, side, quantity, price; here the trade ID doubles as
                # the sequence number, so it is written twice
                self.appender.write_double(datetime.utcnow().timestamp())
                self.appender.write_long(trade.get_trade_id())
                self.appender.write_long(trade.get_trade_id())
                self.appender.write_string(trade.get_instrument().get_exchange_instrument_code())
                # side is encoded as a short: 1 for buy, 0 for sell
                self.appender.write_short(1 if trade.get_side().get_type_code() == 'Buy' else 0)
                self.appender.write_double(trade.get_qty())
                self.appender.write_double(trade.get_price())

        scheduler.get_network().connect(fh.get_state(), SubscribeTrades(symbol))

    # async start the feedhandler
    asyncio.ensure_future(fh.start())

    # crash out on any exception
    asyncio.get_event_loop().set_exception_handler(custom_asyncio_error_handler)

    # go!
    asyncio.get_event_loop().run_forever()
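
custom_asyncio_error_handler is not shown in these excerpts. Going only by the "crash out on any exception" comment, a minimal stand-in would log the failure and stop the loop so the process dies loudly instead of limping along with a broken feed (the name and exact behavior are assumptions):

def custom_asyncio_error_handler(loop, context):
    # context is the dict asyncio passes to exception handlers;
    # 'message' is always present, 'exception' only sometimes
    logging.getLogger(__name__).error('unhandled asyncio error: %s',
                                      context.get('exception', context['message']))
    loop.stop()
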
Example #3
def main(config_path: str, strategy_dir: str, start_time: str, end_time: str):
    init_logging()

    config = toml.load(config_path)
    strategy_module = config['strategy']['module']
    strategy_class = config['strategy']['class']
    loader = StrategyLoader(Path(strategy_dir))
    strategy_instance = loader.load(strategy_module, strategy_class)

    scheduler = HistoricNetworkScheduler.new_instance(start_time, end_time)
    backtester = InvestmentStrategyBacktester(scheduler)
    portfolio = backtester.run(strategy_instance, config)
    scheduler.run()

    for account in portfolio.get_accounts():
        print(f'Account: [{account.get_name()}]')
        print(f'\tcash: {account.get_cash_balance().get_balance()}')
        for position in account.get_positions():
            print(
                f'\t{position.get_tradable().get_symbol()}: {position.get_qty()} shares ({position.get_notional()})'
            )
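
The loader reads the strategy's entry point from a [strategy] table, so the TOML file passed in must contain at least the two keys read above. A minimal config sketch (the module and class names are hypothetical):

[strategy]
module = "buy_and_hold"
class = "BuyAndHoldStrategy"

The full parsed config is then handed to backtester.run() alongside the strategy instance.
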
Example #4
def tickstore_admin(action: str,
                    db: str,
                    staging_dir: str = '/mnt/raid/data/behemoth/db',
                    connect_str: str = None,
                    db_prefix: str = None):
    init_logging()

    if action == 'reindex':
        tickstore = LocalTickstore(Path(f'{staging_dir}/{db}'),
                                   timestamp_column='time')
        tickstore.index.reindex()
        tickstore.close()
    elif action == 'strip_prefix':
        tickstore = LocalTickstore(Path(f'{staging_dir}/{db}'),
                                   timestamp_column='time')
        tickstore.index.strip_prefix(db_prefix)
        tickstore.close()
    elif action == 'list':
        tickstore = LocalTickstore(Path(f'{staging_dir}/{db}'),
                                   timestamp_column='time')
        for symbol in tickstore.index.symbols():
            print(symbol)
            for entry in tickstore.index.entries(symbol):
                print(f'\t{entry}')
        tickstore.close()
    elif action == 'cloudsync':
        local_tickstore = LocalTickstore(Path(f'{staging_dir}/{db}'),
                                         timestamp_column='time')
        cloud_tickstore = AzureBlobTickstore(connect_str, db)
        for symbol in local_tickstore.index.symbols():
            for entry in local_tickstore.index.entries(symbol):
                logical_path = entry.path
                ticks = local_tickstore.read(logical_path)
                cloud_tickstore.insert(entry.symbol, entry.ts, ticks)

        local_tickstore.close()
        cloud_tickstore.close()
    else:
        raise ValueError(f'Unknown action: {action}')
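
A sketch of how these actions might be driven, assuming a fire-style CLI as in Example #8 (the database name, prefix, and environment variable are illustrative):

import os

tickstore_admin('list', 'COINBASE_PRO_TRADES')
tickstore_admin('strip_prefix', 'COINBASE_PRO_TRADES', db_prefix='COINBASE_PRO_TRADES')
tickstore_admin('cloudsync', 'COINBASE_PRO_TRADES',
                connect_str=os.environ['AZURE_STORAGE_CONNECTION_STRING'])
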
Example #5
def upload_main(behemoth_path: str = '/behemoth', days_back: int = 1):
    init_logging()
    logger = logging.getLogger(__name__)
    upload_date = datetime.datetime.utcnow().date() - datetime.timedelta(
        days_back)

    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()
    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    exchanges = {
        'Phemex': 'PHEMEX_TRADES',
        'CoinbasePro': 'COINBASE_PRO_TRADES'
    }
    for exchange, db in exchanges.items():
        for instrument in instr_cache.get_all_exchange_instruments(exchange):
            symbol = instrument.get_exchange_instrument_code()
            path = Path(f'{behemoth_path}/journals/{db}/{symbol}')
            journal = Journal(path)

            try:
                reader = journal.create_reader(upload_date)

                length = reader.get_length()
                records = []
                while reader.get_pos() < length:
                    time = reader.read_double()
                    sequence = reader.read_long()
                    trade_id = reader.read_long()
                    product_id = reader.read_string()
                    side = 'buy' if reader.read_short() == 0 else 'sell'
                    size = reader.read_double()
                    price = reader.read_double()

                    record = {
                        'time': datetime.datetime.fromtimestamp(time),
                        'sequence': sequence,
                        'trade_id': trade_id,
                        'product_id': product_id,
                        'side': side,
                        'size': size,
                        'price': price
                    }
                    records.append(record)

                if len(records) > 0:
                    logger.info(
                        f'uploading journaled {exchange}/{symbol} ticks to Behemoth for UTC date {str(upload_date)}'
                    )
                    df = pd.DataFrame(records)
                    df.set_index('time', inplace=True)
                    logger.info(f'extracted {len(df)} {symbol} trade records')
                    tickstore = LocalTickstore(
                        Path(f'{behemoth_path}/db/{db}'), 'time')
                    tickstore.insert(symbol, BiTimestamp(upload_date), df)
                    tickstore.close()

                    logger.info(f'inserted {len(df)} {symbol} records')
                else:
                    logger.info(
                        f'zero {exchange}/{symbol} ticks for UTC date {str(upload_date)}'
                    )
                    tickstore = LocalTickstore(
                        Path(f'{behemoth_path}/db/{db}'), 'time')
                    tickstore.close()
            except NoSuchJournalException:
                logger.error(f'missing journal file: {path}')
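
The decode loop above implies a fixed record layout: double, long, long, string, short, double, double. As a sketch, the same logic could be lifted into a generator using only the reader calls shown (the helper name is hypothetical):

def read_trade_records(reader):
    # yields one dict per journal record; Python evaluates dict
    # entries in source order, so the reads happen in record order
    length = reader.get_length()
    while reader.get_pos() < length:
        yield {
            'time': datetime.datetime.fromtimestamp(reader.read_double()),
            'sequence': reader.read_long(),
            'trade_id': reader.read_long(),
            'product_id': reader.read_string(),
            'side': 'buy' if reader.read_short() == 0 else 'sell',
            'size': reader.read_double(),
            'price': reader.read_double()
        }

pd.DataFrame(list(read_trade_records(reader))) would then replace the manual records list.
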
Example #6
def main(config_path: str, strategy_dir: str = '.'):
    init_logging()
    engine = AlgoEngine(config_path, strategy_dir)
    engine.start()
Example #7
def ws_fh_main(create_fh,
               uri_scheme: str,
               instance_id: str,
               journal_path: str,
               db: str,
               journal_books: bool = True,
               include_symbol: str = '*'):
    init_logging()
    logger = logging.getLogger(__name__)

    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()

    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    scheduler = RealtimeNetworkScheduler()
    registry = FeedHandlerRegistry()
    fh = create_fh(scheduler, instr_cache, include_symbol, instance_id)
    registry.register(fh)

    # register Prometheus metrics
    trade_counter = Counter('serenity_trade_counter',
                            'Number of trade prints received by feedhandler')
    book_update_counter = Counter(
        'serenity_book_update_counter',
        'Number of book updates received by feedhandler')

    for instrument in fh.get_instruments():
        symbol = instrument.get_exchange_instrument_code()
        if not (symbol == include_symbol or include_symbol == '*'):
            continue

        # subscribe to FeedState in advance so we know when the Feed is ready to subscribe trades
        class SubscribeTrades(Event):
            def __init__(self, trade_symbol):
                self.trade_symbol = trade_symbol
                self.tx_writer = None

            def on_activate(self) -> bool:
                if fh.get_state().get_value() == FeedHandlerState.LIVE:
                    feed = registry.get_feed(
                        f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                    instrument_code = feed.get_instrument().get_exchange_instrument_code()
                    txlog = TransactionLog(
                        Path(f'{journal_path}/{db}_TRADES/{instrument_code}'))
                    self.tx_writer = txlog.create_writer()

                    trades = feed.get_trades()
                    Do(scheduler.get_network(), trades,
                       lambda: self.on_trade_print(trades.get_value()))
                return False

            def on_trade_print(self, trade):
                trade_counter.inc()
                logger.info(trade)

                trade_msg = capnp_def.TradeMessage.new_message()
                trade_msg.time = datetime.utcnow().timestamp()
                trade_msg.tradeId = trade.get_trade_id()
                trade_msg.side = (capnp_def.Side.buy if trade.get_side() == Side.BUY
                                  else capnp_def.Side.sell)
                trade_msg.size = trade.get_qty()
                trade_msg.price = trade.get_price()

                self.tx_writer.append_msg(trade_msg)

        if journal_books:

            class SubscribeOrderBook(Event):
                def __init__(self, trade_symbol):
                    self.trade_symbol = trade_symbol
                    self.tx_writer = None

                def on_activate(self) -> bool:
                    if fh.get_state().get_value() == FeedHandlerState.LIVE:
                        feed = registry.get_feed(
                            f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                        instrument_code = feed.get_instrument().get_exchange_instrument_code()
                        txlog = TransactionLog(
                            Path(f'{journal_path}/{db}_BOOKS/{instrument_code}'))
                        self.tx_writer = txlog.create_writer()

                        books = feed.get_order_books()
                        Do(scheduler.get_network(), books,
                           lambda: self.on_book_update(books.get_value()))
                    return False

                def on_book_update(self, book: OrderBook):
                    book_update_counter.inc()

                    book_msg = capnp_def.Level1BookUpdateMessage.new_message()
                    book_msg.time = datetime.utcnow().timestamp()
                    if len(book.get_bids()) > 0:
                        book_msg.bestBidQty = book.get_best_bid().get_qty()
                        book_msg.bestBidPx = book.get_best_bid().get_px()
                    else:
                        book_msg.bestBidQty = 0
                        book_msg.bestBidPx = 0

                    if len(book.get_asks()) > 0:
                        book_msg.bestAskQty = book.get_best_ask().get_qty()
                        book_msg.bestAskPx = book.get_best_ask().get_px()
                    else:
                        book_msg.bestAskQty = 0
                        book_msg.bestAskPx = 0

                    self.tx_writer.append_msg(book_msg)

            scheduler.get_network().connect(fh.get_state(),
                                            SubscribeOrderBook(symbol))

        scheduler.get_network().connect(fh.get_state(),
                                        SubscribeTrades(symbol))

    # launch the monitoring endpoint
    start_http_server(8000)

    # async start the feedhandler
    asyncio.ensure_future(fh.start())

    # crash out on any exception
    asyncio.get_event_loop().set_exception_handler(
        custom_asyncio_error_handler)

    # go!
    asyncio.get_event_loop().run_forever()
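
start_http_server is presumably prometheus_client's, serving the two counters registered above on port 8000. A quick sanity check of that endpoint using only the standard library (host, port, and metric prefix as in the code above):

import urllib.request

with urllib.request.urlopen('http://localhost:8000/metrics') as resp:
    for line in resp.read().decode().splitlines():
        if line.startswith('serenity_'):
            print(line)
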
Example #8
import datetime
import logging
import os

from pathlib import Path

import fire
import pandas as pd

from serenity.db.api import connect_serenity_db, InstrumentCache, TypeCodeCache
from serenity.marketdata.tickstore.api import LocalTickstore, BiTimestamp, AzureBlobTickstore
from serenity.marketdata.tickstore.journal import Journal, NoSuchJournalException
from serenity.utils import init_logging

init_logging()
logger = logging.getLogger(__name__)


# noinspection DuplicatedCode
def upload_main(behemoth_path: str = '/behemoth',
                days_back: int = 1,
                upload_start_date: str = None,
                upload_end_date: str = None):
    if upload_start_date is not None and upload_end_date is not None:
        upload_start_date = datetime.datetime.strptime(upload_start_date,
                                                       '%Y-%m-%d').date()
        upload_end_date = datetime.datetime.strptime(upload_end_date,
                                                     '%Y-%m-%d').date()
        delta = upload_end_date - upload_start_date

        for i in range(delta.days + 1):