def do_upload(behemoth_path: str, upload_date: datetime.date, cloud_upload: bool):
    logger.info(f'uploading into Behemoth for upload date = {upload_date}')
    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()
    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    exchanges = {
        'PHEMEX': {
            'db_prefix': 'PHEMEX',
            'supported_products': {'BTCUSD'}
        },
        'COINBASEPRO': {
            'db_prefix': 'COINBASE_PRO',
            'supported_products': {'BTC-USD'}
        }
    }
    for exchange in exchanges.keys():
        logger.info(f'Uploading for exchange: {exchange}')
        db_prefix = exchanges[exchange]['db_prefix']
        supported_products = exchanges[exchange]['supported_products']
        for instrument in instr_cache.get_all_exchange_instruments(exchange):
            symbol = instrument.get_exchange_instrument_code()
            if symbol in supported_products:
                upload_trades(behemoth_path, db_prefix, exchange, symbol, upload_date, cloud_upload)
                upload_order_books(behemoth_path, db_prefix, exchange, symbol, upload_date, cloud_upload)
def backfill_coinbase():
    conn = connect_serenity_db()
    cur = conn.cursor()
    type_code_cache = TypeCodeCache(cur)
    instrument_cache = InstrumentCache(cur, type_code_cache)
    exch_service = ExchangeEntityService(cur, type_code_cache, instrument_cache)

    # create a default account for Coinbase
    exchange = type_code_cache.get_by_code(Exchange, 'Coinbase')
    account = exch_service.get_or_create_account(ExchangeAccount(0, exchange, 'default'))

    # create a BTC-USD symbol for Coinbase
    product_id = 'BTC-USD'
    instrument_type = type_code_cache.get_by_code(InstrumentType, 'CurrencyPair')
    instrument = instrument_cache.get_or_create_instrument(product_id, instrument_type)
    exchange_instrument = instrument_cache.get_or_create_exchange_instrument(product_id, instrument, 'Coinbase')

    # synthesize market orders and fills for Coinbase purchases
    side = type_code_cache.get_by_code(Side, 'Buy')
    order_type = type_code_cache.get_by_code(OrderType, 'Market')
    tif = type_code_cache.get_by_code(TimeInForce, 'Day')

    # noinspection DuplicatedCode
    def create_exchange_order_and_fill(price, quantity, fees, order_id, create_time):
        order = ExchangeOrder(0, exchange, exchange_instrument, order_type, account, side, tif,
                              order_id, price, quantity, create_time)
        order = exch_service.get_or_create_exchange_order(order)
        conn.commit()

        fill = ExchangeFill(0, price, quantity, fees, order_id, create_time)
        fill.set_order(order)
        exch_service.get_or_create_exchange_fill(fill)
        conn.commit()

    # investment purchase
    create_exchange_order_and_fill(265.39, 1.13, 2.9993, 1, datetime(2015, 1, 26))

    # residual left in account after payment by Bitcoin
    create_exchange_order_and_fill(238.47, 2.1 - 1.68136, 5.013, 2, datetime(2015, 2, 13))

    # investment purchase
    create_exchange_order_and_fill(249.05, 0.5, 1.25, 3, datetime(2015, 2, 14))
def do_upload(behemoth_path: str, upload_date: datetime.date):
    logger.info(f'uploading into Behemoth for upload date = {upload_date}')
    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()
    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    exchanges = {'PHEMEX': 'PHEMEX', 'COINBASE_PRO': 'COINBASE_PRO'}
    for exchange, db_prefix in exchanges.items():
        for instrument in instr_cache.get_all_exchange_instruments(exchange):
            symbol = instrument.get_exchange_instrument_code()
            upload_trades(behemoth_path, db_prefix, exchange, symbol, upload_date)
            upload_order_books(behemoth_path, db_prefix, exchange, symbol, upload_date)
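# Usage sketch (not from the source): a minimal command-line wrapper around do_upload above.
# The argparse flags and defaults here are illustrative assumptions; only do_upload itself
# and its (behemoth_path, upload_date) signature come from the code shown.
if __name__ == '__main__':
    import argparse
    from datetime import date

    parser = argparse.ArgumentParser(description='Upload journaled ticks into Behemoth')
    parser.add_argument('--behemoth-path', default='/behemoth')
    parser.add_argument('--upload-date', default=str(date.today()),
                        help='upload date in ISO format, e.g. 2020-06-01')
    args = parser.parse_args()

    do_upload(args.behemoth_path, date.fromisoformat(args.upload_date))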
def generate_tax_report():
    conn = connect_serenity_db()
    cur = conn.cursor()
    type_code_cache = TypeCodeCache(cur)
    instrument_cache = InstrumentCache(cur, type_code_cache)
    analyzer = TradeAnalyzer(cur, type_code_cache, instrument_cache)
    analyzer.run_analysis(['BTC-USD', 'ETH-USD'], 2019, 0.35)
def __init__(self, config: BacktestConfig):
    self.logger = logging.getLogger(__name__)
    self.logger.info('Serenity backtester starting up')

    sys.path.append(str(config.get_strategy_basedir()))

    bt_env = config.get_env()
    exchange_id = bt_env.getenv('EXCHANGE_ID', 'autofill')
    instance_id = bt_env.getenv('EXCHANGE_INSTANCE', 'prod')
    account = bt_env.getenv('EXCHANGE_ACCOUNT', 'Main')

    self.logger.info('Connecting to Serenity database')
    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()

    self.scheduler = HistoricNetworkScheduler(config.get_start_time_millis(), config.get_end_time_millis())
    instrument_cache = InstrumentCache(cur, TypeCodeCache(cur))

    oms = OrderManagerService(self.scheduler)
    md_service = AzureHistoricMarketdataService(self.scheduler, bt_env.getenv('AZURE_CONNECT_STR'))
    mark_service = MarketdataMarkService(self.scheduler.get_network(), md_service)
    op_service = OrderPlacerService(self.scheduler, oms)
    op_service.register_order_placer(f'{exchange_id}:{instance_id}',
                                     AutoFillOrderPlacer(self.scheduler, oms, md_service, account))

    xps = NullExchangePositionService(self.scheduler)

    extra_outputs_txt = bt_env.getenv('EXTRA_OUTPUTS')
    if extra_outputs_txt is None:
        extra_outputs = []
    else:
        extra_outputs = extra_outputs_txt.split(',')
    self.dcs = HDF5DataCaptureService(Mode.BACKTEST, self.scheduler, extra_outputs)

    # wire up orders and fills from OMS
    Do(self.scheduler.get_network(), oms.get_orders(),
       lambda: self.dcs.capture_order(oms.get_orders().get_value()))
    Do(self.scheduler.get_network(), oms.get_order_events(),
       lambda: self.dcs.capture_fill(oms.get_order_events().get_value()))

    self.strategy_name = config.get_strategy_name()
    strategy_env = config.get_strategy_env()
    ctx = StrategyContext(self.scheduler, instrument_cache, md_service, mark_service, op_service,
                          PositionService(self.scheduler, oms), xps, self.dcs, strategy_env.values)
    strategy_instance = config.get_strategy_instance()
    strategy_instance.init(ctx)
    strategy_instance.start()
def __init__(self, scheduler: NetworkScheduler, instrument_cache: InstrumentCache, instance_id: str):
    self.scheduler = scheduler
    self.instrument_cache = instrument_cache
    self.type_code_cache = instrument_cache.get_type_code_cache()
    self.instance_id = instance_id

    self.instruments = []
    self.known_instrument_ids = {}
    self.price_scaling = {}
    self._load_instruments()

    self.state = MutableSignal(FeedHandlerState.INITIALIZING)
    self.scheduler.get_network().attach(self.state)
def ws_fh_main(create_fh, uri_scheme: str, instance_id: str, journal_path: str, db: str,
               journal_books: bool = True, include_symbol: str = '*'):
    init_logging()
    logger = logging.getLogger(__name__)

    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()
    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    scheduler = RealtimeNetworkScheduler()
    registry = FeedHandlerRegistry()
    fh = create_fh(scheduler, instr_cache, include_symbol, instance_id)
    registry.register(fh)

    for instrument in fh.get_instruments():
        symbol = instrument.get_exchange_instrument_code()
        if not (symbol == include_symbol or include_symbol == '*'):
            continue

        # subscribe to FeedState in advance so we know when the Feed is ready to subscribe trades
        class SubscribeTrades(Event):
            def __init__(self, trade_symbol):
                self.trade_symbol = trade_symbol
                self.appender = None

            def on_activate(self) -> bool:
                if fh.get_state().get_value() == FeedHandlerState.LIVE:
                    feed = registry.get_feed(f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                    instrument_code = feed.get_instrument().get_exchange_instrument_code()
                    journal = Journal(Path(f'{journal_path}/{db}_TRADES/{instrument_code}'))
                    self.appender = journal.create_appender()

                    trades = feed.get_trades()
                    Do(scheduler.get_network(), trades, lambda: self.on_trade_print(trades.get_value()))
                return False

            def on_trade_print(self, trade):
                logger.info(trade)

                self.appender.write_double(datetime.utcnow().timestamp())
                self.appender.write_long(trade.get_trade_id())
                self.appender.write_long(trade.get_trade_id())
                self.appender.write_string(trade.get_instrument().get_exchange_instrument_code())
                self.appender.write_short(1 if trade.get_side() == Side.BUY else 0)
                self.appender.write_double(trade.get_qty())
                self.appender.write_double(trade.get_price())

        if journal_books:
            class SubscribeOrderBook(Event):
                def __init__(self, trade_symbol):
                    self.trade_symbol = trade_symbol
                    self.appender = None

                def on_activate(self) -> bool:
                    if fh.get_state().get_value() == FeedHandlerState.LIVE:
                        feed = registry.get_feed(f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                        instrument_code = feed.get_instrument().get_exchange_instrument_code()
                        journal = Journal(Path(f'{journal_path}/{db}_BOOKS/{instrument_code}'))
                        self.appender = journal.create_appender()

                        books = feed.get_order_books()
                        Do(scheduler.get_network(), books, lambda: self.on_book_update(books.get_value()))
                    return False

                def on_book_update(self, book: OrderBook):
                    self.appender.write_double(datetime.utcnow().timestamp())
                    if len(book.get_bids()) > 0:
                        self.appender.write_long(book.get_best_bid().get_qty())
                        self.appender.write_double(book.get_best_bid().get_px())
                    else:
                        self.appender.write_long(0)
                        self.appender.write_double(0)

                    if len(book.get_asks()) > 0:
                        self.appender.write_long(book.get_best_ask().get_qty())
                        self.appender.write_double(book.get_best_ask().get_px())
                    else:
                        self.appender.write_long(0)
                        self.appender.write_double(0)

            scheduler.get_network().connect(fh.get_state(), SubscribeOrderBook(symbol))

        scheduler.get_network().connect(fh.get_state(), SubscribeTrades(symbol))

    # async start the feedhandler
    asyncio.ensure_future(fh.start())

    # crash out on any exception
    asyncio.get_event_loop().set_exception_handler(custom_asyncio_error_handler)

    # go!
    asyncio.get_event_loop().run_forever()
import coinbasepro

from phemex import PublicCredentials

from serenity.exchange.gemini import GeminiConnection
from serenity.exchange.phemex import get_phemex_connection
from serenity.db.api import connect_serenity_db, InstrumentCache, TypeCodeCache, ExchangeEntityService

if __name__ == '__main__':
    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()
    type_code_cache = TypeCodeCache(cur)
    instrument_cache = InstrumentCache(cur, type_code_cache)
    exch_service = ExchangeEntityService(cur, type_code_cache, instrument_cache)

    # map all Gemini products to exchange_instrument table
    gemini_client = GeminiConnection()
    gemini = exch_service.instrument_cache.get_crypto_exchange("GEMINI")
    for symbol in gemini_client.get_products():
        base_ccy = symbol[0:3].upper()
        quote_ccy = symbol[3:].upper()
        currency_pair = instrument_cache.get_or_create_cryptocurrency_pair(base_ccy, quote_ccy)
        instrument_cache.get_or_create_exchange_instrument(symbol, currency_pair.get_instrument(), gemini)

    # map all Coinbase Pro products to exchange_instrument table
    cbp_client = coinbasepro.PublicClient()
    cbp = exch_service.instrument_cache.get_crypto_exchange("COINBASEPRO")
    for product in cbp_client.get_products():
        symbol = product['id']
def __init__(self, cur, type_code_cache: TypeCodeCache, instrument_cache: InstrumentCache):
    self.cur = cur
    self.type_code_cache = type_code_cache
    self.instrument_cache = instrument_cache

    self.trades = {}
    cur.execute("SELECT eo.side_id, eo.exchange_instrument_id, ef.fill_price, ef.quantity, ef.fees, ef.create_time "
                "FROM serenity.exchange_fill ef "
                "INNER JOIN serenity.exchange_order eo on eo.exchange_order_id = ef.exchange_order_id "
                "ORDER BY ef.create_time ASC ")
    for row in cur.fetchall():
        side = type_code_cache.get_by_id(Side, row[0])
        instrument = instrument_cache.get_entity_by_id(ExchangeInstrument, row[1])
        instrument_code = instrument.get_instrument().get_instrument_code()
        if instrument_code not in self.trades:
            self.trades[instrument_code] = {}
        if side.get_type_code() not in self.trades[instrument_code]:
            self.trades[instrument_code][side.get_type_code()] = []
        trade_info = {
            'px': row[2],
            'qty': row[3],
            'remaining': row[3],
            'fee': row[4],
            'ts': row[5],
        }
        self.trades[instrument_code][side.get_type_code()].append(trade_info)

    # explode cross-currency trades into their component parts
    usd_ccy = instrument_cache.get_entity_by_ak(Currency, 'USD')
    for instrument_code in list(self.trades):
        pair_codes = tuple(instrument_code.split('-'))
        base_ccy = instrument_cache.get_entity_by_ak(Currency, pair_codes[0])
        quote_ccy = instrument_cache.get_entity_by_ak(Currency, pair_codes[1])
        if quote_ccy.get_currency_code() != 'USD':
            long = "{}-{}".format(base_ccy.get_currency_code(), usd_ccy.get_currency_code())
            short = "{}-{}".format(quote_ccy.get_currency_code(), usd_ccy.get_currency_code())
            if long not in self.trades:
                self.trades[long] = {'Buy': [], 'Sell': []}
            if short not in self.trades:
                self.trades[short] = {'Buy': [], 'Sell': []}

            # for each sell, buy short instrument and sell long instrument
            for sell in self.trades[instrument_code]['Sell']:
                self.trades[short]['Buy'].append({
                    'px': self.lookup_instrument_mark(short, sell['ts']),
                    'qty': sell['qty'] * sell['px'],
                    'remaining': sell['qty'] * sell['px'],
                    'fee': 0.0,
                    'ts': sell['ts']
                })
                self.trades[long]['Sell'].append({
                    'px': self.lookup_instrument_mark(long, sell['ts']),
                    'qty': sell['qty'],
                    'remaining': sell['qty'],
                    'fee': 0.0,
                    'ts': sell['ts']
                })

            # for each buy, buy long instrument and sell short instrument
            for buy in self.trades[instrument_code]['Buy']:
                self.trades[long]['Buy'].append({
                    'px': self.lookup_instrument_mark(long, buy['ts']),
                    'qty': buy['qty'],
                    'remaining': buy['qty'],
                    'fee': 0.0,
                    'ts': buy['ts']
                })
                self.trades[short]['Sell'].append({
                    'px': self.lookup_instrument_mark(short, buy['ts']),
                    'qty': buy['qty'] * buy['px'],
                    'remaining': buy['qty'] * buy['px'],
                    'fee': 0.0,
                    'ts': buy['ts']
                })

    # re-sort buys and sells by ts
    for instrument_code in list(self.trades):
        self.trades[instrument_code]['Buy'] = sorted(self.trades[instrument_code]['Buy'], key=lambda k: k['ts'])
        self.trades[instrument_code]['Sell'] = sorted(self.trades[instrument_code]['Sell'], key=lambda k: k['ts'])
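# Worked example (illustrative, not from the source): how the cross-currency explosion above
# decomposes a single ETH-BTC sell into two USD-quoted legs. The mark prices below are made up;
# in the real code they come from self.lookup_instrument_mark(instrument_code, ts).
sell_qty = 2.0         # sold 2 ETH
sell_px = 0.03         # at 0.03 BTC per ETH
btc_usd_mark = 9000.0  # hypothetical BTC-USD mark at the trade timestamp
eth_usd_mark = 270.0   # hypothetical ETH-USD mark at the trade timestamp

# selling ETH-BTC acquires BTC: a synthetic BTC-USD buy for the BTC notional received
synthetic_btc_buy_qty = sell_qty * sell_px   # 0.06 BTC
# ...and disposes of ETH: a synthetic ETH-USD sell for the ETH quantity sold
synthetic_eth_sell_qty = sell_qty            # 2 ETH

print(f'BTC-USD Buy:  qty={synthetic_btc_buy_qty} @ px={btc_usd_mark}')
print(f'ETH-USD Sell: qty={synthetic_eth_sell_qty} @ px={eth_usd_mark}')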
def backfill_coinbasepro(api_key: str, api_secret: str, api_passphrase: str):
    conn = connect_serenity_db()
    cur = conn.cursor()
    type_code_cache = TypeCodeCache(cur)
    instrument_cache = InstrumentCache(cur, type_code_cache)
    exch_service = ExchangeEntityService(cur, type_code_cache, instrument_cache)

    auth_client = coinbasepro.AuthenticatedClient(key=api_key, secret=api_secret, passphrase=api_passphrase)

    # Coinbase Pro has a notion of account per currency for tracking balances, so we want to pull
    # out what it calls the profile, which is the parent exchange account
    profile_set = set()
    for account in auth_client.get_accounts():
        profile_set.add(account['profile_id'])

    exchange = type_code_cache.get_by_code(Exchange, "CoinbasePro")
    account_by_profile_id = {}
    for profile in profile_set:
        account = exch_service.get_or_create_account(ExchangeAccount(0, exchange, profile))
        account_by_profile_id[profile] = account

    # load up all the orders
    for order in auth_client.get_orders(status=['done']):
        order_uuid = order['id']

        # market orders have no price
        if 'price' in order:
            price = order['price']
        else:
            price = None

        # market orders that specify "funds" have no size
        if 'size' in order:
            size = order['size']
        else:
            size = order['filled_size']

        exchange_account = account_by_profile_id[order['profile_id']]
        instrument_type = type_code_cache.get_by_code(InstrumentType, 'CurrencyPair')
        instrument = instrument_cache.get_or_create_instrument(order['product_id'], instrument_type)
        exchange_instrument = instrument_cache.get_or_create_exchange_instrument(order['product_id'], instrument,
                                                                                 exchange.get_type_code())
        side = type_code_cache.get_by_code(Side, order['side'].capitalize())
        if order['type'] is None:
            order['type'] = 'Market'
        order_type = type_code_cache.get_by_code(OrderType, order['type'].capitalize())
        if 'time_in_force' in order:
            tif = type_code_cache.get_by_code(TimeInForce, order['time_in_force'])
        else:
            tif = type_code_cache.get_by_code(TimeInForce, 'Day')
        create_time = order['created_at']

        order = ExchangeOrder(0, exchange, exchange_instrument, order_type, exchange_account, side, tif,
                              order_uuid, price, size, create_time)
        exch_service.get_or_create_exchange_order(order)
        conn.commit()

    # load up all the fills, linking back to the orders
    for product in ['BTC-USD', 'ETH-BTC']:
        for fill in auth_client.get_fills(product_id=product):
            order_id = fill['order_id']
            trade_id = fill['trade_id']
            price = fill['price']
            size = fill['size']
            fees = fill['fee']
            create_time = fill['created_at']

            order = exch_service.get_entity_by_ak(ExchangeOrder, (exchange.get_type_code(), order_id))
            fill = ExchangeFill(0, price, size, fees, trade_id, create_time)
            fill.set_order(order)
            exch_service.get_or_create_exchange_fill(fill)
            conn.commit()
import binance.client
import coinbasepro
import gemini

from serenity.db.api import connect_serenity_db, InstrumentCache, TypeCodeCache

if __name__ == '__main__':
    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()
    type_code_cache = TypeCodeCache(cur)
    instrument_cache = InstrumentCache(cur, type_code_cache)

    # map all Gemini products to exchange_instrument table
    gemini_client = gemini.PublicClient()
    for symbol in gemini_client.symbols():
        base_ccy = symbol[0:3].upper()
        quote_ccy = symbol[3:].upper()
        currency_pair = instrument_cache.get_or_create_cryptocurrency_pair(base_ccy, quote_ccy)
        instrument_cache.get_or_create_exchange_instrument(symbol, currency_pair.get_instrument(), 'Gemini')

    # map all Coinbase Pro products to exchange_instrument table
    cbp_client = coinbasepro.PublicClient()
    for product in cbp_client.get_products():
        symbol = product['id']
        base_ccy = product['base_currency']
        quote_ccy = product['quote_currency']
        currency_pair = instrument_cache.get_or_create_cryptocurrency_pair(base_ccy, quote_ccy)
        instrument_cache.get_or_create_exchange_instrument(symbol, currency_pair.get_instrument(), 'CoinbasePro')

    # map all Binance products to exchange_instrument table
def backfill_gemini(gemini_api_key: str, gemini_api_secret: str):
    conn = connect_serenity_db()
    cur = conn.cursor()
    type_code_cache = TypeCodeCache(cur)
    instrument_cache = InstrumentCache(cur, type_code_cache)
    exch_service = ExchangeEntityService(cur, type_code_cache, instrument_cache)

    client = gemini.PrivateClient(gemini_api_key, gemini_api_secret)
    for exchange_symbol in ('BTCUSD', 'ETHBTC', 'ZECBTC'):
        instrument_symbol = exchange_symbol[0:3] + '-' + exchange_symbol[3:]
        instrument_type = type_code_cache.get_by_code(InstrumentType, 'CurrencyPair')
        instrument = instrument_cache.get_or_create_instrument(instrument_symbol, instrument_type)
        exchange_instrument = instrument_cache.get_or_create_exchange_instrument(exchange_symbol.lower(), instrument,
                                                                                 'Gemini')
        conn.commit()

        exchange = type_code_cache.get_by_code(Exchange, 'Gemini')
        for trade in client.get_past_trades(exchange_symbol):
            fill_price = trade['price']
            quantity = trade['amount']
            fees = trade['fee_amount']
            side = type_code_cache.get_by_code(Side, trade['type'])
            trade_id = trade['tid']
            order_uuid = trade['order_id']
            create_time_ms = trade['timestampms']
            create_time = datetime.utcfromtimestamp(create_time_ms // 1000).\
                replace(microsecond=create_time_ms % 1000 * 1000)

            # because we cannot get historical exchange orders past 7 days, we need to synthesize limit orders
            exchange_account = ExchangeAccount(0, exchange, 'default')
            exchange_account = exch_service.get_or_create_account(exchange_account)
            order_type = type_code_cache.get_by_code(OrderType, 'Limit')
            tif = type_code_cache.get_by_code(TimeInForce, 'GTC')
            order = ExchangeOrder(0, exchange, exchange_instrument, order_type, exchange_account, side, tif,
                                  order_uuid, fill_price, quantity, create_time)
            order = exch_service.get_or_create_exchange_order(order)
            conn.commit()

            # create the fills and insert, linking back to the synthetic order
            fill = ExchangeFill(0, fill_price, quantity, fees, trade_id, create_time)
            fill.set_order(order)
            exch_service.get_or_create_exchange_fill(fill)
            conn.commit()

    for transfer in client.api_query('/v1/transfers', {}):
        transfer_type = type_code_cache.get_by_code(ExchangeTransferType, transfer['type'])
        transfer_method = type_code_cache.get_by_code(ExchangeTransferMethod, "Blockchain")
        currency = instrument_cache.get_or_create_currency(transfer['currency'])
        quantity = transfer['amount']
        transfer_ref = transfer['txHash']
        transfer_time_ms = transfer['timestampms']
        transfer_time = datetime.utcfromtimestamp(transfer_time_ms // 1000). \
            replace(microsecond=transfer_time_ms % 1000 * 1000)
        transfer = ExchangeTransfer(0, exchange, transfer_type, transfer_method, currency, quantity, transfer_ref,
                                    transfer_time)
        exch_service.get_or_create_exchange_transfer(transfer)
        conn.commit()
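# Quick check (illustrative, not from the source): the millisecond-timestamp conversion used above
# keeps the sub-second portion by splitting the value into whole seconds and a microsecond remainder.
# For a hypothetical timestampms of 1571234567890:
#   1571234567890 // 1000 = 1571234567 (whole seconds since epoch)
#   1571234567890 % 1000 * 1000 = 890000 (microseconds)
from datetime import datetime

create_time_ms = 1571234567890
create_time = datetime.utcfromtimestamp(create_time_ms // 1000).replace(microsecond=create_time_ms % 1000 * 1000)
print(create_time)  # 2019-10-16 14:02:47.890000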
def ws_fh_main(create_fh, uri_scheme: str, instance_id: str, journal_path: str, db: str,
               journal_books: bool = True, include_symbol: str = '*'):
    init_logging()
    logger = logging.getLogger(__name__)

    conn = connect_serenity_db()
    conn.autocommit = True
    cur = conn.cursor()
    instr_cache = InstrumentCache(cur, TypeCodeCache(cur))

    scheduler = RealtimeNetworkScheduler()
    registry = FeedHandlerRegistry()
    fh = create_fh(scheduler, instr_cache, include_symbol, instance_id)
    registry.register(fh)

    # register Prometheus metrics
    trade_counter = Counter('serenity_trade_counter', 'Number of trade prints received by feedhandler')
    book_update_counter = Counter('serenity_book_update_counter', 'Number of book updates received by feedhandler')

    for instrument in fh.get_instruments():
        symbol = instrument.get_exchange_instrument_code()
        if not (symbol == include_symbol or include_symbol == '*'):
            continue

        # subscribe to FeedState in advance so we know when the Feed is ready to subscribe trades
        class SubscribeTrades(Event):
            def __init__(self, trade_symbol):
                self.trade_symbol = trade_symbol
                self.tx_writer = None

            def on_activate(self) -> bool:
                if fh.get_state().get_value() == FeedHandlerState.LIVE:
                    feed = registry.get_feed(f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                    instrument_code = feed.get_instrument().get_exchange_instrument_code()
                    txlog = TransactionLog(Path(f'{journal_path}/{db}_TRADES/{instrument_code}'))
                    self.tx_writer = txlog.create_writer()

                    trades = feed.get_trades()
                    Do(scheduler.get_network(), trades, lambda: self.on_trade_print(trades.get_value()))
                return False

            def on_trade_print(self, trade):
                trade_counter.inc()
                logger.info(trade)

                trade_msg = capnp_def.TradeMessage.new_message()
                trade_msg.time = datetime.utcnow().timestamp()
                trade_msg.tradeId = trade.get_trade_id()
                trade_msg.side = capnp_def.Side.buy if trade.get_side() == Side.BUY else capnp_def.Side.sell
                trade_msg.size = trade.get_qty()
                trade_msg.price = trade.get_price()
                self.tx_writer.append_msg(trade_msg)

        if journal_books:
            class SubscribeOrderBook(Event):
                def __init__(self, trade_symbol):
                    self.trade_symbol = trade_symbol
                    self.tx_writer = None

                def on_activate(self) -> bool:
                    if fh.get_state().get_value() == FeedHandlerState.LIVE:
                        feed = registry.get_feed(f'{uri_scheme}:{instance_id}:{self.trade_symbol}')
                        instrument_code = feed.get_instrument().get_exchange_instrument_code()
                        txlog = TransactionLog(Path(f'{journal_path}/{db}_BOOKS/{instrument_code}'))
                        self.tx_writer = txlog.create_writer()

                        books = feed.get_order_books()
                        Do(scheduler.get_network(), books, lambda: self.on_book_update(books.get_value()))
                    return False

                def on_book_update(self, book: OrderBook):
                    book_update_counter.inc()

                    book_msg = capnp_def.Level1BookUpdateMessage.new_message()
                    book_msg.time = datetime.utcnow().timestamp()
                    if len(book.get_bids()) > 0:
                        book_msg.bestBidQty = book.get_best_bid().get_qty()
                        book_msg.bestBidPx = book.get_best_bid().get_px()
                    else:
                        book_msg.bestBidQty = 0
                        book_msg.bestBidPx = 0

                    if len(book.get_asks()) > 0:
                        book_msg.bestAskQty = book.get_best_ask().get_qty()
                        book_msg.bestAskPx = book.get_best_ask().get_px()
                    else:
                        book_msg.bestAskQty = 0
                        book_msg.bestAskPx = 0

                    self.tx_writer.append_msg(book_msg)

            scheduler.get_network().connect(fh.get_state(), SubscribeOrderBook(symbol))

        scheduler.get_network().connect(fh.get_state(), SubscribeTrades(symbol))

    # launch the monitoring endpoint
    start_http_server(8000)

    # async start the feedhandler
    asyncio.ensure_future(fh.start())

    # crash out on any exception
    asyncio.get_event_loop().set_exception_handler(custom_asyncio_error_handler)

    # go!
    asyncio.get_event_loop().run_forever()
def __init__(self, config_path: str, strategy_dir: str):
    sys.path.append(strategy_dir)

    with open(config_path, 'r') as config_yaml:
        self.logger = logging.getLogger(__name__)
        self.logger.info('Serenity starting up')

        config = yaml.safe_load(config_yaml)
        api_version = config['api-version']
        if api_version != 'v1Beta':
            raise ValueError(f'Unsupported API version: {api_version}')

        self.engine_env = Environment(config['environment'])
        instance_id = self.engine_env.getenv('EXCHANGE_INSTANCE', 'prod')
        self.fh_registry = FeedHandlerRegistry()
        account = self.engine_env.getenv('EXCHANGE_ACCOUNT', 'Main')

        self.logger.info('Connecting to Serenity database')
        conn = connect_serenity_db()
        conn.autocommit = True
        cur = conn.cursor()
        scheduler = RealtimeNetworkScheduler()
        instrument_cache = InstrumentCache(cur, TypeCodeCache(cur))

        if 'feedhandlers' in config:
            self.logger.info('Registering feedhandlers')
            for feedhandler in config['feedhandlers']:
                fh_name = feedhandler['exchange']
                include_symbol = feedhandler.get('include_symbol', '*')
                if fh_name == 'Phemex':
                    self.fh_registry.register(PhemexFeedHandler(scheduler, instrument_cache,
                                                                include_symbol, instance_id))
                elif fh_name == 'CoinbasePro':
                    self.fh_registry.register(CoinbaseProFeedHandler(scheduler, instrument_cache, instance_id))
                else:
                    raise ValueError(f'Unsupported feedhandler type: {fh_name}')

        oms = OrderManagerService(scheduler, TimescaleDbTradeBookingService())
        op_service = OrderPlacerService(scheduler, oms)
        md_service = FeedHandlerMarketdataService(scheduler, self.fh_registry, instance_id)
        mark_service = PhemexMarkService(scheduler, instrument_cache, instance_id)

        self.xps = None
        self.ps = None

        extra_outputs_txt = self.engine_env.getenv('EXTRA_OUTPUTS')
        if extra_outputs_txt is None:
            extra_outputs = []
        else:
            extra_outputs = extra_outputs_txt.split(',')
        self.dcs = HDF5DataCaptureService(Mode.LIVE, scheduler, extra_outputs)

        if 'order_placers' in config:
            self.logger.info('Registering OrderPlacers')
            for order_placer in config['order_placers']:
                op_name = order_placer['exchange']
                if op_name == 'Phemex':
                    api_key = self.engine_env.getenv('PHEMEX_API_KEY')
                    api_secret = self.engine_env.getenv('PHEMEX_API_SECRET')
                    if not api_key:
                        raise ValueError('missing PHEMEX_API_KEY')
                    if not api_secret:
                        raise ValueError('missing PHEMEX_API_SECRET')

                    credentials = AuthCredentials(api_key, api_secret)
                    op_service.register_order_placer(f'phemex:{instance_id}',
                                                     PhemexOrderPlacer(credentials, scheduler, oms, account,
                                                                       instance_id))

                    self.xps = PhemexExchangePositionService(credentials, scheduler, instrument_cache, account,
                                                             instance_id)
                    self.ps = PositionService(scheduler, oms)
                else:
                    raise ValueError(f'Unsupported order placer: {op_name}')

        self.strategies = []
        self.strategy_names = []
        for strategy in config['strategies']:
            strategy_name = strategy['name']
            self.strategy_names.append(strategy_name)
            self.logger.info(f'Loading strategy: {strategy_name}')
            module = strategy['module']
            strategy_class = strategy['strategy-class']
            env = Environment(strategy['environment'], parent=self.engine_env)

            module = importlib.import_module(module)
            klass = getattr(module, strategy_class)
            strategy_instance = klass()
            ctx = StrategyContext(scheduler, instrument_cache, md_service, mark_service, op_service, self.ps,
                                  self.xps, self.dcs, env.values)
            self.strategies.append((strategy_instance, ctx))
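# Illustrative only: a config dict with the shape the engine __init__ above expects after yaml.safe_load.
# The key names ('api-version', 'environment', 'feedhandlers', 'order_placers', 'strategies', 'module',
# 'strategy-class', 'include_symbol') are taken from the code; the concrete values and the internal
# structure of the 'environment' blocks are assumptions, since Environment's expected format is not shown.
example_config = {
    'api-version': 'v1Beta',
    'environment': [
        # e.g. {'key': 'EXCHANGE_INSTANCE', 'value': 'prod'}  -- format assumed
    ],
    'feedhandlers': [
        {'exchange': 'Phemex', 'include_symbol': 'BTCUSD'},
        {'exchange': 'CoinbasePro'}
    ],
    'order_placers': [
        {'exchange': 'Phemex'}
    ],
    'strategies': [
        {
            'name': 'bbands1',                       # strategy name (assumed)
            'module': 'strategies.bollinger_bands',  # importable module path (assumed)
            'strategy-class': 'BollingerBands',      # class implementing the strategy (assumed)
            'environment': []
        }
    ]
}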