def __init__(
    self,
    storage_engine,
    market_filter=None,
    market_data_filter=None,
    local_dir=LOCAL_DIR,
):
    self.storage_engine = storage_engine
    self.storage_engine.recorder = self
    self.market_filter = market_filter or streaming_market_filter()
    self.market_data_filter = market_data_filter or streaming_market_data_filter(
        fields=[
            "EX_ALL_OFFERS",
            "EX_TRADED",
            "EX_TRADED_VOL",
            "EX_LTP",
            "EX_MARKET_DEF",
            "SP_TRADED",
            "SP_PROJECTED",
        ]
    )
    self.local_dir = local_dir
    self.stream_id = create_short_uuid()  # used to differentiate markets /<self.local_dir>/<stream_id>
    self.live_markets = []  # list of markets to be processed
    self._setup()
    logger.info("Recorder created %s" % self.stream_id)
def observe_market(self, observer):
    class FakeQueue:
        def put(self, value):
            observer.on_next(value)

    listener = betfairlightweight.StreamListener(
        output_queue=FakeQueue(), lightweight=True)
    self._market_stream = self._client.streaming.create_stream(
        listener=listener, description='BFG Market Stream')
    market_filter = streaming_market_filter(market_ids=[
        market.marketId for market in self.todays_racecard
    ])
    market_data_filter = streaming_market_data_filter(
        fields=[
            'EX_BEST_OFFERS', 'EX_TRADED', 'EX_TRADED_VOL', 'EX_LTP',
            'EX_MARKET_DEF'
        ],
        ladder_levels=3,  # Market depth
    )
    self._market_stream.subscribe_to_markets(
        market_filter=market_filter,
        market_data_filter=market_data_filter,
        # conflate_ms=1000,
    )
    self._market_stream.start(async_=True)
def markets(filter_path, stream_name):
    if filter_path is None:
        return
    with open(filter_path, "r") as f:
        print("opening market filter file ... ")
        s = json.load(f)
    mf = streaming_market_filter(**s['market_filter'])
    mdf = streaming_market_data_filter(**s['market_data_filter'])
    print(sub(stream_name, subscription_message(mf, mdf)))
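The helper above assumes a JSON filter file whose two top-level keys, market_filter and market_data_filter, hold keyword arguments for the corresponding betfairlightweight helpers. A minimal sketch of such a file, shown as the Python dict json.load() would return; the concrete values are illustrative assumptions, not taken from the source:

# Hypothetical contents of the file passed as filter_path (values are examples only)
example_filter_file = {
    "market_filter": {  # kwargs for streaming_market_filter()
        "event_type_ids": ["7"],  # e.g. horse racing
        "country_codes": ["GB", "IE"],
        "market_types": ["WIN"],
    },
    "market_data_filter": {  # kwargs for streaming_market_data_filter()
        "fields": ["EX_BEST_OFFERS", "EX_LTP", "EX_MARKET_DEF"],
        "ladder_levels": 3,
    },
}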
def __init__(self, storage_engine, market_filter=None, market_data_filter=None):
    self.storage_engine = storage_engine
    self.market_filter = market_filter or streaming_market_filter()
    self.market_data_filter = market_data_filter or streaming_market_data_filter(
        fields=[
            'EX_ALL_OFFERS', 'EX_TRADED', 'EX_TRADED_VOL', 'EX_LTP',
            'EX_MARKET_DEF', 'SP_TRADED', 'SP_PROJECTED'
        ]
    )
    self.stream_id = create_short_uuid()  # used to differentiate markets /<FLUMINE_DATA>/<stream_id>
    self.live_markets = []  # list of markets to be processed
    self._setup()
    logger.info('Recorder created %s' % self.stream_id)
def start_betfair_ladder_stream(self, market_id: str,
                                conflate_ms: float) -> queue.Queue:
    """Start the Betfair ladder stream.

    Args:
        market_id: Betfair market ID.
        conflate_ms: Conflation rate in milliseconds.

    Returns:
        Market ladder queue.
    """
    if self.stream is not None:
        logger.info(
            "There is already a Betfair market stream running. Before "
            "starting a new stream, the existing one must be stopped.")
        ladder_queue = self.stream.listener.output_queue
        return ladder_queue

    logger.info("Initialising output queue.")
    ladder_queue = queue.Queue()

    logger.info("Initialising Betfair stream listener.")
    listener = betfairlightweight.StreamListener(ladder_queue)

    logger.info("Creating the Betfair market stream.")
    stream = self._client.streaming.create_stream(listener=listener)

    logger.info("Setting the market filter to market_id=%s.", market_id)
    market_filter_ = streaming_market_filter(market_ids=[market_id])

    logger.info("Initialising streaming market data filter.")
    market_data_filter = streaming_market_data_filter(
        # fields=['EX_MARKET_DEF', 'EX_ALL_OFFERS'],  # Without virtual bets
        fields=['EX_MARKET_DEF', 'EX_BEST_OFFERS_DISP'],  # With virtual bets
        ladder_levels=10)

    logger.info("Subscribing to the market.")
    stream.subscribe_to_markets(
        market_filter=market_filter_,
        market_data_filter=market_data_filter,
        conflate_ms=min(conflate_ms, 120000),
    )

    logger.info("Starting the Betfair market stream.")
    stream.start(async_=True)
    self.stream = stream

    return ladder_queue
def create_stream(self, output_queue, market_ids):
    """
    Create a Betfair stream that allows the connection to be established once
    and MarketBook updates to be received in real time.
    """
    # create stream listener
    listener = betfairlightweight.StreamListener(output_queue=output_queue)
    # create stream
    stream = self.api.streaming.create_stream(listener=listener)
    market_filter = filters.streaming_market_filter(market_ids=market_ids)
    market_data_filter = filters.streaming_market_data_filter(
        fields=["EX_BEST_OFFERS", "EX_MARKET_DEF"], ladder_levels=3)
    # subscribe
    streaming_unique_id = stream.subscribe_to_markets(
        market_filter=market_filter,
        market_data_filter=market_data_filter,
        conflate_ms=2000,  # send update every 2000ms
    )
    return stream
import logging

from betfairlightweight import filters
from betfairlightweight.resources import MarketBook

from .runnercontext import RunnerContext
from ..clients import BaseClient
from ..markets.market import Market
from ..streams.marketstream import BaseStream, MarketStream
from ..utils import create_cheap_hash

logger = logging.getLogger(__name__)

STRATEGY_NAME_HASH_LENGTH = 13

DEFAULT_MARKET_DATA_FILTER = filters.streaming_market_data_filter(fields=[
    "EX_ALL_OFFERS",
    "EX_TRADED",
    "EX_TRADED_VOL",
    "EX_LTP",
    "EX_MARKET_DEF",
    "SP_TRADED",
    "SP_PROJECTED",
])


class BaseStrategy:
    """
    Strategy object to process MarketBook data from streams, order placement
    and handling logic to be added where required.

    Only MarketBooks from provided filter and data filter are processed.
    Runner context available to store current live trades.
def test_streaming_market_data_filter(self):
    response = streaming_market_data_filter()
    assert response == {}

    response = streaming_market_data_filter(ladder_levels=3)
    assert response == {"ladderLevels": 3}
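The test above reflects how both filter helpers behave: they return plain dicts, converting supplied snake_case keyword arguments to camelCase keys and dropping anything left as None. A minimal sketch, assuming current betfairlightweight behaviour; the printed results are expectations, not captured output:

from betfairlightweight.filters import (
    streaming_market_filter,
    streaming_market_data_filter,
)

# Unset kwargs are omitted; the rest become camelCase keys in the returned dict.
print(streaming_market_filter(event_type_ids=["7"], country_codes=["GB"]))
# expected: {'eventTypeIds': ['7'], 'countryCodes': ['GB']}

print(streaming_market_data_filter(fields=["EX_BEST_OFFERS"], ladder_levels=3))
# expected: {'fields': ['EX_BEST_OFFERS'], 'ladderLevels': 3}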
trading = betfairlightweight.APIClient("username")
client = clients.BetfairClient(trading)
framework = Flumine(client=client)

strategy = SingleStrategy(
    name="back_strat_42",
    market_filter=streaming_market_filter(
        event_type_ids=["7"],
        country_codes=["GB", "IE"],
        market_types=["WIN"],
    ),
    market_data_filter=streaming_market_data_filter(
        fields=[
            "EX_BEST_OFFERS",
            "EX_LTP",
            "EX_MARKET_DEF",
        ],
        ladder_levels=1,
    ),
    conflate_ms=1000,  # update every 1s
    max_trade_count=1,  # 1 trade/order per selection only
    context={
        "selections": [
            {
                "market_id": "1.196154851",
                "selection_id": 28247970,
                "side": "LAY",
                "liability": 10.0,
            }
            # add more here..
        ]
    },
def data_collection_pipeline() -> str:
    """Pipeline to collect Betfair odds market ladder streaming data.

    Returns:
        Path of the output ZIP file containing the SQLite database.
    """
    logging.basicConfig(
        format='%(levelname)-8s | %(asctime)s | %(name)s: %(message)s',
        level=logging.INFO)

    market_id, output_dir, conflate_ms, no_virtual_bets, allow_inplay, \
        mins_before_start = parse_command_line_args()

    trading = bfl.APIClient(username=username,
                            password=password,
                            app_key=app_key,
                            cert_files=[cert_file, cert_key_file])
    logger.info("Logging in to Betfair")
    trading.login()

    # Event and market information
    event_type, event, competition = get_event_info(trading, market_id)
    market_name, market_start_time, selections = get_market_info(
        trading, market_id)

    # Wait to stream until a certain amount of minutes before the start
    if mins_before_start is not None:
        logger.info(
            "Logging off from Betfair and waiting until %s minutes before the "
            "start of the event. Press Ctrl+C to quit.", mins_before_start)
        trading.logout()
        now = datetime.utcnow()
        try:
            while market_start_time - now >= \
                    timedelta(minutes=mins_before_start):
                time.sleep(1)
                now = datetime.utcnow()
        except KeyboardInterrupt:
            logger.info("Exiting program (Keyboard interrupt)")
            exit(0)
        logger.info("Logging in to Betfair again.")
        trading.login()

    # Output file path
    output_file_name = get_output_file_name(event_type, event, competition,
                                            market_name, market_start_time)
    output_sqlite_file = os.path.join(output_dir, output_file_name + '.db')
    output_zip_file = os.path.join(output_dir, output_file_name + '.zip')

    # Market stream
    logger.info("Initialising output queue")
    output_queue = queue.Queue()
    logger.info("Initialising Betfair stream listener")
    listener = bfl.StreamListener(output_queue)
    logger.info("Creating the Betfair market stream")
    stream = trading.streaming.create_stream(listener=listener)
    logger.info("Setting the market filter to market_id=%s", market_id)
    market_filter = streaming_market_filter(market_ids=[market_id])
    logger.info("Initialising streaming market data filter")
    if no_virtual_bets:
        market_data_fields = ['EX_MARKET_DEF', 'EX_ALL_OFFERS', 'EX_TRADED']
    else:
        market_data_fields = [
            'EX_MARKET_DEF', 'EX_BEST_OFFERS_DISP', 'EX_TRADED'
        ]
    market_data_filter = streaming_market_data_filter(
        fields=market_data_fields,
    )
    logger.info("Subscribing to the market")
    stream.subscribe_to_markets(market_filter=market_filter,
                                market_data_filter=market_data_filter,
                                conflate_ms=conflate_ms)
    logger.info("Starting the stream")
    stream.start(async_=True)

    logger.info(f"Saving data in file {output_sqlite_file}")
    connection, cursor = create_sqlite_database(output_sqlite_file)
    market_snapshot_no = 0
    while True:
        try:
            market_books = output_queue.get()
            market_book = market_books[0]
            market_status = market_book.status
            market_inplay = market_book.inplay
            publish_time = market_book.publish_time

            # Stop the stream if the conditions are met
            if allow_inplay:
                if market_status == 'CLOSED':
                    break
            else:
                if market_status == 'CLOSED' or market_inplay is True:
                    break

            insert_in_market_status_table(cursor, publish_time, market_status,
                                          market_inplay)
            for runner in market_book.runners:
                selection = selections[runner.selection_id]
                selection_status = runner.status
                insert_in_selection_status_table(cursor, publish_time,
                                                 selection, selection_status)
                for back in runner.ex.available_to_back:
                    insert_in_available_to_back_table(cursor, publish_time,
                                                      selection, back.price,
                                                      back.size)
                for lay in runner.ex.available_to_lay:
                    insert_in_available_to_lay_table(cursor, publish_time,
                                                     selection, lay.price,
                                                     lay.size)
                for volume in runner.ex.traded_volume:
                    insert_in_traded_volume_table(cursor, publish_time,
                                                  selection, volume.price,
                                                  volume.size)
            connection.commit()
            market_snapshot_no = market_snapshot_no + 1
            logger.info("Market snapshot #%s stored.", market_snapshot_no)
        except KeyboardInterrupt:
            logger.info("Exiting program (Keyboard interrupt)")
            break

    logger.info(
        "Stopping the stream and logging out from Betfair. This may take a few"
        " seconds.")
    stream.stop()
    try:
        trading.logout()
    except APIError:
        logger.warning("Failed to log out from Betfair: Connection error.")
    cursor.close()
    connection.close()

    logger.info("Compressing the Sqlite file into ZIP file %s", output_zip_file)
    with zipfile.ZipFile(output_zip_file, 'w', zipfile.ZIP_DEFLATED) as zip_f:
        zip_f.write(output_sqlite_file, os.path.basename(output_sqlite_file))
    os.remove(output_sqlite_file)

    return output_zip_file
    output_queue=output_queue,
)

# create stream
stream = trading.streaming.create_stream(
    listener=listener,
)

# create filters (GB WIN racing)
market_filter = streaming_market_filter(
    event_type_ids=['7'],
    country_codes=['GB'],
    market_types=['WIN'],
)
market_data_filter = streaming_market_data_filter(
    fields=['EX_BEST_OFFERS', 'EX_MARKET_DEF'],
    ladder_levels=3,
)

# subscribe
streaming_unique_id = stream.subscribe_to_markets(
    market_filter=market_filter,
    market_data_filter=market_data_filter,
    conflate_ms=1000,  # send update every 1000ms
)

# start stream
stream.start(async_=True)

"""
Data can also be accessed by using the snap function in the listener, e.g:
def data_collection_pipeline() -> str:
    """Pipeline to collect Betfair odds market ladder streaming data.

    Returns:
        Path of the output CSV file.
    """
    logging.basicConfig(
        format='%(levelname)-8s | %(asctime)s | %(name)s: %(message)s',
        level=logging.INFO,
        # stream=sys.stdout
    )

    market_id, output_dir, conflate_ms = parse_command_line_args()

    trading = bfl.APIClient(username=username,
                            password=password,
                            app_key=app_key,
                            cert_files=[cert_file, cert_key_file])
    logger.info("Logging in to Betfair")
    trading.login()

    # Event and market information
    event_type, event, competition = get_event_info(trading, market_id)
    market_name, market_start_time, selections = get_market_info(
        trading, market_id)

    # Output file path
    output_file_name = get_output_file_name(event_type, event, competition,
                                            market_name, market_start_time)
    output_file = os.path.join(output_dir, output_file_name)

    # Market stream
    logger.info("Initialising output queue")
    output_queue = queue.Queue()
    logger.info("Initialising Betfair stream listener")
    listener = bfl.StreamListener(output_queue)
    logger.info("Creating the Betfair market stream")
    stream = trading.streaming.create_stream(listener=listener)
    logger.info("Setting the market filter to market_id=%s", market_id)
    market_filter = streaming_market_filter(market_ids=[market_id])
    logger.info("Initialising streaming market data filter")
    market_data_filter = streaming_market_data_filter(
        fields=['EX_MARKET_DEF', 'EX_ALL_OFFERS', 'EX_TRADED'],
    )
    logger.info("Subscribing to the market")
    stream.subscribe_to_markets(market_filter=market_filter,
                                market_data_filter=market_data_filter,
                                conflate_ms=conflate_ms)
    logger.info("Starting the stream")
    stream.start(async_=True)

    logger.info(f"Saving data in file {output_file}")
    with open(output_file, 'w') as f:
        f_csv = csv.writer(f)
        csv_header = ['selection', 'time', 'price', 'size', 'side']
        f_csv.writerow(csv_header)

        market_snapshot_no = 0
        while True:
            try:
                market_books = output_queue.get()
                market_book = market_books[0]
                publish_time = market_book.publish_time
                rows = []
                for runner in market_book.runners:
                    selection_id = runner.selection_id
                    for back in runner.ex.available_to_back:
                        rows.append((selections[selection_id], publish_time,
                                     back.price, back.size, 'back'))
                    for lay in runner.ex.available_to_lay:
                        rows.append((selections[selection_id], publish_time,
                                     lay.price, lay.size, 'lay'))
                f_csv.writerows(rows)
                market_snapshot_no = market_snapshot_no + 1
                logger.info("Market snapshot #%s stored.", market_snapshot_no)
            except KeyboardInterrupt:
                logger.info("Exiting program (Keyboard interrupt)")
                break

    logger.info("Stopping the stream and logging out from Betfair.")
    stream.stop()
    trading.logout()

    return output_file
async def send_subscription_message(
    self,
    market_ids: list = None,
    betting_types: list = None,
    event_type_ids: list = None,
    event_ids: list = None,
    turn_in_play_enabled: bool = None,
    market_types: list = None,
    venues: list = None,
    country_codes: list = None,
    race_types: list = None,
    initial_clk: str = None,
    clk: str = None,
    conflate_ms: int = None,
    heartbeat_ms: int = None,
    segmentation_enabled: bool = True,
    subscribe_book_updates=True,
    subscribe_trade_updates=True,
    subscribe_market_definitions=True,
):
    """
    See `betfairlightweight.filters.streaming_market_filter` for full docstring

    :param market_ids:
    :param betting_types:
    :param event_type_ids:
    :param event_ids:
    :param turn_in_play_enabled:
    :param market_types:
    :param venues:
    :param country_codes:
    :param race_types:
    :param subscribe_book_updates: Subscribe to market orderbook events
    :param subscribe_trade_updates: Subscribe to market trade events
    :return:
    """
    filters = (
        market_ids,
        betting_types,
        event_type_ids,
        event_ids,
        turn_in_play_enabled,
        market_types,
        venues,
        country_codes,
        race_types,
    )
    assert any(filters), "Must pass at least one filter"
    assert any(
        (subscribe_book_updates, subscribe_trade_updates)
    ), "Must subscribe to either book updates or trades"

    if market_ids is not None:
        # TODO - Log a warning about inefficiencies of specific market ids - Won't receive any updates for new
        #  markets that fit criteria like when using event type / market type etc
        # logging.warning()
        pass

    market_filter = streaming_market_filter(
        market_ids=market_ids,
        betting_types=betting_types,
        event_type_ids=event_type_ids,
        event_ids=event_ids,
        turn_in_play_enabled=turn_in_play_enabled,
        market_types=market_types,
        venues=venues,
        country_codes=country_codes,
        race_types=race_types,
    )

    data_fields = []
    if subscribe_book_updates:
        data_fields.append("EX_ALL_OFFERS")
    if subscribe_trade_updates:
        data_fields.append("EX_TRADED")
    if subscribe_market_definitions:
        data_fields.append("EX_MARKET_DEF")

    market_data_filter = streaming_market_data_filter(
        fields=data_fields,
    )

    message = {
        "op": "marketSubscription",
        "id": self.unique_id,
        "marketFilter": market_filter,
        "marketDataFilter": market_data_filter,
        "initialClk": initial_clk,
        "clk": clk,
        "conflateMs": conflate_ms,
        "heartbeatMs": heartbeat_ms,
        "segmentationEnabled": segmentation_enabled,
    }

    await self.send(raw=message)
username = os.environ.get('username')
trading = betfairlightweight.APIClient(username)
trading.login()

# create queue
output_queue = queue.Queue()

# create stream listener
listener = betfairlightweight.StreamListener(
    output_queue=output_queue,
)

# create stream
stream = trading.streaming.create_stream(
    listener=listener,
)

# create filters (single market by id)
market_filter = streaming_market_filter(market_ids=['156436827'])
market_data_filter = streaming_market_data_filter()

# subscribe
streaming_unique_id = stream.subscribe_to_markets(
    market_filter=market_filter,
    market_data_filter=market_data_filter,
    conflate_ms=1000,  # send update every 1000ms
)

# start stream
stream.start(async_=True)

"""
Data can also be accessed by using the snap function in the listener, e.g:

market_books = listener.snap(
    market_ids=[1.12345323]