Beispiel #1
0
            def observe_market(observer):
                """Subscribe to all of today's racecard markets and forward
                every stream update to *observer* via ``on_next``.

                Closes over ``self`` from the enclosing method: reads
                ``self._client`` and ``self.todays_racecard`` and stores the
                created stream on ``self._market_stream``.
                """
                class FakeQueue:
                    # Queue-like adapter: the stream listener only calls
                    # put(), which we redirect straight to the observer.
                    def put(self, value):
                        observer.on_next(value)

                listener = betfairlightweight.StreamListener(
                    output_queue=FakeQueue(), lightweight=True)
                self._market_stream = self._client.streaming.create_stream(
                    listener=listener, description='BFG Market Stream')
                market_filter = streaming_market_filter(market_ids=[
                    market.marketId for market in self.todays_racecard
                ])
                market_data_filter = streaming_market_data_filter(
                    fields=[
                        'EX_BEST_OFFERS', 'EX_TRADED', 'EX_TRADED_VOL',
                        'EX_LTP', 'EX_MARKET_DEF'
                    ],
                    ladder_levels=3,  # Market depth
                )
                self._market_stream.subscribe_to_markets(
                    market_filter=market_filter,
                    market_data_filter=market_data_filter,
                    # conflate_ms=1000,
                )
                # BUG FIX: `async` is a reserved keyword since Python 3.7, so
                # start(async=True) is a SyntaxError. betfairlightweight's
                # keyword is `async_` (used consistently elsewhere).
                self._market_stream.start(async_=True)
Beispiel #2
0
 def __init__(
     self,
     storage_engine,
     market_filter=None,
     market_data_filter=None,
     local_dir=LOCAL_DIR,
 ):
     """Create a market recorder.

     Args:
         storage_engine: Backend used to persist recorded data; receives a
             back-reference to this recorder.
         market_filter: Optional streaming market filter; a falsy value
             falls back to an unrestricted filter.
         market_data_filter: Optional streaming market data filter; a falsy
             value falls back to a broad set of exchange/SP fields.
         local_dir: Base directory for recorded data.
     """
     default_fields = [
         "EX_ALL_OFFERS",
         "EX_TRADED",
         "EX_TRADED_VOL",
         "EX_LTP",
         "EX_MARKET_DEF",
         "SP_TRADED",
         "SP_PROJECTED",
     ]
     self.storage_engine = storage_engine
     # Give the engine a handle back to this recorder.
     self.storage_engine.recorder = self
     self.market_filter = market_filter or streaming_market_filter()
     self.market_data_filter = (
         market_data_filter
         or streaming_market_data_filter(fields=default_fields)
     )
     self.local_dir = local_dir
     # used to differentiate markets /<self.local_dir>/<stream_id>
     self.stream_id = create_short_uuid()
     self.live_markets = []  # list of markets to be processed
     self._setup()
     logger.info("Recorder created %s" % self.stream_id)
Beispiel #3
0
def markets(filter_path, stream_name):
    """Publish a market subscription message built from a JSON filter file.

    Reads *filter_path* (JSON with 'market_filter' and 'market_data_filter'
    keys), builds the streaming filters, and publishes the subscription
    message on *stream_name*. Does nothing when *filter_path* is None.
    """
    if filter_path is None:
        return

    with open(filter_path, "r") as fh:
        print("opening market filter file ... ")
        config = json.load(fh)
        mkt_filter = streaming_market_filter(**config['market_filter'])
        data_filter = streaming_market_data_filter(**config['market_data_filter'])

        print(sub(stream_name, subscription_message(mkt_filter, data_filter)))
Beispiel #4
0
 def __init__(self, storage_engine, market_filter=None, market_data_filter=None):
     """Create a market recorder.

     Args:
         storage_engine: Backend used to persist recorded market data.
         market_filter: Optional streaming market filter; a falsy value
             falls back to an unrestricted filter.
         market_data_filter: Optional streaming market data filter; a falsy
             value falls back to a broad set of exchange/SP fields.
     """
     default_fields = [
         'EX_ALL_OFFERS', 'EX_TRADED', 'EX_TRADED_VOL', 'EX_LTP', 'EX_MARKET_DEF', 'SP_TRADED', 'SP_PROJECTED'
     ]
     self.storage_engine = storage_engine
     self.market_filter = market_filter or streaming_market_filter()
     self.market_data_filter = (
         market_data_filter
         or streaming_market_data_filter(fields=default_fields)
     )
     # used to differentiate markets /<FLUMINE_DATA>/<stream_id>
     self.stream_id = create_short_uuid()
     self.live_markets = []  # list of markets to be processed
     self._setup()
     logger.info('Recorder created %s' % self.stream_id)
Beispiel #5
0
    def start_betfair_ladder_stream(self, market_id: str,
                                    conflate_ms: float) -> queue.Queue:
        """Start the Betfair ladder stream.

        Args:
            market_id: Betfair market ID.
            conflate_ms: Conflation rate in milliseconds (capped at 120000).

        Returns:
            Market ladder queue.
        """
        # Only one stream at a time: reuse the running stream's queue.
        if self.stream is not None:
            logger.info(
                "There is already a Betfair market stream running. Before "
                "starting a new stream, the existing one must be stopped.")
            return self.stream.listener.output_queue

        logger.info("Initialising output queue.")
        out_queue = queue.Queue()

        logger.info("Initialising Betfair stream listener.")
        stream_listener = betfairlightweight.StreamListener(out_queue)

        logger.info("Creating the Betfair market stream.")
        market_stream = self._client.streaming.create_stream(
            listener=stream_listener)

        logger.info("Setting the market filter to market_id=%s.", market_id)
        mkt_filter = streaming_market_filter(market_ids=[market_id])

        logger.info("Initialising streaming market data filter.")
        # 'EX_BEST_OFFERS_DISP' includes virtual bets; use 'EX_ALL_OFFERS'
        # instead for prices without them.
        data_filter = streaming_market_data_filter(
            fields=['EX_MARKET_DEF', 'EX_BEST_OFFERS_DISP'],
            ladder_levels=10)

        logger.info("Subscribing to the market.")
        market_stream.subscribe_to_markets(
            market_filter=mkt_filter,
            market_data_filter=data_filter,
            conflate_ms=min(conflate_ms, 120000),  # cap at 120000 ms
        )

        logger.info("Starting the Betfair market stream.")
        market_stream.start(async_=True)

        self.stream = market_stream
        return out_queue
Beispiel #6
0
    def create_stream(self, output_queue, market_ids):
        """Create a Betfair market stream.

        Establishes a single streaming connection that delivers MarketBook
        updates for *market_ids* into *output_queue* in real time.

        Args:
            output_queue: Queue that will receive MarketBook updates.
            market_ids: Iterable of Betfair market IDs to subscribe to.

        Returns:
            The subscribed (not yet started) stream object.
        """
        # create stream listener
        listener = betfairlightweight.StreamListener(output_queue=output_queue)

        # create stream
        stream = self.api.streaming.create_stream(listener=listener)

        market_filter = filters.streaming_market_filter(market_ids=market_ids)
        market_data_filter = filters.streaming_market_data_filter(
            fields=["EX_BEST_OFFERS", "EX_MARKET_DEF"], ladder_levels=3)

        # subscribe; the subscription id is not needed by callers, so the
        # previously-unused local was dropped. (Comment fixed: conflate_ms
        # is 2000, not 1000.)
        stream.subscribe_to_markets(
            market_filter=market_filter,
            market_data_filter=market_data_filter,
            conflate_ms=2000,  # send update every 2000ms
        )

        return stream
Beispiel #7
0
                            order_type=LimitOrder(price=1.01,
                                                  size=selection["liability"]),
                        )
                        market.place_order(order)


trading = betfairlightweight.APIClient("username")
client = clients.BetfairClient(trading)

framework = Flumine(client=client)

strategy = SingleStrategy(
    name="back_strat_42",
    market_filter=streaming_market_filter(
        event_type_ids=["7"],
        country_codes=["GB", "IE"],
        market_types=["WIN"],
    ),
    market_data_filter=streaming_market_data_filter(
        fields=[
            "EX_BEST_OFFERS",
            "EX_LTP",
            "EX_MARKET_DEF",
        ],
        ladder_levels=1,
    ),
    conflate_ms=1000,  # update every 1s
    max_trade_count=1,  # 1 trade/order per selection only
    context={
        "selections": [{
            "market_id": "1.196154851",
def data_collection_pipeline() -> str:
    """Pipeline to collect Betfair odds market ladder streaming data.

    Streams ladder snapshots for one market into a SQLite database until
    the market closes (or goes in-play, unless allow_inplay), then
    compresses the database into a ZIP archive.

    Returns:
        Path of the output ZIP file.
    """
    logging.basicConfig(
        format='%(levelname)-8s | %(asctime)s | %(name)s:  %(message)s',
        level=logging.INFO)

    market_id, output_dir, conflate_ms, no_virtual_bets, allow_inplay,\
        mins_before_start = parse_command_line_args()

    trading = bfl.APIClient(username=username,
                            password=password,
                            app_key=app_key,
                            cert_files=[cert_file, cert_key_file])

    logger.info("Logging in to Betfair")
    trading.login()

    # Event and market information
    event_type, event, competition = get_event_info(trading, market_id)
    market_name, market_start_time, selections = get_market_info(
        trading, market_id)

    # Wait to stream until a certain amount of minutes before the start
    if mins_before_start is not None:
        logger.info(
            "Logging off from Betfair and waiting until %s minutes before the "
            "start of the event. Press Ctrl+C to quit.", mins_before_start)

        # Log out while idle so the session does not sit open for hours.
        trading.logout()

        now = datetime.utcnow()
        try:
            # Poll once per second until the start window is reached.
            while market_start_time - now >= \
                    timedelta(minutes=mins_before_start):
                time.sleep(1)
                now = datetime.utcnow()
        except KeyboardInterrupt:
            logger.info("Exiting program (Keyboard interrupt)")
            exit(0)

        logger.info("Logging in to Betfair again.")
        trading.login()

    # Output file path
    output_file_name = get_output_file_name(event_type, event, competition,
                                            market_name, market_start_time)
    output_sqlite_file = os.path.join(output_dir, output_file_name + '.db')
    output_zip_file = os.path.join(output_dir, output_file_name + '.zip')

    # Market stream
    logger.info("Initialising output queue")
    output_queue = queue.Queue()

    logger.info("Initialising Betfair stream listener")
    listener = bfl.StreamListener(output_queue)

    logger.info("Creating the Betfair market stream")
    stream = trading.streaming.create_stream(listener=listener)

    logger.info("Setting the market filter to market_id=%s", market_id)
    market_filter = streaming_market_filter(market_ids=[market_id])

    logger.info("Initialising streaming market data filter")
    # EX_ALL_OFFERS excludes virtual bets; EX_BEST_OFFERS_DISP includes them.
    if no_virtual_bets:
        market_data_fields = ['EX_MARKET_DEF', 'EX_ALL_OFFERS', 'EX_TRADED']
    else:
        market_data_fields = [
            'EX_MARKET_DEF', 'EX_BEST_OFFERS_DISP', 'EX_TRADED'
        ]
    market_data_filter = streaming_market_data_filter(
        fields=market_data_fields, )

    logger.info("Subscribing to the market")
    stream.subscribe_to_markets(market_filter=market_filter,
                                market_data_filter=market_data_filter,
                                conflate_ms=conflate_ms)

    logger.info("Starting the stream")
    # async_=True so the stream runs without blocking this loop below.
    stream.start(async_=True)

    logger.info(f"Saving data in file {output_sqlite_file}")
    connection, cursor = create_sqlite_database(output_sqlite_file)

    market_snapshot_no = 0

    # Consume one conflated snapshot per queue item until the stop
    # condition is met or the user interrupts.
    while True:
        try:
            market_books = output_queue.get()
            market_book = market_books[0]

            market_status = market_book.status
            market_inplay = market_book.inplay
            publish_time = market_book.publish_time

            # Stop the stream if the conditions are met
            if allow_inplay:
                if market_status == 'CLOSED':
                    break
            else:
                if market_status == 'CLOSED' or market_inplay is True:
                    break

            insert_in_market_status_table(cursor, publish_time, market_status,
                                          market_inplay)

            for runner in market_book.runners:
                selection = selections[runner.selection_id]
                selection_status = runner.status

                insert_in_selection_status_table(cursor, publish_time,
                                                 selection, selection_status)

                for back in runner.ex.available_to_back:
                    insert_in_available_to_back_table(cursor, publish_time,
                                                      selection, back.price,
                                                      back.size)

                for lay in runner.ex.available_to_lay:
                    insert_in_available_to_lay_table(cursor, publish_time,
                                                     selection, lay.price,
                                                     lay.size)

                for volume in runner.ex.traded_volume:
                    insert_in_traded_volume_table(cursor, publish_time,
                                                  selection, volume.price,
                                                  volume.size)

            # One commit per snapshot rather than per row.
            connection.commit()

            market_snapshot_no = market_snapshot_no + 1
            logger.info("Market snapshot #%s stored.", market_snapshot_no)

        except KeyboardInterrupt:
            logger.info("Exiting program (Keyboard interrupt)")
            break

    logger.info(
        "Stopping the stream and logging out from Betfair. This may take a few"
        " seconds.")
    stream.stop()
    try:
        trading.logout()
    except APIError:
        logger.warning("Failed to log out from Betfair: Connection error.")
    cursor.close()
    connection.close()

    logger.info("Compressing the Sqlite file into ZIP file %s",
                output_zip_file)
    with zipfile.ZipFile(output_zip_file, 'w', zipfile.ZIP_DEFLATED) as zip_f:
        zip_f.write(output_sqlite_file, os.path.basename(output_sqlite_file))
    # The raw .db is removed once it is safely inside the archive.
    os.remove(output_sqlite_file)

    return output_zip_file
Beispiel #9
0
# Snapshots from the stream are delivered into this queue.
output_queue = queue.Queue()

# create stream listener
listener = betfairlightweight.StreamListener(
    output_queue=output_queue,
)

# create stream
stream = trading.streaming.create_stream(
    listener=listener,
)

# create filters (GB WIN racing)
market_filter = streaming_market_filter(
    event_type_ids=['7'],  # '7' = racing (see block comment above)
    country_codes=['GB'],
    market_types=['WIN'],
)
market_data_filter = streaming_market_data_filter(
    fields=['EX_BEST_OFFERS', 'EX_MARKET_DEF'],
    ladder_levels=3,  # top 3 price levels per side
)

# subscribe
streaming_unique_id = stream.subscribe_to_markets(
    market_filter=market_filter,
    market_data_filter=market_data_filter,
    conflate_ms=1000,  # send update every 1000ms
)

# start stream
Beispiel #10
0
                self.place_order(market, order)

    def process_orders(self, market, orders):
        # Partially cancels a stale unmatched order: once an EXECUTABLE
        # order has been live for more than 5 seconds with exactly 2.00
        # remaining, reduce the unmatched size by 1.51.
        for order in orders:
            if order.status == OrderStatus.EXECUTABLE:
                if order.elapsed_seconds and order.elapsed_seconds > 5:
                    # print(order.bet_id, order.average_price_matched, order.size_matched)
                    if order.size_remaining == 2.00:
                        self.cancel_order(market, order, size_reduction=1.51)
                # self.update_order(market, order, "PERSIST")
                # if order.order_type.price == 1.01 and order.size_remaining == 0.49:
                #     self.replace_order(market, order, 1.02)
                # if order.order_type.price == 1.02:
                #     self.replace_order(market, order, 1.03)
                # if order.order_type.price == 1.03:
                #     self.replace_order(market, order, 1.05)
                pass


# Wire the strategy into a flumine framework against one market.
trading = betfairlightweight.APIClient("username")
client = clients.BetfairClient(trading)

framework = Flumine(client=client)

# Strategy subscribed to a single market by ID.
strategy = ExampleStrategy(
    market_filter=streaming_market_filter(market_ids=["1.170378175"]),
)
framework.add_strategy(strategy)

framework.run()
Beispiel #11
0
trading = bf_utils.api_login()

# grab a horse race
test_market = bf_utils.grab_market_id()

# create queue
output_queue = queue.Queue()

# create stream listener
listener = betfairlightweight.StreamListener(output_queue=output_queue)

# create stream
stream = trading.streaming.create_stream(listener=listener)

# create filters (GB WIN racing)
market_filter = streaming_market_filter(market_ids=[test_market])
market_data_filter = streaming_market_data_filter(
    fields=["EX_MARKET_DEF", "EX_BEST_OFFERS"], ladder_levels=3)

# subscribe
streaming_unique_id = stream.subscribe_to_markets(
    market_filter=market_filter,
    market_data_filter=market_data_filter,
    conflate_ms=1000,  # send update every 1000ms
)

# start stream in a new thread (in production would need err handling)
# daemon=True: the stream thread will not block interpreter exit
t = threading.Thread(target=stream.start, daemon=True)
t.start()

# check for updates in output queue
                runner_name = runner_names[runner.selection_id]
                best_back_price = utils.get_price(runner.ex.available_to_back,
                                                  0)
                # betconnect
                selection = selections_lookup[runner_name]
                max_price = selection.max_price

                diff = (1 / max_price) - (1 / best_back_price)

                print(runner_name, best_back_price, max_price, round(diff, 3))


framework = Flumine()

# add clients: Betfair for streaming prices, BetConnect for comparison
betfair_client = clients.BetfairClient(
    betfairlightweight.APIClient("username"))
framework.add_client(betfair_client)

betconnect_client = clients.BetConnectClient(
    betconnect.APIClient("username", "password", "apiKey", "ppURL"))
framework.add_client(betconnect_client)

strategy = ExampleStrategy(
    market_filter=streaming_market_filter(market_ids=["1.196548740"]),
    streaming_timeout=2,  # NOTE(review): presumably seconds — confirm against flumine docs
)
framework.add_strategy(strategy)

framework.run()
Beispiel #13
0
    async def send_subscription_message(
        self,
        market_ids: list = None,
        betting_types: list = None,
        event_type_ids: list = None,
        event_ids: list = None,
        turn_in_play_enabled: bool = None,
        market_types: list = None,
        venues: list = None,
        country_codes: list = None,
        race_types: list = None,
        initial_clk: str = None,
        clk: str = None,
        conflate_ms: int = None,
        heartbeat_ms: int = None,
        segmentation_enabled: bool = True,
        subscribe_book_updates=True,
        subscribe_trade_updates=True,
        subscribe_market_definitions=True,
    ):
        """
        Build and send a "marketSubscription" message over the connection.

        See `betfairlightweight.filters.streaming_market_filter` for full docstring

        :param market_ids:
        :param betting_types:
        :param event_type_ids:
        :param event_ids:
        :param turn_in_play_enabled:
        :param market_types:
        :param venues:
        :param country_codes:
        :param race_types:
        :param initial_clk: Sent as "initialClk" in the message
        :param clk: Sent as "clk" in the message
        :param conflate_ms: Sent as "conflateMs" in the message
        :param heartbeat_ms: Sent as "heartbeatMs" in the message
        :param segmentation_enabled: Sent as "segmentationEnabled" in the message
        :param subscribe_book_updates: Subscribe to market orderbook events
        :param subscribe_trade_updates: Subscribe to market trade events
        :param subscribe_market_definitions: Subscribe to market definition updates
        :return:
        """
        # At least one market-selection criterion must be supplied.
        # NOTE(review): assert-based validation is stripped under `python -O`;
        # consider raising ValueError instead.
        filters = (
            market_ids,
            betting_types,
            event_type_ids,
            event_ids,
            turn_in_play_enabled,
            market_types,
            venues,
            country_codes,
            race_types,
        )
        assert any(filters), "Must pass at least one filter"
        assert any(
            (subscribe_book_updates, subscribe_trade_updates)
        ), "Must subscribe to either book updates or trades"
        if market_ids is not None:
            # TODO - Log a warning about inefficiencies of specific market ids - Won't receive any updates for new
            #  markets that fit criteria like when using event type / market type etc
            # logging.warning()
            pass
        market_filter = streaming_market_filter(
            market_ids=market_ids,
            betting_types=betting_types,
            event_type_ids=event_type_ids,
            event_ids=event_ids,
            turn_in_play_enabled=turn_in_play_enabled,
            market_types=market_types,
            venues=venues,
            country_codes=country_codes,
            race_types=race_types,
        )
        # Translate the subscribe_* flags into streaming data fields.
        data_fields = []
        if subscribe_book_updates:
            data_fields.append("EX_ALL_OFFERS")
        if subscribe_trade_updates:
            data_fields.append("EX_TRADED")
        if subscribe_market_definitions:
            data_fields.append("EX_MARKET_DEF")
        market_data_filter = streaming_market_data_filter(
            fields=data_fields,
        )

        message = {
            "op": "marketSubscription",
            "id": self.unique_id,
            "marketFilter": market_filter,
            "marketDataFilter": market_data_filter,
            "initialClk": initial_clk,
            "clk": clk,
            "conflateMs": conflate_ms,
            "heartbeatMs": heartbeat_ms,
            "segmentationEnabled": segmentation_enabled,
        }
        await self.send(raw=message)
Beispiel #14
0
# create trading instance (app key must be activated for streaming)
username = os.environ.get('username')
trading = betfairlightweight.APIClient(username)
trading.login()

# create queue
output_queue = queue.Queue()

# create stream listener
listener = betfairlightweight.StreamListener(output_queue=output_queue, )

# create stream
stream = trading.streaming.create_stream(listener=listener, )

# create filters (single market by id; empty data filter = library defaults)
market_filter = streaming_market_filter(market_ids=['156436827'])
market_data_filter = streaming_market_data_filter()

# subscribe
streaming_unique_id = stream.subscribe_to_markets(
    market_filter=market_filter,
    market_data_filter=market_data_filter,
    conflate_ms=1000,  # send update every 1000ms
)

# start stream
# FIX: keyword is `async_` (as used consistently elsewhere in this file),
# not `_async`, which would raise TypeError.
stream.start(async_=True)
"""
Data can also be accessed by using the snap function in the listener, e.g:

    market_books = listener.snap(
Beispiel #15
0
            score = market.context["score"]
            print(
                score.match_status,
                score.current_game,
                score.current_set,
                score.current_point,
                score.score.home.score,
                score.score.away.score,
            )


trading = betfairlightweight.APIClient("username")
client = clients.BetfairClient(trading)

framework = Flumine(client=client)

# Strategy subscribed to a single market by ID.
strategy = ExampleStrategy(
    market_filter=streaming_market_filter(market_ids=["1.172415939"]), )
framework.add_strategy(strategy)

# Background worker that polls the in-play score service for
# event_type_id "2" on a 30-unit interval after an initial delay of 4
# (NOTE(review): presumably seconds — confirm against BackgroundWorker docs).
framework.add_worker(
    BackgroundWorker(
        framework,
        poll_in_play_service,
        func_kwargs={"event_type_id": "2"},
        interval=30,
        start_delay=4,
    ))

framework.run()
Beispiel #16
0
def data_collection_pipeline() -> str:
    """Pipeline to collect Betfair odds market ladder streaming data.

    Streams ladder snapshots for one market and appends them to a CSV
    file until interrupted (Ctrl+C).

    Returns:
        Path of the output CSV file.
    """
    logging.basicConfig(
        format='%(levelname)-8s | %(asctime)s | %(name)s:  %(message)s',
        level=logging.INFO,
        # stream=sys.stdout
    )

    market_id, output_dir, conflate_ms = parse_command_line_args()

    trading = bfl.APIClient(username=username,
                            password=password,
                            app_key=app_key,
                            cert_files=[cert_file, cert_key_file])

    logger.info("Logging in to Betfair")
    trading.login()

    # Event and market information
    event_type, event, competition = get_event_info(trading, market_id)
    market_name, market_start_time, selections = get_market_info(
        trading, market_id)

    # Output file path
    output_file_name = get_output_file_name(event_type, event, competition,
                                            market_name, market_start_time)
    output_file = os.path.join(output_dir, output_file_name)

    # Market stream
    logger.info("Initialising output queue")
    output_queue = queue.Queue()

    logger.info("Initialising Betfair stream listener")
    listener = bfl.StreamListener(output_queue)

    logger.info("Creating the Betfair market stream")
    stream = trading.streaming.create_stream(listener=listener)

    logger.info("Setting the market filter to market_id=%s", market_id)
    market_filter = streaming_market_filter(market_ids=[market_id])

    logger.info("Initialising streaming market data filter")
    market_data_filter = streaming_market_data_filter(
        fields=['EX_MARKET_DEF', 'EX_ALL_OFFERS', 'EX_TRADED'], )

    logger.info("Subscribing to the market")
    stream.subscribe_to_markets(market_filter=market_filter,
                                market_data_filter=market_data_filter,
                                conflate_ms=conflate_ms)

    logger.info("Starting the stream")
    # async_=True so the stream runs without blocking the loop below.
    stream.start(async_=True)

    logger.info(f"Saving data in file {output_file}")
    # BUG FIX: files written with csv.writer must be opened with
    # newline='' (per the csv module docs); without it, '\r\r\n' line
    # endings are produced on Windows.
    with open(output_file, 'w', newline='') as f:
        f_csv = csv.writer(f)

        csv_header = ['selection', 'time', 'price', 'size', 'side']
        f_csv.writerow(csv_header)

        market_snapshot_no = 0

        # One conflated snapshot per queue item; loop until Ctrl+C.
        while True:
            try:
                market_books = output_queue.get()
                market_book = market_books[0]

                publish_time = market_book.publish_time

                rows = []
                for runner in market_book.runners:
                    selection_id = runner.selection_id

                    for back in runner.ex.available_to_back:
                        rows.append((selections[selection_id], publish_time,
                                     back.price, back.size, 'back'))

                    for lay in runner.ex.available_to_lay:
                        rows.append((selections[selection_id], publish_time,
                                     lay.price, lay.size, 'lay'))

                f_csv.writerows(rows)

                market_snapshot_no = market_snapshot_no + 1
                logger.info("Market snapshot #%s stored.", market_snapshot_no)

            except KeyboardInterrupt:
                logger.info("Exiting program (Keyboard interrupt)")
                break

    logger.info("Stopping the stream and logging out from Betfair.")
    stream.stop()
    trading.logout()

    return output_file
Beispiel #17
0
    def test_streaming_market_filter(self):
        # No arguments -> empty filter dict.
        response = streaming_market_filter()
        assert response == {}

        # Supplied kwargs are camelCased in the resulting payload.
        response = streaming_market_filter(market_ids=[1, 2])
        assert response == {"marketIds": [1, 2]}
# JSON logging setup: custom format, UTC timestamps, INFO level.
log_handler = logging.StreamHandler()
formatter = jsonlogger.JsonFormatter(custom_format)
formatter.converter = time.gmtime  # emit log timestamps in UTC/GMT
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
logger.setLevel(logging.INFO)


class ExampleStrategy(BaseStrategy):
    """Minimal strategy that prints each sports-data-stream update."""

    def process_sports_data(self, market, sports_data) -> None:
        # called on each update from sports-data-stream
        print(market, sports_data)


trading = betfairlightweight.APIClient("username")
client = clients.BetfairClient(trading)

framework = Flumine(client)

# Strategy on event type "4" MATCH_ODDS markets, with the cricket
# sports-data subscription enabled.
strategy = ExampleStrategy(
    market_filter=streaming_market_filter(
        event_type_ids=["4"], market_types=["MATCH_ODDS"]
    ),
    sports_data_filter=[
        "cricketSubscription"
    ],  # "cricketSubscription" and/or "raceSubscription"
)
framework.add_strategy(strategy)

framework.run()