Example no. 1
0
            def observe_market(observer):
                """Start a Betfair market stream that pushes every update to
                *observer* (an Rx-style observer with an ``on_next`` hook).

                Subscribes to all markets in ``self.todays_racecard`` and
                requests best offers, traded data, last traded price and
                market definitions at a ladder depth of 3. ``self`` is the
                enclosing instance captured by closure.
                """
                class FakeQueue:
                    # Queue-like adapter: the listener calls put(), which
                    # forwards the value straight to the observer.
                    def put(self, value):
                        observer.on_next(value)

                listener = betfairlightweight.StreamListener(
                    output_queue=FakeQueue(), lightweight=True)
                self._market_stream = self._client.streaming.create_stream(
                    listener=listener, description='BFG Market Stream')
                market_filter = streaming_market_filter(market_ids=[
                    market.marketId for market in self.todays_racecard
                ])
                market_data_filter = streaming_market_data_filter(
                    fields=[
                        'EX_BEST_OFFERS', 'EX_TRADED', 'EX_TRADED_VOL',
                        'EX_LTP', 'EX_MARKET_DEF'
                    ],
                    ladder_levels=3,  # Market depth
                )
                self._market_stream.subscribe_to_markets(
                    market_filter=market_filter,
                    market_data_filter=market_data_filter,
                    # conflate_ms=1000,
                )
                # BUG FIX: `async` is a reserved keyword (SyntaxError on
                # Python >= 3.7); betfairlightweight's keyword is `async_`.
                self._market_stream.start(async_=True)
Example no. 2
0
            def observe_order(observer):
                """Start a Betfair order stream that pushes every update to
                *observer* (an Rx-style observer with an ``on_next`` hook).

                ``self`` is the enclosing instance captured by closure.
                """
                class FakeQueue:
                    # Queue-like adapter: the listener calls put(), which
                    # forwards the value straight to the observer.
                    def put(self, value):
                        observer.on_next(value)

                listener = betfairlightweight.StreamListener(
                    output_queue=FakeQueue(), lightweight=False)
                self._order_stream = self._client.streaming.create_stream(
                    listener=listener, description='BFG Order Stream')
                self._order_stream.subscribe_to_orders()
                # Consistency fix: use the `async_` keyword like the rest of
                # this file; `_async` was the pre-rename spelling used by old
                # betfairlightweight releases.
                self._order_stream.start(async_=True)
Example no. 3
0
    def start_betfair_ladder_stream(self, market_id: str,
                                    conflate_ms: float) -> queue.Queue:
        """Start streaming the Betfair price ladder for a single market.

        Args:
            market_id: Betfair market ID.
            conflate_ms: Conflation rate in milliseconds (capped at 120000).

        Returns:
            Queue onto which the listener pushes ladder updates.
        """
        # Reuse the running stream instead of opening a second one.
        if self.stream is not None:
            logger.info(
                "There is already a Betfair market stream running. Before "
                "starting a new stream, the existing one must be stopped.")
            return self.stream.listener.output_queue

        logger.info("Initialising output queue.")
        output_queue = queue.Queue()

        logger.info("Initialising Betfair stream listener.")
        stream_listener = betfairlightweight.StreamListener(output_queue)

        logger.info("Creating the Betfair market stream.")
        market_stream = self._client.streaming.create_stream(
            listener=stream_listener)

        logger.info("Setting the market filter to market_id=%s.", market_id)
        single_market_filter = streaming_market_filter(market_ids=[market_id])

        logger.info("Initialising streaming market data filter.")
        # 'EX_BEST_OFFERS_DISP' includes virtual bets; use 'EX_ALL_OFFERS'
        # instead to exclude them.
        data_filter = streaming_market_data_filter(
            fields=['EX_MARKET_DEF', 'EX_BEST_OFFERS_DISP'],
            ladder_levels=10)

        logger.info("Subscribing to the market.")
        market_stream.subscribe_to_markets(
            market_filter=single_market_filter,
            market_data_filter=data_filter,
            conflate_ms=min(conflate_ms, 120000),
        )

        logger.info("Starting the Betfair market stream.")
        market_stream.start(async_=True)

        self.stream = market_stream

        return output_queue
Example no. 4
0
    def create_stream(self, output_queue, market_ids):
        """Create a Betfair stream that establishes the connection once and
        pushes MarketBook updates onto *output_queue* in real time.

        Args:
            output_queue: Queue that will receive MarketBook updates.
            market_ids: Iterable of Betfair market IDs to subscribe to.

        Returns:
            The created (not yet started) stream object.
        """
        # create stream listener
        listener = betfairlightweight.StreamListener(output_queue=output_queue)

        # create stream
        stream = self.api.streaming.create_stream(listener=listener)

        market_filter = filters.streaming_market_filter(market_ids=market_ids)
        market_data_filter = filters.streaming_market_data_filter(
            fields=["EX_BEST_OFFERS", "EX_MARKET_DEF"], ladder_levels=3)

        # subscribe (the returned subscription ID is not needed here, so the
        # previously unused `streaming_unique_id` local was dropped)
        stream.subscribe_to_markets(
            market_filter=market_filter,
            market_data_filter=market_data_filter,
            conflate_ms=2000,  # send an update at most every 2000 ms
        )

        return stream
    "data/2021_10_OctRacingAUPro.tar",
    "data/2021_11_NovRacingAUPro.tar",
    "data/2021_12_DecRacingAUPro.tar",
]

# Logging: suppress everything below FATAL.
logging.basicConfig(level=logging.FATAL)

# Trading client. The credentials are placeholders: replaying historical
# data does not require a login.
trading = betfairlightweight.APIClient("username", "password", "appkey")

# Stream listener tuned for backtesting over recorded data.
listener = betfairlightweight.StreamListener(
    max_latency=None,           # ignore latency errors
    output_queue=None,          # use generator rather than a queue (faster)
    lightweight=False,          # parse into resource objects, not raw dicts
    update_clk=False,           # skip clk updates (not required when backtesting)
    cumulative_runner_tv=True,
    calculate_market_tv=True,
)


# loading from tar and extracting files
def load_markets(file_paths: List[str]):
    for file_path in file_paths:
        if os.path.isdir(file_path):
            for path in glob.iglob(file_path + '**/**/*.bz2', recursive=True):
                f = bz2.BZ2File(path, 'rb')
                yield f
                f.close()
        elif os.path.isfile(file_path):
            ext = os.path.splitext(file_path)[1]
Example no. 6
0
trading = betfairlightweight.APIClient(username,
                                       pw,
                                       app_key=app_key,
                                       certs=certs_path)
trading.login()

# Log in - if you haven't set up your certs use this
# trading = betfairlightweight.APIClient(username, pw, app_key=app_key)
# trading.login_interactive()

# create queue
output_queue = queue.Queue()

# create stream listener
print('Creating listener')
listener = betfairlightweight.StreamListener(output_queue=output_queue)

# create stream
print('Creating stream')
stream = trading.streaming.create_stream(listener=listener)

# Market filter: Australian thoroughbred races starting within the next
# 24 hours. (Thoroughbreds event type id is 7.)
thoroughbreds_event_filter = betfairlightweight.filters.market_filter(
    event_type_ids=['7'],
    market_countries=['AU'],
    market_start_time={
        # BUG FIX: strftime's '%T' is a POSIX extension not supported on all
        # platforms (e.g. Windows); spell it out as '%H:%M:%S'.
        'to': (datetime.datetime.utcnow() +
               datetime.timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ")
    })

# Get a list of all thoroughbred events as objects
def data_collection_pipeline() -> str:
    """Pipeline to collect Betfair odds market ladder streaming data.

    Streams one market's ladder into an SQLite database until the market
    closes (or goes in-play, unless allowed), then compresses the database
    into a ZIP archive.

    Returns:
        Path of the output ZIP file.
    """
    logging.basicConfig(
        format='%(levelname)-8s | %(asctime)s | %(name)s:  %(message)s',
        level=logging.INFO)

    (market_id, output_dir, conflate_ms, no_virtual_bets, allow_inplay,
     mins_before_start) = parse_command_line_args()

    trading = bfl.APIClient(username=username,
                            password=password,
                            app_key=app_key,
                            cert_files=[cert_file, cert_key_file])

    logger.info("Logging in to Betfair")
    trading.login()

    # Event and market information
    event_type, event, competition = get_event_info(trading, market_id)
    market_name, market_start_time, selections = get_market_info(
        trading, market_id)

    # Optionally wait (logged out) until shortly before the event starts.
    if mins_before_start is not None:
        logger.info(
            "Logging off from Betfair and waiting until %s minutes before the "
            "start of the event. Press Ctrl+C to quit.", mins_before_start)

        trading.logout()

        now = datetime.utcnow()
        try:
            while market_start_time - now >= \
                    timedelta(minutes=mins_before_start):
                time.sleep(1)
                now = datetime.utcnow()
        except KeyboardInterrupt:
            logger.info("Exiting program (Keyboard interrupt)")
            # BUG FIX: raise SystemExit directly; the `exit()` helper comes
            # from the `site` module and is not guaranteed to be available.
            raise SystemExit(0)

        logger.info("Logging in to Betfair again.")
        trading.login()

    # Output file path
    output_file_name = get_output_file_name(event_type, event, competition,
                                            market_name, market_start_time)
    output_sqlite_file = os.path.join(output_dir, output_file_name + '.db')
    output_zip_file = os.path.join(output_dir, output_file_name + '.zip')

    # Market stream
    logger.info("Initialising output queue")
    output_queue = queue.Queue()

    logger.info("Initialising Betfair stream listener")
    listener = bfl.StreamListener(output_queue)

    logger.info("Creating the Betfair market stream")
    stream = trading.streaming.create_stream(listener=listener)

    logger.info("Setting the market filter to market_id=%s", market_id)
    market_filter = streaming_market_filter(market_ids=[market_id])

    logger.info("Initialising streaming market data filter")
    # 'EX_ALL_OFFERS' excludes virtual bets; 'EX_BEST_OFFERS_DISP' includes
    # them.
    if no_virtual_bets:
        market_data_fields = ['EX_MARKET_DEF', 'EX_ALL_OFFERS', 'EX_TRADED']
    else:
        market_data_fields = [
            'EX_MARKET_DEF', 'EX_BEST_OFFERS_DISP', 'EX_TRADED'
        ]
    market_data_filter = streaming_market_data_filter(
        fields=market_data_fields)

    logger.info("Subscribing to the market")
    stream.subscribe_to_markets(market_filter=market_filter,
                                market_data_filter=market_data_filter,
                                conflate_ms=conflate_ms)

    logger.info("Starting the stream")
    stream.start(async_=True)

    # Consistency fix: lazy %-style logging like the rest of this function
    # (was an f-string).
    logger.info("Saving data in file %s", output_sqlite_file)
    connection, cursor = create_sqlite_database(output_sqlite_file)

    market_snapshot_no = 0

    # Consume market snapshots until the stop condition is met or the user
    # interrupts with Ctrl+C.
    while True:
        try:
            market_books = output_queue.get()
            market_book = market_books[0]

            market_status = market_book.status
            market_inplay = market_book.inplay
            publish_time = market_book.publish_time

            # Stop the stream if the conditions are met
            if allow_inplay:
                if market_status == 'CLOSED':
                    break
            else:
                if market_status == 'CLOSED' or market_inplay is True:
                    break

            insert_in_market_status_table(cursor, publish_time, market_status,
                                          market_inplay)

            for runner in market_book.runners:
                selection = selections[runner.selection_id]
                selection_status = runner.status

                insert_in_selection_status_table(cursor, publish_time,
                                                 selection, selection_status)

                for back in runner.ex.available_to_back:
                    insert_in_available_to_back_table(cursor, publish_time,
                                                      selection, back.price,
                                                      back.size)

                for lay in runner.ex.available_to_lay:
                    insert_in_available_to_lay_table(cursor, publish_time,
                                                     selection, lay.price,
                                                     lay.size)

                for volume in runner.ex.traded_volume:
                    insert_in_traded_volume_table(cursor, publish_time,
                                                  selection, volume.price,
                                                  volume.size)

            # One commit per snapshot keeps the database consistent if the
            # process dies mid-stream.
            connection.commit()

            market_snapshot_no += 1
            logger.info("Market snapshot #%s stored.", market_snapshot_no)

        except KeyboardInterrupt:
            logger.info("Exiting program (Keyboard interrupt)")
            break

    logger.info(
        "Stopping the stream and logging out from Betfair. This may take a few"
        " seconds.")
    stream.stop()
    try:
        trading.logout()
    except APIError:
        # Best-effort logout: a dropped connection should not abort the
        # archiving step below.
        logger.warning("Failed to log out from Betfair: Connection error.")
    cursor.close()
    connection.close()

    logger.info("Compressing the Sqlite file into ZIP file %s",
                output_zip_file)
    with zipfile.ZipFile(output_zip_file, 'w', zipfile.ZIP_DEFLATED) as zip_f:
        zip_f.write(output_sqlite_file, os.path.basename(output_sqlite_file))
    os.remove(output_sqlite_file)

    return output_zip_file
Example no. 8
0
def data_collection_pipeline() -> str:
    """Pipeline to collect Betfair odds market ladder streaming data.

    Streams one market's ladder and appends every available back/lay price
    level to a CSV file until the user interrupts with Ctrl+C.

    Returns:
        Path of the output CSV file.
    """
    logging.basicConfig(
        format='%(levelname)-8s | %(asctime)s | %(name)s:  %(message)s',
        level=logging.INFO,
        # stream=sys.stdout
    )

    market_id, output_dir, conflate_ms = parse_command_line_args()

    trading = bfl.APIClient(username=username,
                            password=password,
                            app_key=app_key,
                            cert_files=[cert_file, cert_key_file])

    logger.info("Logging in to Betfair")
    trading.login()

    # Event and market information
    event_type, event, competition = get_event_info(trading, market_id)
    market_name, market_start_time, selections = get_market_info(
        trading, market_id)

    # Output file path
    output_file_name = get_output_file_name(event_type, event, competition,
                                            market_name, market_start_time)
    output_file = os.path.join(output_dir, output_file_name)

    # Market stream
    logger.info("Initialising output queue")
    output_queue = queue.Queue()

    logger.info("Initialising Betfair stream listener")
    listener = bfl.StreamListener(output_queue)

    logger.info("Creating the Betfair market stream")
    stream = trading.streaming.create_stream(listener=listener)

    logger.info("Setting the market filter to market_id=%s", market_id)
    market_filter = streaming_market_filter(market_ids=[market_id])

    logger.info("Initialising streaming market data filter")
    market_data_filter = streaming_market_data_filter(
        fields=['EX_MARKET_DEF', 'EX_ALL_OFFERS', 'EX_TRADED'])

    logger.info("Subscribing to the market")
    stream.subscribe_to_markets(market_filter=market_filter,
                                market_data_filter=market_data_filter,
                                conflate_ms=conflate_ms)

    logger.info("Starting the stream")
    stream.start(async_=True)

    # Consistency fix: lazy %-style logging like the rest of this function
    # (was an f-string).
    logger.info("Saving data in file %s", output_file)
    # BUG FIX: the csv module requires files opened with newline='' to
    # avoid spurious blank lines on platforms with \r\n line endings.
    with open(output_file, 'w', newline='') as f:
        f_csv = csv.writer(f)

        csv_header = ['selection', 'time', 'price', 'size', 'side']
        f_csv.writerow(csv_header)

        market_snapshot_no = 0

        # Consume snapshots until the user interrupts with Ctrl+C.
        while True:
            try:
                market_books = output_queue.get()
                market_book = market_books[0]

                publish_time = market_book.publish_time

                rows = []
                for runner in market_book.runners:
                    selection_id = runner.selection_id

                    for back in runner.ex.available_to_back:
                        rows.append((selections[selection_id], publish_time,
                                     back.price, back.size, 'back'))

                    for lay in runner.ex.available_to_lay:
                        rows.append((selections[selection_id], publish_time,
                                     lay.price, lay.size, 'lay'))

                f_csv.writerows(rows)

                market_snapshot_no += 1
                logger.info("Market snapshot #%s stored.", market_snapshot_no)

            except KeyboardInterrupt:
                logger.info("Exiting program (Keyboard interrupt)")
                break

    logger.info("Stopping the stream and logging out from Betfair.")
    stream.stop()
    trading.logout()

    return output_file
Example no. 9
0
# Keep only Australian WIN markets that are not trot or pace (harness) races.
def filter_market(market: MarketBook) -> bool:
    definition = market.market_definition
    if definition.country_code != 'AU' or definition.market_type != 'WIN':
        return False
    # The third component of the split market name is the race category.
    category = split_anz_horse_market_name(definition.name)[2]
    return category != 'trot' and category != 'pace'


# setup logging (suppress everything below FATAL)
logging.basicConfig(level=logging.FATAL)

# create trading instance (don't need username/password)
# NOTE(review): credentials are placeholders — this client is only used for
# historical-file streaming below, so no login is performed.
trading = betfairlightweight.APIClient("username", "password")

# create listener; max_latency=None disables latency error checks, which
# would otherwise fire when replaying recorded data faster than real time
listener = betfairlightweight.StreamListener(max_latency=None)

# record prices to a file
with open("output.csv", "w") as output:
    # defining column headers
    output.write(
        "market_id,event_date,country,track,market_name,selection_id,selection_name,result,bsp,pp_min,pp_max,pp_wap,pp_ltp,pp_volume,ip_min,ip_max,ip_wap,ip_ltp,ip_volume\n"
    )

    for file_obj in load_markets(market_paths):
        stream = trading.streaming.create_historical_generator_stream(
            file_path=file_obj,
            listener=listener,
        )

        def get_pre_post_final(s):