示例#1
0
    def test_historical_stream(self):
        """Replay a race-subscription historical file and verify cache state."""
        client = betfairlightweight.APIClient(
            "username", "password", app_key="appKey"
        )
        stream = client.streaming.create_historical_stream(
            file_path="tests/resources/historicaldata/RACE-1.140075353",
            listener=StreamListener(),
            operation="raceSubscription",
        )
        stream.start()

        # Every cached race must be able to build its resource object.
        for race_cache in stream.listener.stream._caches.values():
            race_cache.create_resource(1, False)

        listener = stream.listener
        assert listener.stream_type == "raceSubscription"
        assert listener.stream_unique_id == 0
        assert listener.stream._updates_processed == 4
        assert len(listener.stream._caches) == 2

        # Per-market race-change counts (regression values).
        assert len(listener.stream._caches.get("1.1234567").rrc) == 2
        assert len(listener.stream._caches.get("1.173853449").rrc) == 4

        assert stream._running is False
示例#2
0
 def __init__(
     self,
     client: betfairlightweight.APIClient,
     market_filter: dict,
     market_data_filter: dict,
     conflate_ms: int = None,  # NOTE(review): effectively Optional[int]
     streaming_unique_id: int = 10000,  # NOTE(review): original comment said "10 seconds" — this is an id, not a duration
 ):
     """Initialise the streaming worker thread.

     Args:
         client: authenticated betfairlightweight API client.
         market_filter: market filter dict for the subscription.
         market_data_filter: market data fields dict for the subscription.
         conflate_ms: presumably the conflation rate in ms (per the name);
             None leaves it unset — confirm against the streaming API docs.
         streaming_unique_id: identifier tagging this subscription.
     """
     # Daemon thread named after the concrete subclass so it does not block
     # interpreter shutdown.
     threading.Thread.__init__(self, daemon=True, name=self.__class__.__name__)
     self.client = client
     self.market_filter = market_filter
     self.market_data_filter = market_data_filter
     self.conflate_ms = conflate_ms
     self.streaming_unique_id = streaming_unique_id
     self.stream = None  # populated later, once the connection is created
     self.output_queue = queue.Queue()  # wired into the listener below
     self.listener = StreamListener(output_queue=self.output_queue)
示例#3
0
    def test_historical_generator_stream(self):
        """Regression: generator-based replay matches the pre-processed JSON."""
        client = betfairlightweight.APIClient(
            "username", "password", app_key="appKey"
        )
        stream = client.streaming.create_historical_generator_stream(
            file_path="tests/resources/historicaldata/BASIC-1.132153978",
            listener=StreamListener(lightweight=True),
        )
        generator = stream.get_generator()
        actual = [update[0] for update in generator()]

        expected_path = (
            "tests/resources/historicaldata/BASIC-1.132153978-processed.json"
        )
        with open(expected_path, "r") as handle:
            expected_data = load(handle)

        assert expected_data == actual
示例#4
0
    def test_historical_stream(self):
        """Synchronous historical replay: verify listener state and cache."""
        client = betfairlightweight.APIClient('username', 'password', app_key='appKey')
        stream = client.streaming.create_historical_stream(
            directory='tests/resources/historicaldata/BASIC-1.132153978',
            listener=StreamListener(),
        )
        stream.start(_async=False)

        listener = stream.listener
        assert listener.stream_type == 'marketSubscription'
        assert listener.stream_unique_id == 'HISTORICAL'
        assert listener.clk == '3522512789'
        assert listener.stream._updates_processed == 480
        assert len(listener.stream._caches) == 1

        cached_market = listener.stream._caches.get('1.132153978')
        assert len(cached_market.runners) == 14
        assert stream._running is False
示例#5
0
    def test_historical_stream(self):
        """Basic market-subscription replay: regression counts and cache checks."""
        client = betfairlightweight.APIClient(
            "username", "password", app_key="appKey"
        )
        stream = client.streaming.create_historical_stream(
            file_path="tests/resources/historicaldata/BASIC-1.132153978",
            listener=StreamListener(),
        )
        stream.start()

        listener = stream.listener
        assert listener.stream_type == "marketSubscription"
        assert listener.stream_unique_id == 0
        assert listener.clk == "3522512789"
        assert listener.stream._updates_processed == 480
        assert len(listener.stream._caches) == 1

        cached_market = listener.stream._caches.get("1.132153978")
        assert len(cached_market.runners) == 14
        assert stream._running is False
示例#6
0
    def test_historical_stream(self):
        """Cricket-subscription replay: cache is populated and resources build."""
        client = betfairlightweight.APIClient(
            "username", "password", app_key="appKey"
        )
        stream = client.streaming.create_historical_stream(
            file_path="tests/resources/historicaldata/CRICKET-1.179676557",
            listener=StreamListener(),
            operation="cricketSubscription",
        )
        stream.start()

        # Every cached match must be able to build its resource object.
        for match_cache in stream.listener.stream._caches.values():
            match_cache.create_resource(1, False)

        listener = stream.listener
        assert listener.stream_type == "cricketSubscription"
        assert listener.stream_unique_id == 0
        assert listener.stream._updates_processed == 3
        assert len(listener.stream._caches) == 1

        assert listener.stream._caches.get("1.179676557") is not None

        assert stream._running is False
示例#7
0
# Connection settings for the Postgres instance holding betfair data
# (local first, VPN host as fallback — confirm libpq multi-host semantics).
psql_dsn = {
    'host': 'localhost,s3.vpn.mele.law',
    'port': '5432',
    'user': '******',
    'dbname': 'betfair'
}
# NOTE(review): this rebinding shadows the imported `db` module with the
# DB instance — intentional here, but the module is unreachable afterwards.
db = db.DB(psql_dsn=psql_dsn)

# setup logging
logging.basicConfig(level=logging.INFO)

# create trading instance (don't need username/password)
trading = betfairlightweight.APIClient("username", "password", app_key='')

# create listener
# max_latency=None — presumably disables latency checks, appropriate for
# historical replay; confirm against StreamListener docs.
listener = StreamListener(max_latency=None)

def snapshot(_db, _market_book, _seconds_to_start):
    """Prepare a point-in-time snapshot of a market book for insertion.

    Returns early (without inserting) when a row for the derived race id
    already exists.

    Args:
        _db: database wrapper exposing data_exists(raceid).
        _market_book: betfair market book exposing market_id,
            market_definition and json().
        _seconds_to_start: seconds until the scheduled start.
    """
    # Derive an integer race id from the betfair market id, e.g.
    # '1.1234567' -> 11234567.
    _raceid = int(_market_book.market_id.replace('1.1', '11'))
    _milliseconds_to_start = _seconds_to_start * 1000
    _event_type = int(_market_book.market_definition.event_type_id)
    _country_code = _market_book.market_definition.country_code
    # BUG FIX: the original referenced the undefined global `market_book`
    # instead of the `_market_book` parameter, and hid the NameError behind
    # a bare except. json() may return bytes or str depending on the
    # library version; normalise to str explicitly.
    _payload = _market_book.json()
    if isinstance(_payload, bytes):
        _d = _payload.decode('utf-8')
    else:
        _d = _payload

    if _db.data_exists(_raceid):
        logging.info(f'{_raceid} already inserted')
        return
# Root directory of the downloaded historical data (year/month layout).
basepath = './data/2020/11'
# Day sub-directories, excluding macOS '.DS_Store' metadata entries.
days_list = [i for i in os.listdir(basepath) if '.DS_Store' not in i]

# Configure this to be the number of seconds before the jump you want to grab the price at
offset = 600

# Main Execution starts here
files = get_file_names(basepath, days_list)

data = []
# NOTE(review): only the first 50 files are processed — confirm this sampling
# limit is intentional before a full run.
for f_name in files[:50]:
    print(f_name)
    # FIX: close each compressed file deterministically instead of leaking
    # the handle (the original never closed f_pointer).
    # NOTE(review): assumes get_book_json fully consumes the generator before
    # the file is closed — TODO confirm.
    with bz2.BZ2File(f_name, 'rb') as f_pointer:
        listener = StreamListener(max_latency=None, lightweight=True)
        listener.register_stream(0, "marketSubscription")
        # create historical stream (update directory to your file location)
        stream = HistoricalStreamMod(file_stream=f_pointer, listener=listener)
        gen = stream.get_generator()
        mb = get_book_json(gen)
    if mb:
        data.extend(extract_data_json(mb))
df = pd.DataFrame(data,
                  columns=[
                      'date', 'track', 'name', 'market_id', 'selection_id',
                      'selection_name', 'status', 'p1', 'p2', 'p3', 'p4', 'p5',
                      'v1', 'v2', 'v3', 'v4', 'v5', 'l1', 'l2', 'l3', 'l4',
                      'l5', 'lv1', 'lv2', 'lv3', 'lv4', 'lv5'