# imports assumed from the fastf1 package layout these tests target
import logging
import os
import pickle

import fastf1 as ff1
from fastf1 import api, core, utils
from fastf1.track import Track  # assumed location of the Track helper used below


def test_history_mod_2(caplog):
    # api data sometimes goes back in time
    caplog.set_level(logging.INFO)
    session = core.get_session(2020, 3, 'FP2')
    session.load_laps()
    assert "The api attempted to rewrite history" in caplog.text  # the error
    assert "Laps loaded and saved!" in caplog.text  # indicates success

def test_history_mod_1(caplog):
    ff1.Cache.enable_cache("test_cache/")
    # api data sometimes goes back in time
    caplog.set_level(logging.INFO)
    session = core.get_session(2020, 'testing', 3)
    session.load_laps()
    assert "The api attempted to rewrite history" in caplog.text  # the warning
    assert "Laps loaded and saved!" in caplog.text  # indicates success

def test_no_manual_patch(caplog):
    # data cannot be merged and no manual patch is available
    caplog.set_level(logging.INFO)
    session = core.get_session(2020, 3, 'FP2')
    session.load_laps()
    assert "Failed to merge timing data and timing app data for driver 16. " \
           "No manual patch is available. Data for this driver will be missing!" \
           in caplog.text  # the error
    assert "Laps loaded and saved!" in caplog.text  # indicates success

def test_manual_patch_file(caplog):
    # test that manual loading of a patch file works
    caplog.set_level(logging.INFO)
    session = core.get_session(2020, 'testing', 3)
    session.load_laps()
    assert "Failed to merge timing data and timing app data for driver 5. " \
           "A manual patch was loaded instead." in caplog.text  # the warning
    assert "Laps loaded and saved!" in caplog.text  # indicates success

def test_partial_position_data(caplog):
    # RUS is missing the first half of the position data because F1 somehow
    # switches from development driver to RUS mid-session.
    # This requires recreating the missing data (empty) so that the data has
    # the correct size.
    caplog.set_level(logging.INFO)
    session = core.get_session(2020, 'Barcelona', 'FP2')
    session.load_laps()
    assert "Car data for driver 63 is incomplete!" in caplog.text  # the warning
    assert "Laps loaded and saved!" in caplog.text  # indicates success

def test_ergast_lookup_fail(caplog):
    ff1.Cache.enable_cache("test_cache/")
    # ergast lookup fails if data is requested too soon after a session ends
    caplog.set_level(logging.INFO)

    def fail_load(*args, **kwargs):
        raise Exception

    core.ergast.load = fail_load  # force function call to fail

    # rainy and short session, good for a fast test/quick loading
    session = core.get_session(2020, 3, 'FP2')
    session.load_laps()
    assert "Failed to load data from Ergast API!" in caplog.text  # the warning
    assert "Loaded data for" in caplog.text  # indicates success

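# A minimal sketch of the same failure-injection test using pytest's built-in
# `monkeypatch` fixture instead of assigning to `core.ergast.load` directly.
# Unlike the direct assignment above, the patch is undone automatically after
# the test, so the broken loader cannot leak into other tests. The function
# name is illustrative; the asserted log messages are copied from the test
# above.
def test_ergast_lookup_fail_isolated(caplog, monkeypatch):
    ff1.Cache.enable_cache("test_cache/")
    caplog.set_level(logging.INFO)

    def fail_load(*args, **kwargs):
        raise Exception

    # reverted automatically when the test finishes
    monkeypatch.setattr(core.ergast, 'load', fail_load)

    session = core.get_session(2020, 3, 'FP2')
    session.load_laps()
    assert "Failed to load data from Ergast API!" in caplog.text
    assert "Loaded data for" in caplog.text
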
def test_cache_used_and_clear(tmpdir):
    utils.enable_cache(tmpdir)

    # rainy and short session, good for a fast test/quick loading
    session = core.get_session(2020, 3, 'FP2')
    session.load_laps()

    # requests cache and pickled result should now exist
    # (sorted, as os.listdir returns entries in arbitrary order)
    assert sorted(os.listdir(tmpdir)) == [
        '2020-07-19_Hungarian_Grand_Prix_2020-07-17_Practice_2_laps.pkl',
        'fastf1_http_cache.sqlite'
    ]

    utils.clear_cache()  # should delete pickle files
    assert os.listdir(tmpdir) == ['fastf1_http_cache.sqlite']

    utils.clear_cache(deep=True)  # should clear requests http cache
    assert os.path.getsize(
        os.path.join(tmpdir, 'fastf1_http_cache.sqlite')) < 100000  # 100 kB

def load_from_working_dir(year, gp, session, working_dir):
    """Load session, position, telemetry, lap and track data for a session,
    downloading and pickling it into ``working_dir`` on first use and reading
    the pickles back on subsequent calls."""
    pickle_path = os.path.join(
        working_dir, 'pickle_{}_{}_{}/'.format(year, gp, session))

    if not os.path.exists(pickle_path):
        print('Data does not yet exist in working directory. Downloading...')
        os.makedirs(pickle_path)

        session = core.get_session(year, gp, session)
        pos = api.position(session.api_path)
        tel = api.car_data(session.api_path)
        laps_data, stream_data = api.timing_data(session.api_path)

        track = Track(pos)
        track.generate_track(visualization_frequency=250)

        pickle.dump(session, open(os.path.join(pickle_path, 'session'), 'wb'))
        pickle.dump(pos, open(os.path.join(pickle_path, 'pos'), 'wb'))
        pickle.dump(tel, open(os.path.join(pickle_path, 'tel'), 'wb'))
        pickle.dump(laps_data,
                    open(os.path.join(pickle_path, 'laps_data'), 'wb'))
        pickle.dump(track, open(os.path.join(pickle_path, 'track'), 'wb'))

        print('Finished loading!')
        return session, pos, tel, laps_data, track

    else:
        print('Loading existing data from working directory...')
        session = pickle.load(open(os.path.join(pickle_path, 'session'), 'rb'))
        pos = pickle.load(open(os.path.join(pickle_path, 'pos'), 'rb'))
        tel = pickle.load(open(os.path.join(pickle_path, 'tel'), 'rb'))
        laps_data = pickle.load(
            open(os.path.join(pickle_path, 'laps_data'), 'rb'))
        track = pickle.load(open(os.path.join(pickle_path, 'track'), 'rb'))
        print('Finished loading!')
        return session, pos, tel, laps_data, track

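# A minimal usage sketch for `load_from_working_dir`. The `data/` path and
# the `__main__` guard are illustrative, not part of the original helper.
# The first run downloads and pickles everything; later runs read the
# pickles from the working directory instead.
if __name__ == '__main__':
    session, pos, tel, laps_data, track = load_from_working_dir(
        2020, 'Barcelona', 'FP2', working_dir='data/')
    print(len(laps_data), 'laps loaded')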