def _load_telemetry(self):
    """Build the per-lap telemetry slices and lap start dates.

    Downloads car and position data for the session, resamples both per
    driver, then cuts out one telemetry frame for every timed lap.

    Returns:
        tuple: ``(event_telemetry, lap_start_date)`` — two lists aligned
        with ``self.laps.index``; entries are ``None`` for laps without
        a lap time or without usable telemetry.
    """
    resampled_car, resampled_pos, offsets = {}, {}, {}
    event_telemetry, lap_start_date = [], []

    logging.info("Getting telemetry data...")
    car_data = api.car_data(self.api_path)
    logging.info("Getting position data...")
    position = api.position(self.api_path)

    logging.info("Resampling telemetry...")
    for driver in self.laps['DriverNumber'].unique():
        if driver in car_data:
            resampled_car[driver], offsets[driver] = \
                self._resample(car_data[driver])
        else:
            logging.warning(f"Could not find telemetry data for driver {driver}")
        if driver in position:
            resampled_pos[driver], _ = self._resample(position[driver])
        else:
            logging.warning(f"Could not find position data for driver {driver}")

    self.car_data, self.position = resampled_car, resampled_pos

    # Reference-dependent data (track position, driver ahead) is only
    # available when position data was delivered at all.
    can_find_reference = position != {}
    if can_find_reference:
        self._augment_position()

    # Map car number -> three-letter driver code for the DriverAhead column.
    d_map = {r['number']: r['Driver']['code'] for r in self.results}

    logging.info("Creating laps...")
    for i in self.laps.index:
        _log_progress(i, len(self.laps.index))
        lap = self.laps.loc[i]
        driver = lap['DriverNumber']

        # Untimed laps or drivers without telemetry get placeholders.
        if pd.isnull(lap['LapTime']) or driver not in resampled_car:
            event_telemetry.append(None)
            lap_start_date.append(None)
            continue

        telemetry = self._slice_stream(resampled_car[driver], lap)
        if not len(telemetry.index):
            logging.warning(f"Empty telemetry slice from lap {lap['LapNumber']} of driver {driver}")
            event_telemetry.append(None)
            lap_start_date.append(None)
            continue

        if driver in resampled_pos:
            telemetry = self._inject_position(resampled_pos[driver], lap, telemetry)
        telemetry = self._inject_space(telemetry)
        if can_find_reference:
            telemetry['DriverAhead'] = telemetry['DriverAhead'].map(d_map)
        event_telemetry.append(telemetry)

        # Lap start date = driver's date offset + first sample's session time.
        lap_start_time = telemetry['SessionTime'].iloc[0]
        lap_start_date.append(offsets[driver] + lap_start_time)

    return event_telemetry, lap_start_date
def load_from_working_dir(year, gp, session, working_dir):
    """Load session, position, telemetry, laps and track data, caching
    pickled copies in *working_dir*.

    On the first call the data is downloaded through the api and pickled;
    subsequent calls read the pickles instead of hitting the network.

    Args:
        year: event year, passed through to ``core.get_session``
        gp: grand prix identifier, passed through to ``core.get_session``
        session: session identifier, passed through to ``core.get_session``
        working_dir (str): directory that holds the pickle cache

    Returns:
        tuple: ``(session, pos, tel, laps_data, track)``
    """
    pickle_path = os.path.join(
        working_dir, 'pickle_{}_{}_{}/'.format(year, gp, session))

    def _dump(obj, name):
        # Write one pickle file; the context manager closes the handle
        # deterministically (the original leaked every file object).
        with open(os.path.join(pickle_path, name), 'wb') as fobj:
            pickle.dump(obj, fobj)

    def _load(name):
        # Read one pickle file with a deterministically closed handle.
        # NOTE(security): pickle.load executes arbitrary code from the
        # file — only safe because this cache is written by us above.
        with open(os.path.join(pickle_path, name), 'rb') as fobj:
            return pickle.load(fobj)

    if not os.path.exists(pickle_path):
        print('Data does not yet exist in working directory. Downloading...')
        os.makedirs(pickle_path)
        session = core.get_session(year, gp, session)
        pos = api.position(session.api_path)
        tel = api.car_data(session.api_path)
        laps_data, stream_data = api.timing_data(session.api_path)
        track = Track(pos)
        track.generate_track(visualization_frequency=250)
        _dump(session, 'session')
        _dump(pos, 'pos')
        _dump(tel, 'tel')
        _dump(laps_data, 'laps_data')
        _dump(track, 'track')
    else:
        print('Loading existing data from working directory...')
        session = _load('session')
        pos = _load('pos')
        tel = _load('tel')
        laps_data = _load('laps_data')
        track = _load('track')

    print('Finished loading!')
    return session, pos, tel, laps_data, track
def test_car_data(caplog):
    """Verify parsing of raw car data and error logging on broken input."""
    response = list()
    with open('fastf1/testing/reference_data/'
              '2020_05_FP2/car_data.raw', 'rb') as fobj:
        for line in fobj.readlines():
            response.append(line.decode('utf-8-sig'))

    # parse data; api path is unused here so it does not need to be valid
    data = api.car_data('api/path', response=response)
    assert "failed to decode" not in caplog.text
    assert isinstance(data, dict)
    assert len(data) == 36  # 20 drivers and some problem with the raw data
    assert list(data.values())[0].shape == (27897, 9)  # dataframe shape
    assert (list(data.values())[0].dtypes == [
        'timedelta64[ns]', 'datetime64[ns]',
        'int64', 'int64', 'int64', 'int64', 'int64', 'int64', 'object'
    ]).all()

    response = response[:50]  # use less samples to speed test up
    # truncate one response: missing data -> cannot be decoded
    response[10] = response[10][:20]
    # parse and verify that error message is logged
    # fix: this is the car data test, so the truncated responses must go
    # through api.car_data (was api.position_data — a copy-paste slip)
    data = api.car_data('api/path', response=response)
    assert "failed to decode" in caplog.text