def test_component_field_accumulaters(self):
    """Verify compressed speed/distance component fields against a reference CSV.

    Each parsed 'record' message is compared row-by-row with a CSV export of
    the same activity (timestamp, heart rate, speed, distance, cadence).
    """
    # TODO: abstract CSV parsing
    # Use a context manager so the CSV handle is closed even when an
    # assertion fails (the original leaked the handle on failure).
    with open(testfile('compressed-speed-distance-records.csv'), 'r') as csv_fp:
        csv_file = csv.reader(csv_fp)
        next(csv_file)  # Consume header

        f = FitFile(testfile('compressed-speed-distance.fit'))
        f.parse()

        records = f.get_messages(name='record')

        # Skip empty record for now (sets timestamp via header)
        empty_record = next(records)

        # File's timestamp record is < 0x10000000, so field returns seconds
        self.assertEqual(empty_record.get_value('timestamp'), 17217864)

        # TODO: update using local_timestamp as offset, since we have this value as 2012 date

        count = -1  # stays -1 if the zip below yields nothing (was a NameError before)
        for count, (record, (timestamp, heartrate, speed, distance, cadence)) in enumerate(zip(records, csv_file)):
            # No fancy datetime stuff, since timestamp record is < 0x10000000
            fit_ts = record.get_value('timestamp')
            self.assertIsInstance(fit_ts, int)
            self.assertLess(fit_ts, 0x10000000)
            self.assertEqual(fit_ts, int(timestamp))

            self.assertEqual(record.get_value('heart_rate'), int(heartrate))
            self.assertEqual(record.get_value('cadence'), int(cadence) if cadence != 'null' else None)

            self.assertAlmostEqual(record.get_value('speed'), float(speed))
            self.assertAlmostEqual(record.get_value('distance'), float(distance))

        self.assertEqual(count, 753)  # TODO: confirm size(records) = size(csv)
def main(argv):
    """Parse command-line options and load the given FIT file.

    Exits with status 1 if the FIT file cannot be parsed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--fitfile", type=str, default="", help="FIT file")
    parser.add_argument("-p", "--ftp", type=float, default=4.5, help="Functional Threshold Pace")
    parser.add_argument("-t", "--threshold", type=int, default=155, help="Heartrate threshold")
    args = parser.parse_args()

    user = User(args.ftp, args.threshold)

    try:
        fitfile = FitFile(args.fitfile)
        fitfile.parse()
    except FitParseError as e:
        # Fixed Python 2-only syntax (`except X, e:` and `print` statement),
        # which is a SyntaxError under Python 3.
        print("Error while parsing .FIT file: %s" % e)
        sys.exit(1)
def upload_file(self, driver, file):
    """Read total distance (miles) and elapsed time from a FIT file onto self."""
    parsed = FitFile(file)
    parsed.parse()
    records = list(parsed.get_messages(name='record'))
    first, last = records[0], records[-1]
    # FIT distances are metres; 1609.347219 m is one US survey mile.
    self.distance = last.get_value('distance') / 1609.347219
    elapsed = last.get_value('timestamp') - first.get_value('timestamp')
    self.time = str(elapsed).split(":")
    return True
def test_basic_file_with_one_record(self, endian='<'):
    """Parse a minimal generated FIT file and check the file_id message.

    Each field is looked up both by name and by field definition number
    (and, for the dynamic 'product' field, by subfield name) to exercise
    every access path.
    """
    f = FitFile(generate_fitfile(endian=endian))
    f.parse()

    self.assertEqual(f.profile_version, 1.52)
    self.assertEqual(f.protocol_version, 1.0)

    file_id = f.messages[0]
    self.assertEqual(file_id.name, 'file_id')

    # type: enum value 4 decodes to 'activity'
    for field in ('type', 0):
        self.assertEqual(file_id.get_value(field), 'activity')
        self.assertEqual(file_id.get(field).raw_value, 4)
    for field in ('manufacturer', 1):
        self.assertEqual(file_id.get_value(field), 'garmin')
        self.assertEqual(file_id.get(field).raw_value, 1)
    # 'product' is dynamic: also reachable via its 'garmin_product' subfield
    for field in ('product', 'garmin_product', 2):
        self.assertEqual(file_id.get_value(field), 'edge500')
        self.assertEqual(file_id.get(field).raw_value, 1036)
    for field in ('serial_number', 3):
        self.assertEqual(file_id.get_value(field), 558069241)
    # time_created is converted from FIT seconds to a datetime
    for field in ('time_created', 4):
        self.assertEqual(file_id.get_value(field), secs_to_dt(723842606))
        self.assertEqual(file_id.get(field).raw_value, 723842606)
    # 'number' is absent in the generated file
    for field in ('number', 5):
        self.assertEqual(file_id.get_value(field), None)
def test_component_field_resolves_subfield(self):
    """A component field feeding a dynamic field resolves through its subfield."""
    fit_data = generate_fitfile(
        generate_messages(
            # event (21), local message 1
            mesg_num=21, local_mesg_num=1,
            field_defs=[
                # event, event_type, data16
                (0, 'enum'), (1, 'enum'), (2, 'uint16'),
            ],
            data=[[0, 0, 2]],
        )
    )
    f = FitFile(fit_data)
    f.parse()

    event = f.messages[1]
    self.assertEqual(event.name, 'event')

    # event: enum 0 decodes to 'timer'
    for field in ('event', 0):
        self.assertEqual(event.get_value(field), 'timer')
        self.assertEqual(event.get(field).raw_value, 0)
    for field in ('event_type', 1):
        self.assertEqual(event.get_value(field), 'start')
        self.assertEqual(event.get(field).raw_value, 0)

    # Should be able to reference by original field name,
    # component field name, subfield name, and then the field def_num of both
    # the original field and component field
    for field in ('timer_trigger', 'data', 3):
        self.assertEqual(event.get_value(field), 'fitness_equipment')
        self.assertEqual(event.get(field).raw_value, 2)

    # Component field should be left as is
    for field in ('data16', 2):
        self.assertEqual(event.get_value(field), 2)
def test_mismatched_field_size(self):
    """A file with misaligned field sizes parses with warnings, not a failure."""
    f = FitFile(testfile('coros-pace-2-cycling-misaligned-fields.fit'))
    with warnings.catch_warnings(record=True) as w:
        f.parse()
    # At least one warning must be raised, and every one of them should be
    # the byte-encoding fallback for a mismatched field size.
    assert w
    assert all("falling back to byte encoding" in str(x) for x in w)
    self.assertEqual(len(f.messages), 11293)
def fit_decode(file2parse):
    """Parse a FIT file and print its contents; exit(1) on parse failure.

    Parameters
    ----------
    file2parse : str or file-like
        Passed straight through to ``FitFile``.
    """
    try:
        fitfile = FitFile(file2parse)
        fitfile.parse()
        all_fit_show(fitfile)
    except FitParseError as e:
        # Bug fix: the original had `% e` INSIDE the string literal, so it
        # printed the literal text "%s % e" instead of the error message.
        print("Error while parsing .FIT file: %s" % e)
        sys.exit(1)
def convert(filename):
    """Build a TCX document tree from the given FIT file."""
    fit = FitFile(filename)
    fit.parse()
    document = create_document()
    activities_node = create_sub_element(document.getroot(), "Activities")
    add_activity(activities_node, fit)
    return document
def test_subfield_components(self):
    """Dynamic (subfield) component fields decode into their named parts."""
    # score = 123, opponent_score = 456, packed into one uint32
    sport_point_value = 123 + (456 << 16)
    # rear_gear_num = 4, rear_gear = 20, front_gear_num = 2, front_gear = 34
    gear_change_value = 4 + (20 << 8) + (2 << 16) + (34 << 24)

    fit_data = generate_fitfile(
        generate_messages(
            # event (21), local message 1
            mesg_num=21, local_mesg_num=1,
            field_defs=[
                # event, data
                (0, 'enum'), (3, 'uint32'),
            ],
            data=[
                # sport point
                [33, sport_point_value],
                # front gear change
                [42, gear_change_value],
            ],
        ))

    f = FitFile(fit_data)
    f.parse()

    sport_point = f.messages[1]
    self.assertEqual(sport_point.name, 'event')
    for ref in ('event', 0):
        self.assertEqual(sport_point.get_value(ref), 'sport_point')
        self.assertEqual(sport_point.get(ref).raw_value, 33)
    for ref in ('sport_point', 'data', 3):
        # Verify raw numeric value
        self.assertEqual(sport_point.get_value(ref), sport_point_value)
    # Each component resolves both by name and by def_num
    for refs, expected in ((('score', 7), 123),
                           (('opponent_score', 8), 456)):
        for ref in refs:
            self.assertEqual(sport_point.get_value(ref), expected)

    gear_change = f.messages[2]
    self.assertEqual(gear_change.name, 'event')
    for ref in ('event', 0):
        self.assertEqual(gear_change.get_value(ref), 'front_gear_change')
        self.assertEqual(gear_change.get(ref).raw_value, 42)
    for ref in ('gear_change_data', 'data', 3):
        # Verify raw numeric value
        self.assertEqual(gear_change.get_value(ref), gear_change_value)
    for refs, expected in ((('front_gear_num', 9), 2),
                           (('front_gear', 10), 34),
                           (('rear_gear_num', 11), 4),
                           (('rear_gear', 12), 20)):
        for ref in refs:
            self.assertEqual(gear_change.get_value(ref), expected)
def test_subfield_components(self):
    """Dynamic (subfield) component fields decode into their named parts."""
    # score = 123, opponent_score = 456, packed into one uint32
    sport_point_value = 123 + (456 << 16)
    # rear_gear_num = 4, rear_gear = 20, front_gear_num = 2, front_gear = 34
    gear_chance_value = 4 + (20 << 8) + (2 << 16) + (34 << 24)
    fit_data = generate_fitfile(
        generate_messages(
            # event (21), local message 1
            mesg_num=21, local_mesg_num=1,
            field_defs=[
                # event, data
                (0, 'enum'), (3, 'uint32'),
            ],
            data=[
                # sport point
                [33, sport_point_value],
                # front gear change
                [42, gear_chance_value],
            ],
        )
    )
    f = FitFile(fit_data)
    f.parse()

    sport_point = f.messages[1]
    self.assertEqual(sport_point.name, 'event')
    for field in ('event', 0):
        self.assertEqual(sport_point.get_value(field), 'sport_point')
        self.assertEqual(sport_point.get(field).raw_value, 33)
    for field in ('sport_point', 'data', 3):
        # Verify raw numeric value
        self.assertEqual(sport_point.get_value(field), sport_point_value)
    # Components of the sport_point subfield, by name and by def_num
    for field in ('score', 7):
        self.assertEqual(sport_point.get_value(field), 123)
    for field in ('opponent_score', 8):
        self.assertEqual(sport_point.get_value(field), 456)

    gear_change = f.messages[2]
    self.assertEqual(gear_change.name, 'event')
    for field in ('event', 0):
        self.assertEqual(gear_change.get_value(field), 'front_gear_change')
        self.assertEqual(gear_change.get(field).raw_value, 42)
    for field in ('gear_change_data', 'data', 3):
        # Verify raw numeric value
        self.assertEqual(gear_change.get_value(field), gear_chance_value)
    # Components of the gear_change_data subfield, by name and by def_num
    for field in ('front_gear_num', 9):
        self.assertEqual(gear_change.get_value(field), 2)
    for field in ('front_gear', 10):
        self.assertEqual(gear_change.get_value(field), 34)
    for field in ('rear_gear_num', 11):
        self.assertEqual(gear_change.get_value(field), 4)
    for field in ('rear_gear', 12):
        self.assertEqual(gear_change.get_value(field), 20)
def readff(ffname=None):
    """Open and parse a FIT file, exiting the process on any failure."""
    # Guard clause: a filename is mandatory.
    if ffname is None:
        print("No fit file name given")
        sys.exit(1)
    try:
        parsed = FitFile(ffname)
        parsed.parse()
    except FitParseError as e:
        print("Error while parsing .FIT file: %s" % e)
        sys.exit(1)
    else:
        return parsed
def load_power_from_fit(filename):
    """Load the power records of a FIT file into a numpy array.

    Parameters
    ----------
    filename : str,
        Path to the FIT file.

    Returns
    -------
    power_rec : ndarray, shape (n_records,)
        One power sample per record; records without power data are 0.

    Raises
    ------
    ValueError
        If the extension is not ``.fit``, the file does not exist, or it
        holds no 'record' messages.
    """
    # Check that the filename has the good extension
    if not filename.endswith('.fit'):
        raise ValueError('The file does not have the right extension.'
                         ' Expected *.fit.')
    # Check if the file exists
    if not isfile(filename):
        raise ValueError('The file does not exist. Please check the path.')

    # Create an object to open the activity
    activity = FitFile(filename)
    activity.parse()

    # Get only the power records
    records = list(activity.get_messages(name='record'))
    if len(records) == 0:
        raise ValueError('There is no data inside the FIT file.')

    power_rec = np.zeros((len(records),))

    # In order to send multiple warnings
    warnings.simplefilter('always', UserWarning)
    warn_sample = 0
    for idx_rec, rec in enumerate(records):
        # Extract only the value regarding the power
        p = rec.get_value('power')
        if p is not None:
            power_rec[idx_rec] = float(p)
        else:
            # We put the value to 0 since that it will not influence
            # the computation of the RPP
            power_rec[idx_rec] = 0.
            # We keep track of the number of inconsistent data
            warn_sample += 1

    # Throw a warning if there is no power data found.
    # Bug fix: the two literals previously concatenated with no separator,
    # yielding "...power data.Be aware.".
    if len(records) == warn_sample:
        warnings.warn('This file does not contain any power data. '
                      'Be aware.')

    return power_rec
class TrackFit(TrackInterface):
    """TrackInterface implementation backed by a parsed FIT file."""

    def __init__(self, filename):
        # NOTE(review): a bare `super()` call is a no-op — it builds the proxy
        # object but never invokes TrackInterface.__init__; presumably
        # `super().__init__()` was intended. Confirm before changing.
        super()
        self.fitfile = FitFile(filename, data_processor=StandardUnitsDataProcessor())
        self.fitfile.parse()

    def start_location(self):
        """Searches for the first record with latitude and longitude coordinates.

        Returns:
            A Location object. None if latitude and longitude not found.
        """
        for record in self.fitfile.messages:
            if not record.name == 'record':
                continue
            location = record_location(record)
            if location:
                return location
        return None

    def end_location(self):
        """Searches for the last record with latitude and longitude coordinates.

        Returns:
            A Location object. None if latitude and longitude not found.
        """
        # Walk the messages back-to-front so the first hit is the last fix.
        for record in reversed(self.fitfile.messages):
            if not record.name == 'record':
                continue
            location = record_location(record)
            if location:
                return location
        return None

    def next_location(self):
        """Generator yielding every record location in file order."""
        for record in self.fitfile.messages:
            if not record.name == 'record':
                continue
            location = record_location(record)
            if location:
                yield location
        # `return None` in a generator simply terminates the iteration.
        return None
def main():
    """Extract an HRV (RMSSD) series from a FIT file and render an HTML report.

    Usage: prog <FIT input file> <HTML output file>. The HRV results are
    substituted into the %HRVDATA% placeholder of fit_hrv_template.html.
    """
    if len(sys.argv) == 3:
        filename = sys.argv[1]
        output_filename = sys.argv[2]
    else:
        # Converted from Python 2 print statements to Python 3 calls.
        print('Usage: {0} [FIT input file] [HTML output file]'.format(sys.argv[0]))
        return

    moving_window = 1  # window length in minutes

    fit = FitFile(filename)
    fit.parse()

    # Gets the start timestamp
    start_time = None
    for message in fit.get_messages(name='record'):
        start_time = message.get_value('timestamp')
        break

    last_rmssd_time = None
    hrv_points = []
    datapoint = None
    hrv_results = [["Duration", "Avg HR", "HRV"]]
    for message in fit.messages:
        if message.mesg_num == 20:  # 'record' message
            # Flush the previous datapoint if it accumulated R-R intervals.
            if datapoint is not None and datapoint.has_rr():
                hrv_points.append(datapoint)
                if datapoints_duration_in_minutes(hrv_points) > moving_window:
                    # Recompute at most every half window.
                    if last_rmssd_time is None or (hrv_points[-1].timestamp - last_rmssd_time).total_seconds() / 60 > 0.5 * moving_window:
                        last_rmssd_time = hrv_points[-1].timestamp
                        result = rmssd(hrv_points, moving_window)
                        if result is not None:
                            hrv_results.append([(result[0] - start_time).total_seconds(), result[1], result[2]])
            datapoint = _HrvPoint(message.get_value('timestamp'), message.get_value('heart_rate'))
        elif message.mesg_num == 78:  # 'hrv' message
            if datapoint is not None:
                datapoint.add_hrv(message.get_value('time'))
        elif message.name == 'event':
            print('{0} Event: {1} {2} {3}'.format(
                message.get_value('timestamp'),
                message.get_value('event'),
                message.get_value('event_type'),
                message.get_value('data')))
        elif message.name in ['session', 'lap']:
            print('{0} {1}'.format(message.get_value('timestamp'), message.name))
            for f in message.fields:
                if f.value is None or f.name in ['timestamp']:
                    continue
                print(' {0} : {1}'.format(f.name, f.value))

    with open('fit_hrv_template.html', 'r') as template:
        with open(output_filename, 'w') as output:
            output.write(template.read().replace("%HRVDATA%", json.dumps(hrv_results)))
def open_fit(self, filename):
    """Load a FIT file's 'record' messages into the numpy record array self.d.

    Builds one column per field of the first record (strings for date_time
    fields, floats otherwise), fills it row-by-row, then converts positions
    from semicircles to degrees and speed from mm/s to m/s.
    """
    self.filename = filename
    a = FitFile(filename)
    a.parse()
    records = list(a.get_messages(name='record'))
    row = len(records)
    if row != 0:
        keys = []
        # Seed the record array with a single boolean 'ok' column.
        self.d = np.ones(row, dtype={'names': ['ok'], 'formats': ['bool']})
        # parse first record to discover the column names and dtypes
        for field in records[0]:
            if field.type.name == 'date_time':
                keys.append((field.name, 'a20'))  # 20-char byte string for ISO timestamps
            else:
                keys.append((field.name, 'float'))
        for key, typ in (keys):
            # some files edited by fitfiletools (https://www.fitfiletools.com/)
            # (eg remover) may have some strange structure, with twice the same field!
            # The bare except silently skips such duplicate columns.
            try:
                self.d = npr.rec_append_fields(self.d, key, np.zeros(row), typ)
                self.scale[key] = 1.0
                self.unit[key] = "SI"
            except:
                pass
        # Fill the array one record (row) at a time.
        idx = 0
        for r in a.get_messages(name='record'):
            for f in r.fields:
                if f.type.name == 'date_time':
                    self.d[f.name][idx] = f.value.isoformat() + 'Z'
                else:
                    # Non-numeric / missing values are stored as -1.0 sentinel.
                    try:
                        self.d[f.name][idx] = float(f.value)
                    except:
                        # print("invalid data", f.name, idx,f.data)
                        self.d[f.name][idx] = -1.0
            idx += 1
        # we now have to convert lat and lon
        # (semicircles to degrees: 2**31 / 180 = 11930464.71)
        self.d['position_lat'] /= 11930464.71
        self.d['position_long'] /= 11930464.71
        # new version of fitparse gives speed in mm/s
        self.d['speed'] /= 1000
        self.move_column('position_lat', 'lat')
        self.move_column('position_long', 'lon')
        self.move_column('altitude', 'ele')
        self.move_column('timestamp', 'time')
        self.append_column('idx', 'int')
        self['idx'] = np.arange(self.get_row_count())
def load_power_from_fit(filename):
    """Method to open the power data from FIT file into a pandas dataframe.

    Parameters
    ----------
    filename : str,
        Path to the FIT file.

    Returns
    -------
    data : DataFrame
        Power records of the ride.

    Raises
    ------
    IOError
        If the file contains no record data.
    """
    filename = check_filename_fit(filename)
    activity = FitFile(filename)
    activity.parse()
    records = activity.get_messages(name='record')

    data = defaultdict(list)
    for rec in records:
        values = rec.get_values()
        for key in FIELDS_DATA:
            # Missing fields become NaN. Uses np.nan: the np.NaN alias was
            # removed in NumPy 2.0.
            data[key].append(values.get(key, np.nan))

    data = pd.DataFrame(data)
    if data.empty:
        raise IOError(
            'The file {} does not contain any data.'.format(filename))

    # rename the columns for consistency
    data.rename(columns={
        'heart_rate': 'heart-rate',
        'altitude': 'elevation'
    }, inplace=True)

    data.set_index(FIELDS_DATA[0], inplace=True)
    # Clear the index name; `del data.index.name` is rejected by modern
    # pandas Index objects, assignment of None is the supported spelling.
    data.index.name = None

    return data
def read_data(self, filename):
    """Read all 'record' fields of data/<filename> into numpy arrays on self.data.

    Timestamps are converted to seconds elapsed since the second record;
    a 'timedelta' array of successive differences is added, and
    self.data_length is set to the number of samples.
    """
    fitfile_path = os.path.join('data/', filename)
    print(fitfile_path)
    fitfile = FitFile(fitfile_path)
    fitfile.parse()
    records = list(fitfile.get_messages(name='record'))
    # Use the first record to initialise one list per field.
    r0 = records[0]
    for field in r0:
        self.data[field.name] = []
        print("field: ", field.name, "units: ", field.units)
        if field.name == 'timestamp':
            t0 = field.value  # reference time for elapsed seconds
    # First pass: elapsed seconds relative to t0.
    for r in records[1:]:
        for field in r:
            if field.name == 'timestamp':
                t = field.value - t0
                self.data['timestamp'].append(t.total_seconds())
    # Second pass: all other fields.
    # NOTE(review): a field absent from r0 but present in a later record
    # starts its list late, so its array can be shorter than 'timestamp'
    # and misaligned — confirm whether that can occur for these files.
    for r in records[1:]:
        for field in r:
            if field.name != 'timestamp':
                if field.name in self.data:
                    self.data[field.name].append(field.value)
                else:
                    self.data[field.name] = []
                    self.data[field.name].append(field.value)
    # Convert every list to a numpy array in place.
    for k in self.data:
        v = np.array(self.data[k])
        print(k + " shape:", v.shape)
        self.data[k] = v
    # timedelta[0] = 0, then successive timestamp differences.
    self.data["timedelta"] = np.array([0])
    self.data["timedelta"] = np.hstack(
        (self.data["timedelta"], np.diff(self.data["timestamp"])))
    self.data_length = len(self.data["timestamp"])
def parse_fit_file(self):
    """Parse self.filename's record messages into a normalized DataFrame on self.df."""
    source = FitFile(self.filename)
    source.parse()
    # One list of values per field name, in message order.
    columns = {}
    for message in source.get_messages(name='record'):
        for field in message.fields:
            columns.setdefault(field.name, []).append(field.value)
    # Drop undocumented / unknown fields.
    for unknown in ('unknown_87', 'unknown_88', 'unknown_90'):
        columns.pop(unknown, None)
    self.df = self.normalize_df(pd.DataFrame.from_dict(columns))
def load_power_from_fit(filename):
    """Method to open the power data from FIT file into a pandas dataframe.

    Parameters
    ----------
    filename : str,
        Path to the FIT file.

    Returns
    -------
    data : DataFrame
        Power records of the ride.

    Raises
    ------
    IOError
        If the file contains no record data.
    """
    filename = check_filename_fit(filename)
    activity = FitFile(filename)
    activity.parse()
    records = activity.get_messages(name='record')

    data = defaultdict(list)
    for rec in records:
        values = rec.get_values()
        for key in FIELDS_DATA:
            # Missing fields become NaN. Uses np.nan: the np.NaN alias was
            # removed in NumPy 2.0.
            data[key].append(values.get(key, np.nan))

    data = pd.DataFrame(data)
    if data.empty:
        raise IOError('The file {} does not contain any data.'.format(
            filename))

    # rename the columns for consistency
    data.rename(columns={'heart_rate': 'heart-rate',
                         'altitude': 'elevation'},
                inplace=True)

    data.set_index(FIELDS_DATA[0], inplace=True)
    # Clear the index name; `del data.index.name` is rejected by modern
    # pandas Index objects, assignment of None is the supported spelling.
    data.index.name = None

    return data
def test_developer_data_thread_safe(self):
    """
    Test that a file with developer types in it can be parsed thread-safe.

    This test opens 2 FIT files and tests whether the dev_types of one
    does not change the dev_types of the other.
    """
    # Partially consume file 1: stop right after the final field_description.
    fit_file_1 = FitFile(testfile('developer-types-sample.fit'))
    field_description_count = 0
    for message in fit_file_1.get_messages():
        if message.mesg_type.name == "field_description":
            field_description_count += 1
        if field_description_count >= 4:
            # Break after final field description message
            break
    # Partially consume file 2 so its developer data registration interleaves.
    fit_file_2 = FitFile(testfile('developer-types-sample.fit'))
    for message in fit_file_2.get_messages():
        if message.mesg_type.name == "developer_data_id":
            break
    # Finishing file 1 must still succeed despite file 2's activity.
    try:
        fit_file_1.parse()
    except FitParseError:
        self.fail("parse() unexpectedly raised a FitParseError")
def get_weather(self, fit_file):
    '''Fetch current weather conditions at the ride's first GPS fix.

    Queries the Wunderground conditions API with the start coordinates
    taken from the first record of the FIT file.
    '''
    parsed = FitFile(fit_file)
    parsed.parse()
    records = list(parsed.get_messages(name='record'))
    # FIT positions are semicircles; scale by 180 / 2**31 to get degrees.
    # (renamed `long` -> `lon`: the old name shadowed the builtin)
    lat = str(records[0].get_value('position_lat') * (180.0 / 2**31))
    lon = str(records[0].get_value('position_long') * (180.0 / 2**31))
    # Grab all the resources
    # NOTE(review): API key is hardcoded in the URL — consider moving to config.
    url = "http://api.wunderground.com/api/bdf13372b1f7e319/conditions/q/" + lat + "," + lon + ".json"
    r = requests.get(url)
    j = r.json()
    obs = j['current_observation']
    return {
        'weather_string': obs['weather'] + " " + obs['temperature_string'] + " " + obs['wind_string'],
        'temp_f': obs['temp_f'],
        'temp_c': obs['temp_c'],
        'humidity': obs['relative_humidity'],
        'wind_speed': obs['wind_mph'],
        'wind_gust_mph': obs['wind_gust_mph'],
    }
def convert(filename, tz_is_local=False, dist_recalc=False, speed_recalc=False,
            calibrate=False, per_lap_cal=False, manual_lap_distance=None,
            current_cal_factor=100.0):
    """Convert a FIT file to TCX format"""
    # Calibration with manual lap distances implies
    # per-lap calibration:
    if calibrate and manual_lap_distance is not None:
        per_lap_cal = True
    document = create_document()
    element = create_sub_element(document.getroot(), "Activities")
    try:
        activity = FitFile(filename, data_processor=MyDataProcessor())
        activity.parse()
        if tz_is_local:
            # Find the first trackpoint with coordinates, then re-parse the
            # file with a timezone-aware processor built from them.
            lat = None
            lon = None
            for trackpoint in activity.get_messages('record'):
                if lat is not None and lon is not None:
                    break
                lat = trackpoint.get_value("position_lat")
                lon = trackpoint.get_value("position_long")
            activity = FitFile(filename,
                               data_processor=TZDataProcessor(lat=lat, lon=lon))
            activity.parse()
        session = next(activity.get_messages('session'))
        total_activity_distance = session.get_value('total_distance')
        total_calculated_distance = sum_distance(activity)
        # Ratio of GPS-derived distance to device-reported distance.
        activity_scaling_factor = (total_calculated_distance /
                                   total_activity_distance)
        new_cal_factor = activity_scaling_factor * current_cal_factor
        actelem, total_distance = add_activity(element, session, activity,
                                               dist_recalc, speed_recalc,
                                               calibrate, current_cal_factor,
                                               per_lap_cal,
                                               manual_lap_distance,
                                               activity_scaling_factor)
    except FitParseError as e:
        sys.stderr.write(str("Error while parsing .FIT file: %s" % e) + "\n")
        sys.exit(1)
    # Choose which distance figure the notes should report.
    if dist_recalc:
        distance_used = total_calculated_distance
    elif calibrate:
        distance_used = total_distance
    else:
        distance_used = total_activity_distance
    method = ""
    if dist_recalc or speed_recalc or calibrate:
        parts = []
        if calibrate:
            if per_lap_cal:
                parts.append("calibration applied per lap")
            else:
                parts.append("calibration applied")
        if dist_recalc and speed_recalc:
            parts.append("speed and distance recalculated")
        elif dist_recalc:
            parts.append("distance recalculated")
        elif speed_recalc:
            parts.append("speed recalculated")
        # NOTE(review): if only `calibrate` is set (no recalc, no manual
        # distance) neither branch assigns `reference` -> NameError. Confirm
        # whether that combination is reachable from the CLI.
        if calibrate and manual_lap_distance is not None:
            reference = " from known distance and/or GPS"
        elif dist_recalc or speed_recalc:
            reference = " from GPS"
        method = "(" + ", ".join(parts) + reference + ")"
    notes = (
        "{distance_used:.3f} km in {total_time!s} {dist_method:s}\n"
        "Distance in FIT file: {fit_dist:.3f} km; "
        "calculated via GPS: {gps_dist:.3f} km "
        "(precision: {precision:.1f}%)\n"
        "Footpod calibration factor setting: {old_cf:.1f}%; "
        "new factor based on recomputed distance: {new_cf:.1f}%"
    ).format(
        distance_used=distance_used / 1000,
        total_time=timedelta(
            seconds=int(session.get_value('total_timer_time'))),
        fit_dist=total_activity_distance / 1000,
        gps_dist=total_calculated_distance / 1000,
        precision=(1 - (abs(total_calculated_distance -
                            total_activity_distance) /
                        total_calculated_distance)) * 100,
        old_cf=current_cal_factor,
        new_cf=new_cal_factor,
        dist_method=method)
    add_notes(actelem, notes)
    add_creator(actelem,
                activity.messages[0].get_value('manufacturer').capitalize(),
                activity.messages[0].get_value('product'))
    add_author(document)
    return document
def main(argv):
    """Summarise a FIT ride: duration, distance, TSS, power curve and more.

    Reads CLI options (FIT file, FTP, HR threshold, weight), collects the
    power / heart-rate / cadence streams from the 'record' messages and
    prints derived training metrics.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--fitfile", type=str, default="", help="FIT file")
    parser.add_argument("-p", "--ftp", type=int, default=276, help="Functional Threshold Power")
    parser.add_argument("-t", "--threshold", type=int, default=158, help="Heartrate threshold")
    parser.add_argument("-w", "--weight", type=float, default=82.5, help="Weight in kg")
    args = parser.parse_args()
    user = User(args.ftp, args.threshold, args.weight)
    try:
        fitfile = FitFile(args.fitfile)
        fitfile.parse()
    except FitParseError:
        print("Error while parsing .FIT file: %s" % args.fitfile)
        sys.exit(1)
    power = []
    heartrate = []
    cadence = []
    start = ""
    stop = ""
    distance = 0
    # Get all data messages that are of type record
    for record in fitfile.get_messages('record'):
        # Go through all the data entries in this record
        for data in record:
            if data.name == "timestamp":
                # First timestamp seen is the start; every later one
                # overwrites stop, so the last record wins.
                if not start:
                    start = data.value
                else:
                    stop = data.value
            if data.name == "power":
                power.append(data.value)
            if data.name == "heart_rate":
                heartrate.append(data.value)
            if data.name == "cadence":
                cadence.append(data.value)
            if data.name == "distance":
                if data.value != None:
                    distance = data.value
    duration = stop - start
    print("Duration: %s" % (duration))
    print("Distance: %.2f km" % (distance/1000.0))
    # Filtered copies without None samples for the min/max/TSS math.
    p = [i for i in power if i is not None]
    hr = [i for i in heartrate if i is not None]
    c = [i for i in cadence if i is not None]
    if hr:
        hr_tss, hr_avg, hr_max, hr_min = get_hrTSS(hr, duration.total_seconds(), user.hr_ftp)
        print("Heartrate: TSS: %.1f, AVG: %d, MAX: %d, MIN: %d" % (hr_tss, hr_avg, hr_max, hr_min))
    if p:
        tss, NP, IFactor = get_TSS(p, len(power), user.ftp)
        print("Power: TSS: %.1f" % (tss))
        print(" IF: %.2f" % (IFactor))
        print(" NP: %d W" % (NP))
        print(" AVG: %d W" % (average(power)))
        print(" MAX: %d W" % (max(p)))
        if hr:
            print(" NP:HR: %.2f" % (NP/hr_avg))
    if c:
        print("Cadence: AVG: %d, MAX: %d" % (average(cadence), max(c)))
    if p:
        print("")
        # VO2max estimated from best 5-minute power and rider weight.
        print("VO2max: %.2f ml/kg/min" % (vo2max(max_power(p, 5*60), user.weight)))
        print("")
        print("Max power:")
        print(" peak: %d W" % (max(p)))
        print(" 10s: %d W" % (max_power(p, 10)))
        print(" 30s: %d W" % (max_power(p, 30)))
        print(" 1m: %d W" % (max_power(p, 1*60)))
        print(" 3m: %d W" % (max_power(p, 3*60)))
        print(" 5m: %d W" % (max_power(p, 5*60)))
        print(" 8m: %d W" % (max_power(p, 8*60)))
        # Longer windows only if the ride (sample count) is long enough.
        if (len(p) > 10*60):
            print(" 10m: %d W" % (max_power(p, 10*60)))
        if (len(p) > 20*60):
            print(" 20m: %d W" % (max_power(p, 20*60)))
        if (len(p) > 1*60*60):
            print(" 1h: %d W" % (max_power(p, 1*60*60)))
        if (len(p) > 2*60*60):
            print(" 2h: %d W" % (max_power(p, 2*60*60)))
        if (len(p) > 3*60*60):
            print(" 3h: %d W" % (max_power(p, 3*60*60)))
def convert(filename, time_zone="auto", dist_recalc=False, speed_recalc=False,
            calibrate=False, per_lap_cal=False, manual_lap_distance=None,
            current_cal_factor=100.0):
    """Convert a FIT file to TCX format"""
    # Calibration requires either GPS recalculation or manual lap distance(s):
    if calibrate and not dist_recalc and manual_lap_distance is None:
        sys.stderr.write("Calibration requested, enabling distance recalculation from GPS/footpod.\n")
        dist_recalc = True
    # Calibration with manual lap distances implies
    # per-lap calibration:
    if calibrate and manual_lap_distance is not None:
        per_lap_cal = True
    document = create_document()
    element = create_sub_element(document.getroot(), "Activities")
    try:
        if time_zone == "auto":
            # We need activity object to be able to get trackpoints,
            # before re-creating activity again with timezone info
            activity = FitFile(filename, check_crc=False,
                               data_processor=MyDataProcessor())
            activity.parse()
            lat = None
            lon = None
            for trackpoint in activity.get_messages('record'):
                if lat is not None and lon is not None:
                    break
                lat = trackpoint.get_value("position_lat")
                lon = trackpoint.get_value("position_long")
            if lat is not None and lon is not None:
                activity = FitFile(filename, check_crc=False,
                                   data_processor=TZDataProcessor(lat=lat,
                                                                  lon=lon))
        else:
            # Explicit timezone name supplied by the caller.
            activity = FitFile(filename, check_crc=False,
                               data_processor=TZDataProcessor(tzname=time_zone))
        activity.parse()
        session = next(activity.get_messages('session'))
        total_activity_distance = session.get_value('total_distance')
        total_calculated_distance = sum_distance(activity)
        # Ratio of recomputed distance to device-reported distance.
        activity_scaling_factor = (total_calculated_distance /
                                   total_activity_distance)
        new_cal_factor = activity_scaling_factor * current_cal_factor
        actelem, total_distance = add_activity(element, session, activity,
                                               dist_recalc, speed_recalc,
                                               calibrate, current_cal_factor,
                                               per_lap_cal,
                                               manual_lap_distance,
                                               activity_scaling_factor)
    except FitParseError as e:
        sys.stderr.write(str("Error while parsing .FIT file: %s" % e) + "\n")
        sys.exit(1)
    # Choose which distance figure the notes should report.
    if dist_recalc:
        distance_used = total_calculated_distance
    elif calibrate:
        distance_used = total_distance
    else:
        distance_used = total_activity_distance
    method = ""
    if dist_recalc or speed_recalc or calibrate:
        parts = []
        if calibrate:
            if per_lap_cal:
                parts.append("calibration applied per lap")
            else:
                parts.append("calibration applied")
        if dist_recalc and speed_recalc:
            parts.append("speed and distance recalculated")
        elif dist_recalc:
            parts.append("distance recalculated")
        elif speed_recalc:
            parts.append("speed recalculated")
        # NOTE(review): if only `calibrate` is set with no recalc and no
        # manual distance, `reference` is never assigned -> NameError
        # (mitigated here by the forced dist_recalc above) — confirm.
        if calibrate and manual_lap_distance is not None:
            reference = " from known distance (with GPS fill-in)"
        elif dist_recalc or speed_recalc:
            reference = " from GPS/footpod"
        method = "(" + ", ".join(parts) + reference + ")"
    notes = ("{total_laps:d} laps: {distance_used:.3f} km in {total_time!s} {dist_method:s}\n"
             "Distance in FIT file: {fit_dist:.3f} km; "
             "calculated via GPS/footpod: {gps_dist:.3f} km "
             "(precision: {precision:.1f}%)\n"
             "Footpod calibration factor setting: {old_cf:.1f}%; "
             "new factor based on recomputed distance: {new_cf:.1f}%"
             ).format(total_laps=session.get_value('num_laps'),
                      distance_used=distance_used / 1000,
                      total_time=timedelta(seconds=int(session.get_value(
                          'total_timer_time'))),
                      fit_dist=total_activity_distance / 1000,
                      gps_dist=total_calculated_distance / 1000,
                      precision=(1 - (abs(total_calculated_distance -
                                          total_activity_distance) /
                                      total_calculated_distance)) * 100,
                      old_cf=current_cal_factor,
                      new_cf=new_cal_factor,
                      dist_method=method)
    add_notes(actelem, notes)
    # Prefer device_info for creator metadata; fall back to file_id.
    try:
        dinfo = next(activity.get_messages('device_info'))
        manufacturer = dinfo.get_value('manufacturer').title().replace('_', ' ')
        product_name = dinfo.get_value('descriptor').replace('_', ' ')
        product_id = dinfo.get_value('product')
        serial_number = dinfo.get_value('serial_number')
    except:  # if no device_info message, StopIteration is thrown
        fid = next(activity.get_messages('file_id'))
        manufacturer = fid.get_value('manufacturer').title().replace('_', ' ')
        product_id = fid.get_value('product')
        product_name = PRODUCT_MAP[product_id] if product_id in PRODUCT_MAP else product_id
        serial_number = fid.get_value('serial_number')
    add_creator(actelem,
                manufacturer,
                product_name,
                product_id,
                serial_number
                )
    add_author(document)
    return document
def test_unterminated_file(self):
    """Parsing a truncated file with CRC checking disabled should not raise."""
    f = FitFile(testfile('nick.fit'), check_crc=False)
    # Capture (and thereby tolerate) any warnings emitted during the parse.
    with warnings.catch_warnings(record=True) as w:
        f.parse()
def loadFIT(fitFilename):
    """Parse the named FIT file and return the resulting FitFile object."""
    parsed = FitFile(fitFilename)
    parsed.parse()
    return parsed
GOOGLE_API_KEY='AIzaSyCBFTk2JuESrKlpab-LYNwG56zXI6eUka8' # We don't really use WU anymore #WEATHERUNDERGROUND_API_KEY='01c440c2fae9b2b2' parser = argparse.ArgumentParser(description='Process some integers.') parser.add_argument('file', type=argparse.FileType('r'), nargs=1, help='File to be parsed (.fit)') args = parser.parse_args() input_file_path = Path(args.file[0].name) with open(input_file_path, 'rb') as input_file: try: fit_file = FitFile(input_file, check_crc=False) fit_file.parse() except FitParseError as err: print('Error while parsing {}: {}'.format(input_file.relpath(), err)) sys.exit(1) # Build our api instances gmaps = googlemaps.Client(key=GOOGLE_API_KEY) tf = TimezoneFinder() moments = [] j=0 for i, record in enumerate(fit_file.messages): if record.mesg_num == 20: lat = convert_to_degrees(record.get_value('position_lat')) lng = convert_to_degrees(record.get_value('position_long')) if lat and lng:
from fitparse import FitFile

# Dump every message (and each of its field values) from the sample file.
name = 'testfile.FIT'
fitfile = FitFile(name)
fitfile.parse()
for message in list(fitfile.get_messages()):
    print('==> {}'.format(message))
    for key, value in message.get_values().items():
        print(' {} = {}'.format(key, value))
def main():
    """Entry point of the fitfilerenamer tool.

    Parses command-line arguments, builds a list of .fit files (from an
    explicit file, a directory, several file arguments, or
    FIT_DEFAULT_PATH), parses each file with fitparse and hands it to
    rename_fitfile().  Sets the module-level ``verbosity`` and
    ``simulation`` flags.  On Android (``ROA`` truthy) progress and a
    summary dialog are shown via sl4a.  Exits with a nonzero status when
    no usable input is found.
    """
    global verbosity, simulation
    starttime = time.time()
    #nargs="*",'--fit_files_or_folder','-f','--fit_files_or_folder', dest = 'fit_files_or_folder'
    parser = argparse.ArgumentParser(
        description='The fitfilerenamer tool',
        epilog='%(prog)s {version}'.format(version=__version__))
    parser.add_argument('-v', '--verbosity', type=int, choices=range(0, 2),
                        default=1,
                        help='0= silent, 1= a bit output, 2= many output')
    parser.add_argument('fit_files_or_folder', nargs="*",
                        help='w/o default Dir is used')
    parser.add_argument('-s', '--simulation', action='store_true',
                        help='simulation without renaming any file')
    # NOTE(review): store_false => ignorecrc defaults to True (CRC checked)
    # and passing -i turns CRC checking OFF further below.
    parser.add_argument('-i', '--ignorecrc', action='store_false',
                        help='no crc check')
    arguments = vars(parser.parse_args())
    args = arguments['fit_files_or_folder']
    verbosity = arguments['verbosity']
    ignorecrc = arguments['ignorecrc']
    simulation = arguments['simulation']
    # (optionen, args) = parser.parse_args()
    #Iprint('Argumentlength %s' % (len(args)))
    if ROA:
        Dprint('Android (qpython) detectet')
        droid = sl4a.Android()
    else:
        Dprint('Android not detectet')
    if len(args) == 1:
        Dprint("Looking for File or Directory: %s" % args[0])
        if args[0][-4:].lower() == '.fit' and os.path.isfile( args[0] ):
            # the single argument is a file: a list with one entry
            filelist = [args[0]]
        elif os.path.isdir( args[0] ):
            # the single argument is a directory: collect the .fit files in it
            Dprint("argument given, it is a directory: %s" % (args[0]))
            filelist = create_filelist(args[0])
        else:
            Iprint('argument given, but nothing found')
            final_message('wait %d sec or press strg c' % (WAIT_AFTER_JOB_DONE))
            sys.exit(1)
    elif len(args) == 0:  # no argument given: fall back to the default path
        Dprint('No argument, looking at default Path: %s' % (FIT_DEFAULT_PATH))
        if os.path.isdir(FIT_DEFAULT_PATH):
            Dprint('No argument, but default path exist: %s' % (FIT_DEFAULT_PATH))
            filelist = create_filelist(FIT_DEFAULT_PATH)
        else:  # no argument and no default location found
            Iprint("No Argument given and Defaultlocation does not exist: %s" %
                   (FIT_DEFAULT_PATH))
            final_message('wait %d sec or press strg c' % (WAIT_AFTER_JOB_DONE))
            sys.exit(2)
    else:  # more than one argument: keep only existing *.fit files
        Dprint('much arguments. %d' % (len(args)))
        filelist = []
        for next_file in args:
            if next_file[-4:].lower() == '.fit' and os.path.isfile(next_file):
                Dprint('file %s' % (next_file))
                filelist.append(next_file)
        if len(filelist) == 0:
            Iprint('Arguments given but not a fitfiles found')
            final_message('wait %d sec or press strg c' % (WAIT_AFTER_JOB_DONE))
            sys.exit(6)
    Dprint('fitfiles: %s' % (filelist))
    n = len(filelist)
    if ROA:
        # create a progress bar sized to the number of files
        droid.dialogCreateHorizontalProgress('Analyzing and Renaming',
                                             'please be patient', n)
        droid.dialogShow()
        Dprint('creating progressbar')
    Iprint('please be patient, i am parsing. This can take a minute')
    file_count = skipped_count = renamed_count = simulated_count = skipped_defective_count = 0
    for file in filelist:
        Dprint('processing %s' % (file))
        Dprint('start datafitprocessor')
        if not os.path.isfile(file):
            Iprint('skipping folder: %s' % (file))
            continue
        try:
            fitfile = FitFile(file, check_crc=ignorecrc)
            Dprint('parsing start')
            fitfile.parse()
            Dprint('parsing done')
        except FitParseError as e:
            # unparseable file: count it and move on
            Iprint('skipping defective fitfile %s' % (file))
            skipped_defective_count += 1
            for m in e.args:
                Dprint('Exception: %s' % (m))
            continue
        #Dprint('rename arguments: %s , %s , %d' % (fitfile, file, file_count))
        renamestatus = rename_fitfile(fitfile, file, file_count)
        if renamestatus == 'renamed':
            renamed_count += 1
        elif renamestatus == 'simulated_renaming':
            simulated_count += 1
        elif renamestatus == 'skipped':
            skipped_count += 1
        if ROA:
            droid.dialogSetCurrentProgress(file_count + 1)
        file_count += 1
    difftime = time.time() - starttime
    Iprint('finished processing %d file(s) in %d seconds' % (file_count, difftime))
    summary = 'renamed: %d, simulated: %d, skipped existing: %d, skipped defective: %d' % (
        renamed_count, simulated_count, skipped_count, skipped_defective_count)
    Iprint(summary)
    if ROA:
        droid.dialogDismiss()
        title = 'I have processed %d File(s) in %d seconds' % (file_count, difftime)
        #droid.makeToast(title)
        #droid.ttsSpeak(title)
        #summary = 'renamed: %d, simulated: %d, skipped existing: %d, skipped defective: %d' % (renamed_count, simulated_count, skipped_count, skipped_defective_count)
        droid.dialogCreateAlert(title, summary)
        droid.dialogSetPositiveButtonText('OK')
        droid.dialogShow()
        dummy = droid.dialogGetResponse().result
    else:
        final_message('wait %d sec or press strg c' % (WAIT_AFTER_JOB_DONE))
def convert(
    filename,
    tz_is_local=False,
    dist_recalc=False,
    speed_recalc=False,
    calibrate=False,
    per_lap_cal=False,
    manual_lap_distance=None,
    current_cal_factor=100.0,
):
    """Convert a FIT file to TCX format.

    :param filename: path of the input .FIT file
    :param tz_is_local: re-parse with a timezone derived from the first
        GPS fix when True
    :param dist_recalc: recalculate distance from GPS/footpod data
    :param speed_recalc: recalculate speed from GPS/footpod data
    :param calibrate: apply footpod calibration
    :param per_lap_cal: calibrate each lap separately (forced on when
        *manual_lap_distance* is given together with *calibrate*)
    :param manual_lap_distance: known per-lap distance used for calibration
    :param current_cal_factor: current footpod calibration factor in percent
    :return: the generated TCX document
    :raises SystemExit: on FitParseError (exits with status 1)
    """
    # Calibration with manual lap distances implies per-lap calibration:
    if calibrate and manual_lap_distance is not None:
        per_lap_cal = True
    document = create_document()
    element = create_sub_element(document.getroot(), "Activities")
    try:
        activity = FitFile(filename, data_processor=MyDataProcessor())
        activity.parse()
        if tz_is_local:
            # Grab the first trackpoint that has a position and re-parse
            # with a timezone-aware processor.
            lat = None
            lon = None
            for trackpoint in activity.get_messages("record"):
                if lat is not None and lon is not None:
                    break
                lat = trackpoint.get_value("position_lat")
                lon = trackpoint.get_value("position_long")
            activity = FitFile(filename,
                               data_processor=TZDataProcessor(lat=lat, lon=lon))
            activity.parse()
        session = next(activity.get_messages("session"))
        total_activity_distance = session.get_value("total_distance")
        total_calculated_distance = sum_distance(activity)
        # NOTE(review): divides by the FIT file's total distance — a zero
        # total_distance would raise ZeroDivisionError; confirm upstream
        # guarantees.
        activity_scaling_factor = total_calculated_distance / total_activity_distance
        new_cal_factor = activity_scaling_factor * current_cal_factor
        actelem, total_distance = add_activity(
            element,
            session,
            activity,
            dist_recalc,
            speed_recalc,
            calibrate,
            current_cal_factor,
            per_lap_cal,
            manual_lap_distance,
            activity_scaling_factor,
        )
    except FitParseError as e:
        sys.stderr.write(str("Error while parsing .FIT file: %s" % e) + "\n")
        sys.exit(1)
    # Pick which distance figure the notes report.
    if dist_recalc:
        distance_used = total_calculated_distance
    elif calibrate:
        distance_used = total_distance
    else:
        distance_used = total_activity_distance
    # Build a human-readable description of what was recalculated.
    method = ""
    if dist_recalc or speed_recalc or calibrate:
        parts = []
        if calibrate:
            if per_lap_cal:
                parts.append("calibration applied per lap")
            else:
                parts.append("calibration applied")
        if dist_recalc and speed_recalc:
            parts.append("speed and distance recalculated")
        elif dist_recalc:
            parts.append("distance recalculated")
        elif speed_recalc:
            parts.append("speed recalculated")
        # BUGFIX: "reference" was previously unassigned when calibrating
        # without a manual lap distance and without any recalculation,
        # causing a NameError on the "method = ..." line below.
        reference = ""
        if calibrate and manual_lap_distance is not None:
            reference = " from known distance and/or GPS"
        elif dist_recalc or speed_recalc:
            reference = " from GPS"
        method = "(" + ", ".join(parts) + reference + ")"
    notes = (
        "{distance_used:.3f} km in {total_time!s} {dist_method:s}\n"
        "Distance in FIT file: {fit_dist:.3f} km; "
        "calculated via GPS: {gps_dist:.3f} km "
        "(precision: {precision:.1f}%)\n"
        "Footpod calibration factor setting: {old_cf:.1f}%; "
        "new factor based on recomputed distance: {new_cf:.1f}%"
    ).format(
        distance_used=distance_used / 1000,
        total_time=timedelta(seconds=int(session.get_value("total_timer_time"))),
        fit_dist=total_activity_distance / 1000,
        gps_dist=total_calculated_distance / 1000,
        precision=(1 - (abs(total_calculated_distance -
                            total_activity_distance) /
                        total_calculated_distance)) * 100,
        old_cf=current_cal_factor,
        new_cf=new_cal_factor,
        dist_method=method,
    )
    add_notes(actelem, notes)
    add_creator(
        actelem,
        activity.messages[0].get_value("manufacturer").capitalize(),
        activity.messages[0].get_value("product")
    )
    add_author(document)
    return document
def parse_run(debug, fitfiles):
    """Parse one or more FIT run files and load their records into the DB.

    For each file: parses it with fitparse, reverse-geocodes each record's
    position, converts the timestamp to local time, and (unless *debug* is
    True) persists Country/State/City/Run/Leg/Point rows via the SQLAlchemy
    session.  In debug mode the data is only printed.

    :param debug: when True, print instead of writing to the database
    :param fitfiles: iterable of FIT file inputs accepted by FitFile
    :raises SystemExit: on FitParseError (exits with status 1)
    """
    for fitfile_in in fitfiles:
        if debug:
            print('#' * 80)
            print('Debug mode active')
            print('#' * 80)
        ##########################################
        # Parse the fit file
        ###########################################
        try:
            fitfile_processor = StandardUnitsDataProcessor()
            fitfile = FitFile(fitfile_in,
                              data_processor=fitfile_processor,
                              check_crc=False)
            fitfile.parse()
        except FitParseError as err:
            print('Error while parsing {}: {}'.format(fitfile_in.relpath(), err))
            sys.exit(1)
        # Build our api instances (geocoder + timezone lookup)
        geocoder = OpenMapQuest(api_key=run_app.config['OPEN_MAPQUEST_KEY'],
                                scheme='http', timeout=100)
        tf = TimezoneFinder()
        #ureg = UnitRegistry()
        # NOTE(review): ureg is used below but created nowhere in this
        # function — presumably a module-level UnitRegistry; confirm.
        # Pull manufacturer data (last file_id message wins)
        for record in fitfile.get_messages('file_id', with_definitions=False):
            manufacturer = record.get_value('manufacturer')
            product = record.get_value('garmin_product')
        # file_creator messages are consumed but ignored
        for record in fitfile.get_messages('file_creator', with_definitions=False):
            pass
        if debug:
            print(f"device: {manufacturer} -- {product}")
            print()
        # Parse all events
        for record in fitfile.get_messages('event', with_definitions=False):
            event_group = record.get_value('event_group')
            timestamp = record.get_value('timestamp')
            if debug:
                print(f"event: {event_group} -- {timestamp}")
                for record_data in record:
                    print(f" * {record_data.name}: {record_data.value}")
                print()
        initial = True
        for record in fitfile.get_messages('record', with_definitions=False):
            # Parse all fields; records without a GPS fix are skipped.
            lat = record.get_value('position_lat')
            lng = record.get_value('position_long')
            if lat and lng:
                timezone = find_timezone(tf, lat, lng)
                location = geocoder.reverse([lat, lng]).raw
            else:
                print('skipping record w/o lat or long\n')
                continue
            # Convert the UTC timestamp to the local timezone of the fix.
            utc_time = pendulum.instance(record.get_value('timestamp'))
            local_tz = pendulum.timezone(timezone)
            local_time = local_tz.convert(utc_time)
            distance = record.get_value('distance') * ureg.km
            elevation = record.get_value('enhanced_altitude') * ureg.meter
            speed = record.get_value('enhanced_speed') * ureg.kilometer_per_hour
            if speed.magnitude > 0:
                # pace in minutes per mile from speed in mph
                pace = 60 / speed.to(ureg.mile_per_hour).magnitude
            else:
                print('too fast for me!')
                continue
            if not debug:
                # Add to the database
                if initial:
                    # First usable record: look up (or create) the
                    # country/state/city hierarchy and the Run/Leg rows.
                    print('Setting up initial city/state/country')
                    # NOTE(review): bare excepts below treat any query
                    # failure as "row missing" and create a new one.
                    try:
                        cur_country = db.session.query(Country).filter(
                            Country.name == location['address']
                            ['country_code']).one()
                    except:
                        cur_country = Country(
                            name=location['address']['country_code'])
                    try:
                        cur_state = db.session.query(State).filter(
                            State.name == location['address']['state']).one()
                    except:
                        cur_state = State(name=location['address']['state'],
                                          country=cur_country)
                    try:
                        cur_city = db.session.query(City).filter(
                            City.name == location['address']['city']).one()
                    except:
                        cur_city = City(name=location['address']['city'],
                                        state=cur_state)
                    cur_run = Run(cur_city)
                    cur_leg = Leg(cur_run)
                    db.session.add_all(
                        [cur_country, cur_state, cur_city, cur_run, cur_leg])
                    initial = False
                point = Point(local_time, elevation.magnitude, lat, lng,
                              distance.to(ureg.meter).magnitude,
                              speed.magnitude, cur_leg, cur_run)
                print(point)
                print('Adding prev. point')
                db.session.add(point)
            # Human-readable dump of the record, printed in both modes.
            output_str = []
            output_str.append(
                f" * datetime: {local_time.strftime('%Y-%m-%d %H:%M:%S')}")
            output_str.append(f" * timezone: {timezone}")
            output_str.append(f" * location: {lat},{lng}")
            if 'city' in location['address']:
                output_str.append(f" * city: {location['address']['city']}")
            else:
                output_str.append(f" * city: {None}")
            if 'state' in location['address']:
                output_str.append(f" * state: {location['address']['state']}")
            else:
                output_str.append(f" * state: {None}")
            if 'country_code' in location['address']:
                output_str.append(
                    f" * country: {location['address']['country_code']}")
            else:
                output_str.append(f" * country: {None}")
            output_str.append(f" * distance: {distance.to(ureg.mile):02.2~}")
            output_str.append(f" * elevation: {elevation.to(ureg.foot):.5~}")
            output_str.append(
                f" * speed: {speed.to(ureg.mile / ureg.hour):.3~}")
            output_str.append(
                f" * pace: {round(pace):02}:{round((pace % 1) * 60):02} min / mi"
            )
            print(f"record: {local_time.strftime('%Y-%m-%d %H:%M:%S')}")
            print('\n'.join(output_str))
            print()
        if not debug:
            # Persist everything accumulated for this file.
            print('DB session committing')
            db.session.commit()
            print('DB session committed')