def test_fitparse_invalid_chained_files(self):
    """Detect errors when files are chained - concatenated - together"""
    # assertRaises is the idiomatic unittest replacement for the
    # try/self.fail()/except pattern; msg= keeps the original diagnostics.
    # Also fixes the "chaned" typo in the last failure message.

    # Second chained part has a bad CRC.
    with self.assertRaises(
            fitdecode.FitCRCError,
            msg="Didn't detect a CRC error in the chained file"):
        tuple(fitdecode.FitReader(
            _invalid_test_file('activity-activity-filecrc.fit')))

    # Second chained part has a corrupt header.
    with self.assertRaises(
            fitdecode.FitHeaderError,
            msg="Didn't detect a header error in the chained file"):
        tuple(fitdecode.FitReader(
            _invalid_test_file('activity-settings-corruptheader.fit')))

    # Second chained part is truncated (no data).
    with self.assertRaises(
            fitdecode.FitEOFError,
            msg="Didn't detect an EOF error in the chained file"):
        tuple(fitdecode.FitReader(
            _invalid_test_file('activity-settings-nodata.fit')))
def test_fitparse_component_field_resolves_subfield(self):
    """Components of a subfield must be resolvable under every alias."""
    # Build an in-memory FIT file with a single 'event' (21) message
    # carrying fields event (0), event_type (1) and data16 (2).
    fit_data = _generate_fitfile(
        _generate_messages(
            mesg_num=21, local_mesg_num=1,
            field_defs=[(0, 'enum'), (1, 'enum'), (2, 'uint16')],
            data=[[0, 0, 2]]))

    # parse the whole content
    frames = tuple(fitdecode.FitReader(
        fit_data, check_crc=True, keep_raw_chunks=False))

    event = frames[4]
    self.assertEqual(event.name, 'event')

    for key in ('event', 0):
        field = event.get_field(key)
        self.assertEqual(field.value, 'timer')
        self.assertEqual(field.raw_value, 0)

    for key in ('event_type', 1):
        field = event.get_field(key)
        self.assertEqual(field.value, 'start')
        self.assertEqual(field.raw_value, 0)

    # should be able to reference by original field name, component field
    # name, subfield name, and then the field def_num of both the original
    # field and component field
    for key in ('timer_trigger', 'data', 3):
        field = event.get_field(key)
        self.assertEqual(field.value, 'fitness_equipment')
        self.assertEqual(field.raw_value, 2)

    # component field should be left as is
    for key in ('data16', 2):
        self.assertEqual(event.get_field(key).value, 2)
def retrieve_data(file_path, log_file):
    """Read a FIT file and return two DataFrames.

    Returns ``(records_df, sessions_df)`` built from the 'record' and
    'session' data messages, keeping only the globally-declared
    ``data_fields`` / ``session_fields``.  Progress and errors are
    appended to *log_file*.  On any failure the exception is logged and
    the function falls through, implicitly returning ``None``.
    """
    records = []
    sessions = []
    try:
        with fitdecode.FitReader(
                file_path,
                processor=fitdecode.StandardUnitsDataProcessor()) as fit:
            for frame in fit:
                if not isinstance(frame, fitdecode.FitDataMessage):
                    continue
                if frame.name == 'record':
                    records.append({
                        name: frame.get_value(name)
                        for name in data_fields if frame.has_field(name)})
                if frame.name == 'session':
                    sessions.append({
                        name: frame.get_value(name)
                        for name in session_fields
                        if frame.has_field(name)})
        df = pd.DataFrame(records)
        sf = pd.DataFrame(sessions)
        with open(log_file, 'a+') as log:
            log.write('\nThe .fit file was read in...')
        return df, sf
    except Exception as e:
        with open(log_file, 'a+') as log:
            log.write(
                f'\nSomething went wrong while trying to read in the .fit file. Exception raised: {e}'
            )
def parse_fitfile(filepath, resample_dt=0.1, verbose=False):
    """Extract the power stream of a FIT file as a resampled DataFrame.

    :param filepath: path of the FIT file to read
    :param resample_dt: target sampling period in seconds; only ``0.1``
        is supported
    :param verbose: print device info fields and missing-power notices
    :return: ``(df, resample_dt)`` where *df* is a timestamp-indexed
        DataFrame of power values interpolated to 100 ms
    :raises ValueError: if *resample_dt* is not 0.1
    """
    # BUG FIX: `assert` is stripped under `python -O`; validate explicitly.
    if resample_dt != 0.1:
        raise ValueError('only resample_dt == 0.1 is supported')

    timeseries = []
    t0 = None
    with fitdecode.FitReader(filepath) as fit:
        for frame in fit:
            # Frames may be FitHeader, FitDefinitionMessage,
            # FitDataMessage or FitCRC; only data messages matter here.
            if not isinstance(frame, fitdecode.FitDataMessage):
                continue
            if frame.name == 'device_info':
                for field in frame.fields:
                    if verbose and (
                            'manufacturer' in field.name
                            or 'product' in field.name
                            or 'device_type' in field.name):
                        print(field.name, field.value)
            if frame.name == 'record':
                # BUG FIX: the original looped over frame.fields and
                # re-read power/timestamp once per field; one read per
                # record is enough (the t0 guard made the extra
                # iterations no-ops anyway).
                power = frame.get_value('power')
                timestamp = frame.get_value('timestamp')
                if power is None and verbose:
                    print("Power is None")
                if t0 is None or timestamp != t0:
                    timeseries.append((timestamp, power))
                    t0 = timestamp
    df = pd.DataFrame(
        data=np.array(timeseries)[:, 1].astype(float),
        index=np.array(timeseries)[:, 0])
    df = df.resample('100ms').interpolate()
    return df, resample_dt
def test_fitparse_basic_file_with_one_record(self, endian='<'):
    """Parse a minimal generated file and verify header and file_id."""
    frames = tuple(fitdecode.FitReader(
        _generate_fitfile(endian=endian),
        check_crc=True,
        keep_raw_chunks=False))

    header = frames[0]
    file_id = frames[2]  # frames[1] is the definition message

    self.assertEqual(header.profile_ver, (1, 52))
    self.assertEqual(header.proto_ver, (1, 0))
    self.assertEqual(file_id.name, 'file_id')

    # (aliases, expected value, expected raw value or None to skip)
    expectations = (
        (('type', 0), 'activity', 4),
        (('manufacturer', 1), 'garmin', 1),
        (('product', 'garmin_product', 2), 'edge500', 1036),
        (('serial_number', 3), 558069241, None),
        (('time_created', 4), _secs_to_dt(723842606), 723842606),
        (('number', 5), None, None),
    )
    for aliases, value, raw_value in expectations:
        for key in aliases:
            field = file_id.get_field(key)
            self.assertEqual(field.value, value)
            if raw_value is not None:
                self.assertEqual(field.raw_value, raw_value)
def get_dataframes(fname: str) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Takes the path to a FIT file (as a string) and returns two Pandas
    DataFrames: one containing data about the laps, and one containing data
    about the individual points.
    """
    points_data = []
    laps_data = []
    lap_no = 1
    with fitdecode.FitReader(fname) as fit_file:
        for frame in fit_file:
            if not isinstance(frame, fitdecode.records.FitDataMessage):
                continue
            if frame.name == 'record':
                point = get_fit_point_data(frame)
                if point is not None:
                    point['lap'] = lap_no
                    points_data.append(point)
            elif frame.name == 'lap':
                lap = get_fit_lap_data(frame)
                lap['number'] = lap_no
                laps_data.append(lap)
                lap_no += 1

    # Create DataFrames from the data we have collected. If any information
    # is missing from a particular lap or track point, it will show up as a
    # null value or "NaN" in the DataFrame.
    laps_df = pd.DataFrame(laps_data, columns=LAPS_COLUMN_NAMES)
    laps_df.set_index('number', inplace=True)
    points_df = pd.DataFrame(points_data, columns=POINTS_COLUMN_NAMES)
    return laps_df, points_df
def extract_lat_lon_from_fit(file, verbose=False) -> list:
    """
    Extracts latitude and longitude from `.fit` files. Handling of FIT files
    is unfortunately not quite intuitive, but seems to work
    :param file: The FIT file
    :param verbose: Verbosity flag
    :return: A list of latitude and longitude values in the order they
        appear in the file
    """
    coords = []
    with fitdecode.FitReader(file) as fit:
        for frame in fit:
            # Only 'record' data messages carry position fields.
            if not isinstance(frame, fitdecode.FitDataMessage):
                continue
            if frame.name != 'record':
                continue
            if not (frame.has_field('position_lat')
                    and frame.has_field('position_long')):
                continue
            lat = frame.get_value('position_lat')
            lon = frame.get_value('position_long')
            try:
                coords.append(
                    [osm_helpers.sc2deg(lat), osm_helpers.sc2deg(lon)])
            except TypeError:
                # Conversion fails when a present field holds no usable
                # value; optionally report what we saw.
                if verbose:
                    print('Had the following value: {}, {}'.format(
                        lat, lon))
    return coords
def main(args=None):
    """CLI entry point: parse a FIT file, filter frames per the command
    line options and dump the result as JSON to the output stream.
    Parsing errors are reported on stderr but do not abort the dump."""
    options = parse_args(args)

    frames = []
    try:
        # NOTE(review): check_crc receives options.nocrc directly here —
        # presumably it already holds a CrcCheck value; confirm against
        # parse_args.
        with fitdecode.FitReader(
                options.infile,
                processor=fitdecode.StandardUnitsDataProcessor(),
                check_crc=options.nocrc,
                keep_raw_chunks=True) as fit:
            for frame in fit:
                is_def = isinstance(frame, fitdecode.FitDefinitionMessage)
                if options.nodef and is_def:
                    continue
                if options.filter and (
                        is_def
                        or isinstance(frame, fitdecode.FitDataMessage)):
                    if (frame.name not in options.filter
                            and frame.global_mesg_num not in options.filter):
                        continue
                frames.append(frame)
    except Exception:
        print(
            'WARNING: the following error occurred while parsing FIT file. '
            'Output file might be incomplete or corrupted.',
            file=sys.stderr)
        print('', file=sys.stderr)
        traceback.print_exc()

    json.dump(frames, fp=options.output, cls=RecordJSONEncoder)
    return 0
def parse_import_activity(file: File, conn: sqlite3.Connection):
    """Insert every simplified data frame of *file* into the ``frames``
    table and return the number of rows inserted.

    The frame's ``timestamp`` (if any) is stored as an ISO string in its
    own column; the ``$type`` marker becomes the ``type`` column; the
    remaining keys are serialized to JSON.
    """
    count = 0
    with fitdecode.FitReader(file.data) as fit:
        for raw in fit:
            if not isinstance(raw, fitdecode.FitDataMessage):
                continue
            frame = simple_frame(raw)
            if frame is None:
                continue
            if "timestamp" in frame:
                timestamp = frame.pop("timestamp").isoformat()
            else:
                timestamp = None
            type_ = frame.pop("$type")
            data = json.dumps(frame, default=json_ser_default)
            conn.execute(
                "INSERT INTO frames "
                "(file_id, type, timestamp, data_json) "
                "VALUES (?, ?, ?, ?)",
                (file.id, type_, timestamp, data),
            )
            count += 1
    return count
def test_fitparse_unexpected_eof(self):
    """A truncated FIT file must raise FitEOFError."""
    # assertRaises is the idiomatic unittest replacement for the
    # try/self.fail()/except pattern.
    with self.assertRaises(
            fitdecode.FitEOFError,
            msg='did not detect an unexpected EOF'):
        tuple(fitdecode.FitReader(
            _invalid_test_file('activity-unexpected-eof.fit'),
            check_crc=True,
            keep_raw_chunks=True))
def test_fitparse_elemnt_bolt_developer_data_id_without_application_id(self):
    """
    Test that a file without application id set inside developer_data_id
    is parsed (as seen on ELEMNT BOLT with firmware version WB09-1507)
    """
    reader = fitdecode.FitReader(
        _invalid_test_file(
            'elemnt-bolt-no-application-id-inside-developer-data-id.fit'),
        check_crc=True,
        keep_raw_chunks=True)
    # Draining the reader must not raise.
    tuple(reader)
def test_fitparse_invalid_crc(self):
    """A file with a wrong CRC must raise FitCRCError."""
    # assertRaises is the idiomatic unittest replacement for the
    # try/self.fail()/except pattern.
    with self.assertRaises(
            fitdecode.FitCRCError,
            msg='did not detect an invalid CRC'):
        tuple(fitdecode.FitReader(
            _invalid_test_file('activity-filecrc.fit'),
            check_crc=True,
            keep_raw_chunks=True))
def test_fitparse_speed(self):
    """avg_speed of the first 'session' message must be scaled to 5.86."""
    reader = fitdecode.FitReader(
        _test_file('2019-02-17-062644-ELEMNT-297E-195-0.fit'))

    # find the first 'session' data message
    session = next(
        frame for frame in reader
        if isinstance(frame, fitdecode.FitDataMessage)
        and frame.name == 'session')

    self.assertEqual(
        session.get_value('avg_speed', fit_type='uint16'), 5.86)
def FITtoDataFrame(filename):
    """
    Reads a FIT file and returns a dataframe with the important parameters.

    Columns (indexed by 'time'): 'name', 'desc', 'segno', 'dist', 'lat',
    'lon', 'ele', 'time', 'duration', 'heartrate', 'power', 'cad', 'temp'.
    Missing fields appear as 'field not present'; duration is always -1.
    """
    gpxinfo = list()
    with fitdecode.FitReader(filename) as fit:
        for frame in fit:
            # Frames are FitHeader, FitDefinitionMessage, FitDataMessage
            # or FitCRC; only 'record' data messages are converted.
            try:
                if isinstance(frame, fitdecode.FitDataMessage):
                    if frame.name == 'record':
                        timestamp = frame.get_value(
                            'timestamp', fallback='field not present')
                        # lat/long come as semicircles;
                        # 11930465 semicircles == 1 degree (2**31 / 180)
                        position_lat = frame.get_value(
                            'position_lat', fallback=0) / 11930465
                        position_long = frame.get_value(
                            'position_long', fallback=0) / 11930465
                        distance = frame.get_value(
                            'distance', fallback='field not present')
                        altitude = frame.get_value(
                            'altitude', fallback='field not present')
                        power = frame.get_value(
                            'power', fallback='field not present')
                        heart_rate = frame.get_value(
                            'heart_rate', fallback='field not present')
                        cadence = frame.get_value(
                            'cadence', fallback='field not present')
                        temperature = frame.get_value(
                            'temperature', fallback='field not present')
                        gpxinfo.append([
                            'Fit', 'Fit', 1, distance, position_lat,
                            position_long, altitude, timestamp, -1,
                            heart_rate, power, cadence, temperature
                        ])
            # BUG FIX: the original bare `except:` also swallowed
            # SystemExit and KeyboardInterrupt; catch Exception instead.
            except Exception:
                print('ERROR')
    gpxdf = pd.DataFrame(gpxinfo, columns=[
        'name', 'desc', 'segno', 'dist', 'lat', 'lon', 'ele', 'time',
        'duration', 'heartrate', 'power', 'cad', 'temp'
    ])
    gpxdf = gpxdf.set_index('time')
    return gpxdf
def get_distance(fit_path, key):
    """Estimate average speed, ride time and distance from the power data
    of a FIT file using a simple physics model, write a one-line summary
    to ``/tmp/<key>.txt`` and upload it to the 'tr-fit-results' S3 bucket.

    :param fit_path: path of the FIT file to read
    :param key: base name (without extension) for the result file
    """
    g = 9.81      # gravity in m/s^2
    m = 79.4 + 1  # rider + bike mass in kg, +1 kg simulating the wheel's
                  # rotational inertia
    Crr = 0.005   # approximate rolling resistance
    CdA = 0.324   # approximate CdA in m^2 - hands on hoods elbows bent
    Rho = 1.225   # air density sea level STP
    dt = 1        # time step from the fit file; will be updated below
    speed_total = 0   # sum of all speeds; divided by count for the average
    Vi = 0            # starting speed
    count = 0         # used to average the speed
    time_prev = None  # last fit message time, for edge cases when the
                      # interval is not exactly 1 second
    total_time = 0    # length of the ride in seconds

    with fitdecode.FitReader(fit_path) as fit:
        for frame in fit:
            if isinstance(frame, fitdecode.FitDataMessage) \
                    and frame.has_field('power'):
                time_current = frame.get_field('timestamp').value
                if time_prev:
                    dt = (time_current - time_prev).seconds
                total_time += dt
                p = frame.get_field('power').value
                # One Euler step: new speed from power input minus
                # aerodynamic drag and rolling resistance.
                Vf = ((-dt * (CdA * Rho * Vi**3 - 2 * p
                              + 2 * Crr * Vi * g * m) + Vi**2 * m) / m)**.5
                speed_total += Vf
                count += 1
                Vi = Vf
                time_prev = time_current

    v = speed_total * 2.23694 / count  # convert m/s to mph and average
    t = total_time / 3600

    # create a string with the results
    string = ('Average Speed: {:.2f} mph - Time: {:.2f} hours - '
              'Distance: {:.2f} miles').format(v, t, (v * t))

    # write the data to a file
    file_name = '{}.txt'.format(key)
    lambda_path = '/tmp/{}'.format(file_name)
    s3_path = file_name
    # BUG FIX: dropped the redundant file.close() that sat inside the
    # `with` block — the context manager already closes the file.
    with open(lambda_path, 'w+') as file:
        file.write(string)

    # move the file to the tr-fit-results bucket
    s3 = boto3.resource('s3')
    s3.meta.client.upload_file(lambda_path, 'tr-fit-results', s3_path,
                               ExtraArgs={'ACL': 'public-read'})
def dump_fitfile(filepath):
    """Return a human-readable dump of every data message in a FIT file.

    Each message contributes its name on one line followed by one
    tab-indented ``name value`` line per field.
    """
    # PERF: collect parts and join once instead of quadratic `str +=`.
    parts = []
    with fitdecode.FitReader(filepath, keep_raw_chunks=True) as fit:
        for frame in fit:
            # Frames are FitHeader, FitDefinitionMessage, FitDataMessage
            # or FitCRC; only data messages are dumped.
            if isinstance(frame, fitdecode.FitDataMessage):
                parts.append(frame.name + '\n')
                for field in frame.fields:
                    parts.append(
                        '\t' + field.name + ' ' + str(field.value) + '\n')
    return ''.join(parts)
def main(args=None):
    """CLI entry point: parse a FIT file, filter frames per the command
    line options, then pretty-print them as text with a statistics
    header.  Chained files are separated by a banner line."""
    options = parse_args(args)

    def _echo(*objects, sep=' ', end='\n', file=options.output, flush=False):
        print(*objects, sep=sep, end=end, file=file, flush=flush)

    def _echo_separator():
        _echo('')
        _echo('*' * 80)
        _echo('')
        _echo('')

    global echo
    echo = _echo

    # fully parse input file and filter out the unwanted messages
    frames = []
    with fitdecode.FitReader(
            options.infile,
            processor=fitdecode.StandardUnitsDataProcessor(),
            check_crc=not options.nocrc,
            keep_raw_chunks=True) as fit:
        for frame in fit:
            is_def = isinstance(frame, fitdecode.FitDefinitionMessage)
            if options.nodef and is_def:
                continue
            if options.filter and (
                    is_def
                    or isinstance(frame, fitdecode.FitDataMessage)):
                if (frame.name not in options.filter
                        and frame.global_mesg_num not in options.filter):
                    continue
            frames.append(frame)

    # print some statistics before starting
    txt_print(global_stats(frames, options))
    echo('')

    # pretty-print the file; a FitHeader after the first frame marks the
    # start of a chained file
    seen_any = False
    for frame in frames:
        if seen_any and isinstance(frame, fitdecode.FitHeader):
            _echo_separator()
        seen_any = True
        txt_print(frame)
    echo('')
def test_fitparse_component_field_accumulaters(self):
    """Check accumulated component fields (speed/distance/heart rate/
    cadence) of every 'record' message against a reference CSV dump."""
    csv_fp = open(_test_file('compressed-speed-distance-records.csv'),
                  mode='rt')
    csv_file = csv.reader(csv_fp)
    next(csv_file)  # consume header

    # parse the whole content
    fit = tuple(
        fitdecode.FitReader(_test_file('compressed-speed-distance.fit'),
                            check_crc=fitdecode.CrcCheck.ENABLED,
                            keep_raw_chunks=False))

    # make a generator of 'record' messages
    records = (
        r for r in fit
        if isinstance(r, fitdecode.FitDataMessage) and r.name == 'record')

    # skip empty record for now (sets timestamp via header)
    empty_record = next(records)

    # file's timestamp record is < 0x10000000, so field returns seconds
    self.assertEqual(empty_record.get_field('timestamp').value, 17217864)

    # TODO: update using local_timestamp as offset, since we have this
    # value as 2012 date

    # Walk the FIT records and the CSV rows in lockstep; each CSV row is
    # (timestamp, heartrate, speed, distance, cadence).
    for count, (record,
                (timestamp, heartrate, speed, distance,
                 cadence)) in enumerate(zip(records, csv_file)):
        # no fancy datetime stuff, since timestamp record is < 0x10000000
        fit_ts = record.get_field('timestamp').value
        self.assertIsInstance(fit_ts, int)
        self.assertLess(fit_ts, 0x10000000)
        self.assertEqual(fit_ts, int(timestamp))

        self.assertEqual(
            record.get_field('heart_rate').value, int(heartrate))
        # CSV marks a missing cadence as the literal string 'null'.
        self.assertEqual(
            record.get_field('cadence').value,
            int(cadence) if cadence != 'null' else None)

        # Floats compared with assertAlmostEqual to absorb scaling noise.
        self.assertAlmostEqual(
            record.get_field('speed').value, float(speed))
        self.assertAlmostEqual(
            record.get_field('distance').value, float(distance))

    self.assertEqual(count, 753)  # TODO: confirm size(records) = size(csv)

    csv_fp.close()
def test_fitparse_units_processor(self):
    """Every sample file must parse cleanly when using the
    StandardUnitsDataProcessor."""
    sample_files = (
        '2013-02-06-12-11-14.fit',
        '2015-10-13-08-43-15.fit',
        'Activity.fit',
        'Edge810-Vector-2013-08-16-15-35-10.fit',
        'MonitoringFile.fit',
        'Settings.fit',
        'Settings2.fit',
        'WeightScaleMultiUser.fit',
        'WeightScaleSingleUser.fit',
        'WorkoutCustomTargetValues.fit',
        'WorkoutIndividualSteps.fit',
        'WorkoutRepeatGreaterThanStep.fit',
        'WorkoutRepeatSteps.fit',
        'activity-large-fenxi2-multisport.fit',
        'activity-small-fenix2-run.fit',
        'antfs-dump.63.fit',
        'sample-activity-indoor-trainer.fit',
        'sample-activity.fit',
        'garmin-fenix-5-bike.fit',
        'garmin-fenix-5-run.fit',
        'garmin-fenix-5-walk.fit',
        'garmin-edge-820-bike.fit')
    for name in sample_files:
        reader = fitdecode.FitReader(
            _test_file(name),
            processor=fitdecode.StandardUnitsDataProcessor())
        tuple(reader)  # draining the reader must not raise
def _parse(self, fpath: str):
    """Read *fpath*, dispatch record/lap/session data messages to their
    handlers, then build the points and laps DataFrames."""
    handlers = {
        'record': self._parse_record,
        'lap': self._parse_lap,
        'session': self._parse_session,
    }
    with fitdecode.FitReader(fpath) as fit:
        for frame in fit:
            if not isinstance(frame, fitdecode.FitDataMessage):
                continue
            handler = handlers.get(frame.name)
            if handler is not None:
                handler(frame)

    self._points = self._handle_points_data(
        pd.DataFrame(self._points_data,
                     columns=self.INITIAL_COL_NAMES_POINTS))
    self._laps = self._infer_laps_data(
        pd.DataFrame(
            self._laps_data,
            columns=self.INITIAL_COL_NAMES_LAPS).set_index('lap'),
        self._points)
def test_raw_chunk_parsing(self):
    """
    Test that FitReader parses correctly all our "valid" test files by
    building an in-memory clone of each source file, chunk by chunk, and
    then match file's and bytes object's checksums

    Files with developer types (at least)::

        developer-types-sample.fit
        20170518-191602-1740899583.fit
        DeveloperData.fit

    Chained files (at least)::

        activity-settings.fit
    """
    for src_file in glob.iglob(os.path.join(TEST_FILES_DIR, '*.fit')):
        # PERF: collect chunks and join once instead of quadratic
        # `bytes +=` in the loop.
        chunks = []

        # read src_file chunk by chunk
        try:
            with fitdecode.FitReader(
                    src_file,
                    check_crc=fitdecode.CrcCheck.ENABLED,
                    keep_raw_chunks=True) as fit:
                for record in fit:
                    chunks.append(record.chunk.bytes)
        except Exception:
            print('ERROR while parsing:', src_file)
            raise

        raw_content = b''.join(chunks)

        # checksum of src_file
        h1 = hashlib.new(HASH_METHOD)
        with open(src_file, mode='rb') as fin:
            for buff in iter(lambda: fin.read(64 * 1024), b''):
                h1.update(buff)

        # checksum of raw_content
        h2 = hashlib.new(HASH_METHOD)
        h2.update(raw_content)

        # compare checksums
        self.assertEqual(h1.digest(), h2.digest())
def readFitFile(fileName):
    """Read ``<fileName>_ACTIVITY.fit`` and return a summary dict.

    Fields are matched by substring against every data-message field
    name; 'altitude' keeps the running maximum (floor 80), all other
    keys keep the last value seen.  The result is also printed.
    """
    print("Reading fit file ...")
    file = "%s_ACTIVITY.fit" % (fileName)
    result = {"max_altitude": 80}
    # Keys copied verbatim whenever the field name contains them.
    # (The original checked max_speed and total_timer_time twice each;
    # the duplicates were removed.)
    simple_keys = (
        "avg_heart_rate", "max_heart_rate", "total_timer_time",
        "max_speed", "total_ascent", "total_descent", "avg_speed",
        "total_calories", "start_time", "total_distance")
    with fitdecode.FitReader(file) as fit:
        for frame in fit:
            if not isinstance(frame, fitdecode.FitDataMessage):
                continue
            # Iterate fields directly instead of range(len(...)).
            for field in frame.fields:
                if "altitude" in field.name:
                    if result["max_altitude"] < field.value:
                        result["max_altitude"] = field.value
                for key in simple_keys:
                    if key in field.name:
                        result[key] = field.value
    for k, v in result.items():
        print(k, v)
    return result
def _decode_fit_file(stream, container):
    """Yield a ``Datum(timestamp, heart_rate)`` for every complete
    'record' message in *stream* and store the session's total_calories
    on *container*.  FIT decoding errors end the generator silently."""
    try:
        with fitdecode.FitReader(stream, processor=_UtcTimeProcessor()) \
                as reader:
            for frame in reader:
                if not isinstance(frame, fitdecode.FitDataMessage):
                    continue
                if frame.name == 'record':
                    hr = None
                    ts = None
                    for field in frame.fields:
                        if field.name == 'timestamp':
                            ts = field.value
                        elif field.name == 'heart_rate':
                            hr = field.value
                    # There was a case of input with an incomplete
                    # record. Check that both are available before
                    # emitting
                    if ts and hr:
                        yield Datum(ts, hr)
                elif frame.name == 'session':
                    container.cals = frame.get_value('total_calories')
    except fitdecode.exceptions.FitError:
        pass
def decodeFitFile(file, name):
    """Decode *file* and return a dict with the activity date, a
    three-bucket power-zone histogram, the sample count and the activity
    name (the '.fit' suffix stripped from *name*)."""
    power = []
    obj = {}
    with fitdecode.FitReader(file) as fit:
        for frame in fit:
            if not isinstance(frame, fitdecode.FitDataMessage):
                continue
            if frame.name == 'file_id' and frame.has_field('time_created'):
                obj['date'] = frame.get_value('time_created').isoformat()
            elif frame.name == 'record' and frame.has_field('power'):
                power.append(frame.get_value('power'))

    # Bucket the samples into zones delimited at 176 W and 220 W.
    zones, _ = np.histogram(np.array(power), bins=[0, 176, 220, 2000])
    obj['zones'] = zones.tolist()
    obj['samples'] = len(power)
    obj['name'] = name.replace('.fit', '')
    print(zones)
    return obj
def main(args=None):
    """CLI entry point: parse a FIT file, filter frames per the command
    line options and dump the result as JSON to the output stream."""
    options = parse_args(args)

    frames = []
    with fitdecode.FitReader(
            options.infile,
            processor=fitdecode.StandardUnitsDataProcessor(),
            check_crc=not options.nocrc,
            keep_raw_chunks=True) as fit:
        for frame in fit:
            is_def = isinstance(frame, fitdecode.FitDefinitionMessage)
            if options.nodef and is_def:
                continue
            if options.filter and (
                    is_def
                    or isinstance(frame, fitdecode.FitDataMessage)):
                if (frame.name not in options.filter
                        and frame.global_mesg_num not in options.filter):
                    continue
            frames.append(frame)

    json.dump(frames, fp=options.output, cls=RecordJSONEncoder)
def _fitparse_csv_test_helper(self, fit_file, csv_file):
    """Compare every 'record' message of *fit_file* against the
    reference CSV dump produced by the ANT FIT SDK Dump tool.

    The SDK tool logs some invalid values oddly, so a few fixups are
    applied before comparing (see inline comments).  Both iterators
    must also be exhausted at the same time.
    """
    csv_fp = open(_test_file(csv_file), 'r')
    csv_messages = csv.reader(csv_fp)
    field_names = next(csv_messages)  # consume header

    # parse the whole content
    fit = tuple(fitdecode.FitReader(
        _test_file(fit_file), check_crc=True, keep_raw_chunks=False))

    # make a generator of 'record' messages
    messages = (
        r for r in fit
        if isinstance(r, fitdecode.FitDataMessage) and r.name == 'record')

    # for fixups
    last_valid_lat, last_valid_long = None, None

    for message, csv_message in zip(messages, csv_messages):
        for csv_index, field_name in enumerate(field_names):
            try:
                fit_value = message.get_field(field_name).value
            except KeyError:
                fit_value = None
            csv_value = csv_message[csv_index]

            if field_name == 'timestamp':
                # adjust GMT to PDT and format
                fit_value = (fit_value - datetime.timedelta(
                    hours=7)).strftime("%a %b %d %H:%M:%S PDT %Y")

            # track last valid lat/longs
            if field_name == 'position_lat':
                if fit_value is not None:
                    last_valid_lat = fit_value
            if field_name == 'position_long':
                if fit_value is not None:
                    last_valid_long = fit_value

            # ANT FIT SDK Dump tool does a bad job of logging invalids,
            # so fix them
            if fit_value is None:
                # ANT FIT SDK Dump tool cadence reports invalid as 0
                if field_name == 'cadence' and csv_value == '0':
                    csv_value = None
                # ANT FIT SDK Dump tool invalid lat/lng reports as last
                # valid
                if field_name == 'position_lat':
                    fit_value = last_valid_lat
                if field_name == 'position_long':
                    fit_value = last_valid_long

            # BUG FIX: the original did `csv_value = int(fit_value)`,
            # which made the comparison below trivially true for every
            # integer field.  It is the CSV side that needs converting;
            # empty cells cannot be int()'d and are mapped to None just
            # below.
            if isinstance(fit_value, int) and csv_value != '':
                csv_value = int(csv_value)
            if csv_value == '':
                csv_value = None

            if isinstance(fit_value, float):
                # float comparison
                self.assertAlmostEqual(fit_value, float(csv_value))
            else:
                self.assertEqual(
                    fit_value, csv_value,
                    msg="For %s, FIT value '%s' did not match CSV value '%s'"
                        % (field_name, fit_value, csv_value))

    # Both sides must be exhausted together.
    try:
        next(messages)
        self.fail(".FIT file had more messages than .CSV file")
    except StopIteration:
        pass

    try:
        next(csv_messages)
        self.fail(".CSV file had more messages than .FIT file")
    except StopIteration:
        pass

    csv_fp.close()
def test_fitparse_subfield_components(self):
    """Subfield components (sport_point, gear_change) must be expanded
    into their own named fields."""
    # score = 123, opponent_score = 456, total = 29884539
    sport_point_value = 123 + (456 << 16)
    # rear_gear_num = 4, rear_gear, = 20, front_gear_num = 2,
    # front_gear = 34
    gear_chance_value = 4 + (20 << 8) + (2 << 16) + (34 << 24)

    fit_data = _generate_fitfile(
        _generate_messages(
            # event (21), local message 1
            mesg_num=21, local_mesg_num=1,
            field_defs=[
                # event, data
                (0, 'enum'), (3, 'uint32')],
            data=[
                # sport point
                [33, sport_point_value],
                # front gear change
                [42, gear_chance_value]]))

    # parse the whole content
    fit = tuple(fitdecode.FitReader(
        fit_data, check_crc=True, keep_raw_chunks=False))

    def _assert_value(message, keys, expected):
        # each alias in *keys* must resolve to the same value
        for key in keys:
            self.assertEqual(message.get_field(key).value, expected)

    sport_point = fit[4]
    self.assertEqual(sport_point.name, 'event')
    for key in ('event', 0):
        self.assertEqual(sport_point.get_field(key).value, 'sport_point')
        self.assertEqual(sport_point.get_field(key).raw_value, 33)
    # verify raw numeric value
    _assert_value(
        sport_point, ('sport_point', 'data', 3), sport_point_value)
    _assert_value(sport_point, ('score', 7), 123)
    _assert_value(sport_point, ('opponent_score', 8), 456)

    gear_change = fit[5]
    self.assertEqual(gear_change.name, 'event')
    for key in ('event', 0):
        self.assertEqual(
            gear_change.get_field(key).value, 'front_gear_change')
        self.assertEqual(gear_change.get_field(key).raw_value, 42)
    # verify raw numeric value
    _assert_value(
        gear_change, ('gear_change_data', 'data', 3), gear_chance_value)
    _assert_value(gear_change, ('front_gear_num', 9), 2)
    _assert_value(gear_change, ('front_gear', 10), 34)
    _assert_value(gear_change, ('rear_gear_num', 11), 4)
    _assert_value(gear_change, ('rear_gear', 12), 20)
def __init__(self, fit_fileish):
    """Eagerly read and cache every frame of *fit_fileish*.

    :param fit_fileish: anything fitdecode.FitReader accepts (path,
        file object or bytes-like)
    """
    with fitdecode.FitReader(fit_fileish) as fit:
        # list(fit) drains the reader; the redundant
        # list([... for ...]) wrapper was removed (ruff C4xx).
        self._fit_frames = list(fit)
def main(args=None):
    """CLI entry point: parse a FIT file, pretty-print the frames as
    text and embed any parsing error in the output itself instead of
    aborting."""
    options = parse_args(args)

    def _echo(*objects, sep=' ', end='\n', file=options.output, flush=False):
        print(*objects, sep=sep, end=end, file=file, flush=flush)

    def _echo_separator():
        _echo('')
        _echo('*' * 80)
        _echo('')
        _echo('')

    global echo
    echo = _echo

    # fully parse input file and filter out the unwanted messages
    frames = []
    exception_msg = None
    try:
        # NOTE(review): check_crc receives options.nocrc directly here —
        # presumably it already holds a CrcCheck value; confirm against
        # parse_args.
        with fitdecode.FitReader(
                options.infile,
                processor=fitdecode.StandardUnitsDataProcessor(),
                check_crc=options.nocrc,
                keep_raw_chunks=True) as fit:
            for frame in fit:
                is_def = isinstance(frame, fitdecode.FitDefinitionMessage)
                if options.nodef and is_def:
                    continue
                if options.filter and (
                        is_def
                        or isinstance(frame, fitdecode.FitDataMessage)):
                    if (frame.name not in options.filter
                            and frame.global_mesg_num
                            not in options.filter):
                        continue
                frames.append(frame)
    except Exception:
        print(
            'WARNING: error(s) occurred while parsing FIT file. '
            'See output file for more info.',
            file=sys.stderr)
        exception_msg = traceback.format_exc()

    # print some statistics as a header, or the captured error if
    # parsing failed
    if not exception_msg:
        txt_print(global_stats(frames, options))
        echo('')
    else:
        echo('ERROR OCCURRED WHILE PARSING', options.infile.name)
        echo('')
        echo(exception_msg)
        echo('')

    # pretty-print the file; a FitHeader after the first frame marks the
    # start of a chained file
    seen_any = False
    for frame in frames:
        if seen_any and isinstance(frame, fitdecode.FitHeader):
            _echo_separator()
        seen_any = True
        txt_print(frame)
    echo('')
    return 0
def test_fitparse_int_long(self):
    """Test that ints are properly shifted and scaled"""
    frames = tuple(fitdecode.FitReader(_test_file('event_timestamp.fit')))
    # The second-to-last frame carries the event_timestamp array.
    raw = frames[-2].get_value('event_timestamp', idx=0, raw_value=True)
    self.assertEqual(raw, 863.486328125)