def parse(self):
    """Parse self.filename as a FIT file, updating self.status and emitting
    'status-changed' at each transition (NONE -> PARSING -> PARSED/FAILED).

    Intended to run off the main loop: terminal-state emissions are deferred
    via GObject.idle_add so handlers run on the GLib main thread.
    """
    # Re-entrancy guard: only start parsing from the pristine NONE state.
    if self.status != Fit.Status.NONE:
        return
    self.status = Fit.Status.PARSING
    # NOTE(review): this first emit is synchronous, unlike the two below that
    # go through idle_add — presumably parse() is started before the worker
    # detaches; confirm against the caller.
    self.emit('status-changed', self.status)
    try:
        self.fit = fitparse.FitFile(
            self.filename,
            data_processor=fitparse.StandardUnitsDataProcessor())
    except FitParseError:
        self.status = Fit.Status.FAILED
        # Defer the emission to the GLib main loop (we may be on a worker thread).
        GObject.idle_add(lambda: self.emit('status-changed', self.status))
        return
    self.fit.parse()
    # find the summary message
    for msg in self.fit.messages:
        if msg.name == 'session':
            self.summary = msg
            break
    self.status = Fit.Status.PARSED
    GObject.idle_add(lambda: self.emit('status-changed', self.status))
def _parse(self):
    """Parse the FIT file at self.file_path, dispatching each message to a
    category-specific handler, and store the grouped results in self._result.

    Raises:
        self.FileParsingError: if fitparse cannot read the file.
    """
    try:
        fit_file = fitparse.FitFile(self.file_path)
        data = fit_file.get_messages()
    except fitparse.FitParseError as err:
        raise self.FileParsingError(err)
    for item in data:
        value = item.get_values()
        # Hoisted: lower-cased name was recomputed for every elif branch.
        name = item.name.lower()
        if name in FIT_DATA_ACTIVITY_RECORD[1]:
            self._parse_activity_record(value)
        elif name in FIT_DATA_GEAR[1]:
            self._parse_gear(value)
        elif name in FIT_DATA_ACTIVITY[1]:
            self._parse_activity(value)
        elif name in FIT_DATA_TRAVELLER[1]:
            # NOTE(review): traveller messages are routed to _parse_activity —
            # looks like a copy/paste slip (expected a _parse_traveller?);
            # confirm, since self._traveller is reported below.
            self._parse_activity(value)
        else:
            self._parse_misc(value)
    # BUG FIX: the keys were written as calls, e.g. FIT_DATA_GEAR(0), which
    # raises TypeError on a sequence. The membership tests above use
    # FIT_DATA_X[1], so the result key is the indexed element FIT_DATA_X[0].
    self._result = {
        FIT_DATA_ACTIVITY_RECORD[0]: self._activity_record,
        FIT_DATA_GEAR[0]: self._gears,
        FIT_DATA_ACTIVITY[0]: self._activity,
        FIT_DATA_TRAVELLER[0]: self._traveller,
        FIT_DATA_UNCLASSIFIED[0]: self._unclassified,
    }
def main(input_directory, output_file):
    """Convert every .fit file found in the sorted subdirectories of
    *input_directory* and write the combined rows to *output_file* as CSV.

    Args:
        input_directory: directory whose immediate subdirectories hold .fit files.
        output_file: path of the CSV to write.
    """
    logging.info(f"processing {input_directory}")
    # Only immediate subdirectories, processed in sorted (deterministic) order.
    subdirs = sorted(
        d for d in os.listdir(input_directory)
        if os.path.isdir(join(input_directory, d)))
    headers = set(['timestamp', 'heart_rate', 'activity_type',
                   'activity_type_last_timestamp'])
    data = []
    # Renamed loop variables: 'dir' and 'file' shadowed builtins.
    for subdir in subdirs:
        current_dir = join(input_directory, subdir)
        logging.info(f"processing directory {current_dir}")
        for fit_name in sorted(f for f in os.listdir(current_dir)
                               if f.lower().endswith('.fit')):
            current_file = join(current_dir, fit_name)
            fitfile = fitparse.FitFile(
                current_file,
                data_processor=fitparse.StandardUnitsDataProcessor())
            logging.info(f"converting {current_file}")
            # convert_file mutates headers/data in place.
            convert_file(fitfile, headers, data)
    write_to_csv(headers, data, output_file)
    logging.info('finished conversions')
def main():
    """Walk 'Data Collection/<folder>/<person dir>/Garmin Connect/...' and
    convert each '*_wellness.fit' file to CSV, then collect per-person data.

    Relies on module-level `folders` and the helpers `write_fitfile_to_csv`
    and `collect_person`.
    """
    for folder in folders:
        # returns a list containing the names of the entries in the directory given by path
        path = os.path.join(os.getcwd(), 'Data Collection', folder)
        person_dirs = os.listdir(path)  # dirs --> ['04-112520-245pm-JJ Kim']
        for psdir in person_dirs:
            person = dict()
            # person --> {'Hannah Flynn': ['08', '112720', '124pm']}
            person[psdir.split('-')[-1]] = psdir.split('-')[0:3]
            # Obtain .fit files
            garmincon_path = os.path.join(path, psdir, "Garmin Connect")
            dirs = os.listdir(garmincon_path)  # dirs --> ['2020-11-27-124pm']
            # only one dir expected in dirs --> dirs[0]
            garmin_folder_path = os.path.join(garmincon_path, dirs[0])
            fitfiles = os.listdir(garmin_folder_path)
            fitfiles_wellness = [
                f for f in fitfiles if f[-13:].lower() == '_wellness.fit']
            for file in fitfiles_wellness:
                csv_filename = file[:-4] + '.csv'
                # BUG FIX: the existence check used the bare csv_filename,
                # which resolves against the CWD — but the CSV is written
                # inside garmin_folder_path, so conversions were never skipped.
                if os.path.exists(os.path.join(garmin_folder_path, csv_filename)):
                    continue
                fitfile_path = os.path.join(garmin_folder_path, file)
                fitfile = fitparse.FitFile(
                    fitfile_path,
                    data_processor=fitparse.StandardUnitsDataProcessor())
                print('converting %s' % fitfile_path)
                write_fitfile_to_csv(fitfile, garmin_folder_path, csv_filename)
            print('finished conversions')
            collect_person(person, garmin_folder_path)
def GetFitDf(filepath):
    """Parse a Garmin Fit file and return a DataFrame

    Args:
        filepath: String. Path and file corresponding with a Garmin Fit file

    Returns:
        df: DataFrame. DataFrame with corresponding Garmin Fit data
    """
    fitfile = fitparse.FitFile(filepath)
    # PERF: growing a DataFrame one cell at a time with df.loc[idx, key] is
    # quadratic; collect one dict per record and build the frame once.
    rows = []
    for record in fitfile.get_messages('record'):
        row = {}
        # Go through all the data entries in this record
        for record_data in record:
            key = record_data.name.replace('_', ' ')
            # Column label e.g. 'Heart Rate [bpm]'.
            key = '%s [%s]' % (key.title(), record_data.units)
            row[key] = record_data.value
        rows.append(row)
    return pd.DataFrame(rows)
def parse_fit_file(
        file: Union[fitparse.base.FitFile, bytes, str, ]) -> pd.DataFrame:
    """Converts a fit_file to a dataframe

    Args:
        file (Union[fitparse.base.FitFile, bytes, str]): The fit file to parse

    Raises:
        ValueError: If the file is not in a supported format

    Returns:
        pd.DataFrame: A DataFrame with the data
    """
    # Already-parsed FitFile objects pass straight through; raw bytes or a
    # path string are handed to fitparse for parsing.
    if isinstance(file, fitparse.base.FitFile):
        fit_file = file
    elif isinstance(file, (bytes, str)):
        fit_file = fitparse.FitFile(file)
    else:
        raise ValueError(f"{type(file)} is not supported!")
    return _parse_records(fit_file.get_messages("record"))
def main():
    '''Given a list of file names on the command line, parse each of them as
    FIT files and then export them to GPX-formatted files, named according to
    their timestamps.'''
    parser = argparse.ArgumentParser()
    parser.add_argument('gpx_file', nargs='+')
    args = parser.parse_args()
    for input_filename in args.gpx_file:
        print('processing', input_filename)
        with open_file(input_filename) as input_file:
            fitobj = fitparse.FitFile(input_file)
            fitobj.parse()
        time_created, gpx_data = convert_to_gpx(fitobj)
        # Skip files that contained no track records at all.
        if not gpx_data:
            print('no track data for {}; skipping'.format(input_filename))
            continue
        # Sanitize the timestamp-derived name so it is filesystem-safe.
        output_filename = re.sub(
            '[^-a-z_0-9.]', '_',
            'converted_from_fit_{}.gpx'.format(time_created))
        with open(output_filename, 'w') as out:
            out.write(gpx_data)
def write_fitfile_to_csv(file, output_file='test_output.csv'):
    """Parse *file* as a FIT file and write its allowed fields to CSV.

    Only fields named in the module-level `allowed_fields` are kept; rows
    missing any of `required_fields` are dropped. Timestamps are converted
    from UTC to the module-level CST zone.

    Args:
        file: path (or file-like object) of the .fit input.
        output_file: path of the CSV to write.
    """
    fitfile = fitparse.FitFile(
        file, data_processor=fitparse.StandardUnitsDataProcessor())
    messages = fitfile.messages
    data = []
    for m in messages:
        if not hasattr(m, 'fields'):
            continue
        # check for important data types
        mdata = {}
        for field in m.fields:
            if field.name in allowed_fields:
                if field.name == 'timestamp':
                    # FIT timestamps are UTC; render them in local (CST) time.
                    mdata[field.name] = UTC.localize(
                        field.value).astimezone(CST)
                else:
                    mdata[field.name] = field.value
        # Keep the row only if every required field was present.
        if not any(rf not in mdata for rf in required_fields):
            data.append(mdata)
    # write to csv
    # BUG FIX: csv files must be opened with newline='' or csv.writer emits
    # blank interleaved rows on Windows (see csv module docs).
    with open(output_file, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(allowed_fields)
        for entry in data:
            writer.writerow([str(entry.get(k, '')) for k in allowed_fields])
def process_activity_file(self, filepath):
    """Extract heart-rate zone minutes and money earned from a zipped
    activity export.

    Expects *filepath* to be a .zip containing '<activity>.fit'; any other
    extension (or a missing .fit) yields an empty dict.

    Returns:
        dict with 'money_earned', 'zone1_minutes', 'zone2_minutes',
        'start_time' — or {} if nothing could be processed.
    """
    ret_dict = {}
    ext = os.path.splitext(filepath)[-1].lower()
    dirname = os.path.dirname(filepath)
    if ext != ".zip":
        return ret_dict
    activity = os.path.splitext(os.path.basename(filepath))[0]
    extract_path = dirname + os.path.sep + activity + os.path.sep
    # Context manager guarantees the archive is closed even on error.
    with zipfile.ZipFile(filepath, 'r') as zip_ref:
        zip_ref.extractall(extract_path)
    fit_file = extract_path + activity + '.fit'
    if not os.path.exists(fit_file):
        return ret_dict
    fitfile = fitparse.FitFile(fit_file)
    timestamp = []
    heartrate = []
    # Get all data messages that are of type record
    for record in fitfile.get_messages('record'):
        # Go through all the data entries in this record
        for record_data in record:
            if record_data.name == "heart_rate":
                heartrate.append(record_data.value)
            elif record_data.name == "timestamp":
                timestamp.append(record_data.value)
    # Series lengths can differ; only iterate the overlap.
    cal_len = min(len(timestamp), len(heartrate))
    zone1_total_time_min = 0
    zone2_total_time_min = 0
    for i in range(cal_len - 1):
        time_diff_sec = (timestamp[i + 1] - timestamp[i]).total_seconds()
        if ZONE1 <= heartrate[i] < ZONE2:
            zone1_total_time_min += time_diff_sec / 60.0
        elif heartrate[i] >= ZONE2:
            zone2_total_time_min += time_diff_sec / 60.0
    money_earned = (zone1_total_time_min * ZONE1_MONEY
                    + zone2_total_time_min * ZONE2_MONEY)
    # ROBUSTNESS FIX: guard timestamp[0] — a .fit with no records would
    # previously raise IndexError here.
    if timestamp:
        ret_dict = {
            'money_earned': money_earned,
            'zone1_minutes': zone1_total_time_min,
            'zone2_minutes': zone2_total_time_min,
            'start_time': timestamp[0]
        }
    return ret_dict
def convertgeofilesbatch(inputpath, outputpath):
    """Convert every GPX/TCX/FIT file in *inputpath* to a standardised CSV in
    *outputpath*, writing a '___process_report.csv' summary at the end.

    Args:
        inputpath: directory (with trailing separator) holding source files.
        outputpath: directory (with trailing separator) for converted CSVs.
    """
    files = os.scandir(inputpath)
    countTcx = 0
    countFit = 0
    countGpx = 0
    results = []
    results.append([
        'Filename', 'Outputfile', 'Linecount', 'Errorfound', 'Errorcount',
        'Errortext'
    ])
    print(
        '\n\n .............. Converting GPX/TCX/FIT files to a standardised CSV intermediate format \n\n'
    )
    for file in files:
        inputfilename = file.name
        fileid = '_' + inputfilename[:-4]
        if file.name[-3:].upper() == 'FIT':
            # NOTE(review): unlike the TCX/GPX branches this name has no
            # leading '/' — confirm outputpath carries its own separator.
            outputfilename = '_fit' + inputfilename[:-3] + 'csv'
            # BUG FIX: countFit was declared but never incremented.
            countFit += 1
            cvinput = inputpath + inputfilename
            cvoutput = outputpath + outputfilename
            print('FIT : ', cvinput)
            fitfile = fitparse.FitFile(
                cvinput,
                data_processor=fitparse.StandardUnitsDataProcessor())
            lines = convertrideFIT(fitfile, cvoutput, fileid)
            results.append([file.name, outputfilename, lines, '', '', ''])
        elif file.name[-3:].upper() == 'TCX':
            outputfilename = '/_tcx' + ''.join(file.name)
            countTcx += 1
            prepTCXfile(inputpath, file.name, outputpath, outputfilename)
            cvinput = inputpath + inputfilename
            cvoutput = outputpath + outputfilename[0:-3] + 'csv'
            print('TCX : ', cvinput)
            lines, error, errorcount, lasterror = convertrideTCX(
                cvinput, cvoutput, fileid)
            results.append([
                file.name, outputfilename[1:-3] + 'csv', lines, error,
                errorcount, lasterror
            ])
        elif file.name[-3:].upper() == 'GPX':
            outputfilename = '/_gpx' + ''.join(file.name)
            countGpx += 1
            cvinput = inputpath + inputfilename
            cvoutput = outputpath + outputfilename[0:-3] + 'csv'
            print('GPX : ', cvinput)
            lines, error, errorcount, lasterror = convertrideGPX(
                cvinput, cvoutput, fileid)
            results.append([
                file.name, outputfilename[1:-3] + 'csv', lines, error,
                errorcount, lasterror
            ])
    with open(outputpath + '/' + '___process_report.csv', 'w',
              newline='') as f:
        writer = csv.writer(f)
        for line in results:
            # Simplified: writerow accepts the list directly (was a
            # redundant element-by-element generator); the explicit
            # f.close() inside the with-block was also redundant.
            writer.writerow(line)
def __init__(self, path: str):
    """Open fit file

    Args:
        path: Path to the .fit file
    """
    # Parsed fitparse handle; private to this wrapper class.
    self.__fitfile = fitparse.FitFile(path)
    # Lazily-computed caches (populated elsewhere in the class).
    self.__fields = None
    self.__length = None
def main(file):
    """Convert a single .fit file to a CSV named after it and return the
    result of write_fitfile_to_csv."""
    csv_name = file[:-4] + '.csv'
    parsed = fitparse.FitFile(
        file, data_processor=fitparse.StandardUnitsDataProcessor())
    print('converting %s' % file)
    result = write_fitfile_to_csv(parsed, csv_name)
    print('finished conversions')
    return result
def file_to_dataframe(input_file: str) -> pd.DataFrame:
    """
    Record message to Pandas DataFrame
    :param input_file: str
    :return: Pandas DataFrame
    """
    log.info(f'opening {input_file}')
    parsed = fitparse.FitFile(input_file)
    records = parsed.get_messages('record')
    return messages_to_dataframe(records)
def __init__(self, filepath):
    """
    :param filepath: path to fit file directory
    :param fitfile: fit file parser object
    :param heartrate: pandas dataframe with heart rate data
    """
    self.filepath = filepath
    self.fitfile = fitparse.FitFile(filepath)
    # The first 17 characters of the file name encode the recording datetime.
    self.datets = pd.to_datetime(os.path.basename(filepath)[:17])
    self.heartrate = self._get_heartrate_data()
    # NOTE(review): 'detla' looks like a typo for 'delta', but the method is
    # defined elsewhere under this name — rename both together if fixed.
    self.add_heartrate_detla()
def readfitfile(fname):
    """Parse *fname* as a FIT file (CRC checked) and return all of its
    messages with standard units applied.

    Args:
        fname: path to the .fit file.

    Returns:
        Iterable of fitparse message records (no definition messages).
    """
    fitfile = fitparse.FitFile(
        fname,
        data_processor=fitparse.StandardUnitsDataProcessor(),
        # Simplified: was `check_crc=not (None)`, which always evaluates True.
        check_crc=True,
    )
    return fitfile.get_messages(name=None,
                                with_definitions=False,
                                as_dict=False)
def dump(self, fit_file):
    """Parse *fit_file* and fold every message into a fresh model.Activity,
    returning it. A scratch dict is threaded through the handler for state."""
    parsed = fitparse.FitFile(
        fit_file,
        data_processor=fitparse.StandardUnitsDataProcessor(),
    )
    activity = model.Activity()
    scratch = {}
    for message in parsed.get_messages():
        self._handle_message(message, activity, scratch)
    return activity
def read_fit(filename):
    """Read the 'record' messages of a FIT file into a DataFrame, tagging
    every row with a 'trackname' column derived from the file name."""
    # Load the FIT file
    parsed = fitparse.FitFile(filename)
    rows = [
        {data.name: data.value for data in record}
        for record in parsed.get_messages("record")
    ]
    frame = pd.DataFrame(rows)
    frame['trackname'] = os.path.splitext(filename)[0]
    return frame
def app(input_file, output_folder, dump_units=False):
    """Render every message type of a FIT file as an HTML table.

    Args:
        input_file: path to the .fit file.
        output_folder: directory receiving one '<msg_type>.html' per type.
        dump_units: forwarded to messages_to_dataframe (include units).

    Raises:
        Whatever fitparse raises on an unreadable file (logged first).
    """
    try:
        fitfile = fitparse.FitFile(input_file)
    except Exception as e:
        log.error(e)
        raise
    # A set already de-duplicates; the second set() call was redundant.
    msg_types = {message.name for message in fitfile.get_messages()}
    for msg_type in msg_types:
        # BUG FIX: the dump_units parameter was ignored — a literal False
        # was passed instead of forwarding the argument.
        df = messages_to_dataframe(fitfile.get_messages(msg_type),
                                   dump_units=dump_units)
        df.to_html(os.path.join(output_folder, msg_type + '.html'))
def fit2json(filename):
    '''Convert a fit file into a human readable json.'''
    try:
        parsed = fitparse.FitFile(str(filename))
        payload = {
            'sport': get_sport(parsed),
            'records': get_records(parsed),
            'filename': filename,
        }
    except Exception as ex:
        # CLI-style behavior: report and terminate on any parse failure.
        print(f'(unknown): {ex}')
        exit(1)
    return json.dumps(payload, indent=4, sort_keys=True, default=str)
def main():
    """Convert each .fit file in the current directory to CSV, skipping any
    whose CSV already exists."""
    candidates = [f for f in os.listdir() if f[-4:].lower() == '.fit']
    for file in candidates:
        new_filename = file[:-4] + '.csv'
        # Already converted on a previous run — nothing to do.
        if os.path.exists(new_filename):
            continue
        fitfile = fitparse.FitFile(
            file, data_processor=fitparse.StandardUnitsDataProcessor())
        print('converting %s' % file)
        write_fitfile_to_csv(fitfile, new_filename)
    print('finished conversions')
def _load_fit(self, file_name: str):
    """Load a FIT activity file into self._segments as lists of TrackPoints.

    Segments are split on timer stop/stop_all events; session, sport and lap
    messages are forwarded to their dedicated parsers. Coordinates arrive as
    semicircles and are converted to degrees.
    """
    self.clear()
    fit = fitparse.FitFile(file_name)
    segment: List[TrackPoint] = []
    debug = False
    for message in fit.get_messages():
        # PERF: get_values() was recomputed up to six times per record;
        # compute it (and the message-type name) once per message.
        values = message.get_values()
        name = (message.mesg_type.name
                if message.mesg_type is not None else None)
        if debug:
            print(name)
            print(values)
        if message.mesg_type is None:
            continue
        if name == "session":
            self._parse_fit_session_message(message)
        elif name in ("sport", "lap"):
            self._parse_fit_sport_message(message)
        elif name == "event":
            # A stopped timer closes the current segment (pause in recording).
            if values["event"] == "timer" and \
                    values["event_type"] in ("stop", "stop_all"):
                if len(segment) != 0:
                    self._segments.append(segment)
                    segment = []
        elif name == "record":
            time = None
            lat = None
            lng = None
            altitude = None
            if "timestamp" in values:
                time = values["timestamp"]
                self._update_time_bounds(time)
            if "position_lat" in values:
                lat = values["position_lat"]
            if "position_long" in values:
                lng = values["position_long"]
            if "altitude" in values:
                altitude = values["altitude"]
            if (time is not None) and (lat is not None) and (lng is not None):
                # Semicircles -> degrees: scale by 180 / 2^31.
                lat = 180.0 * (float(lat) / float(0x7FFFFFFF))
                lng = 180.0 * (float(lng) / float(0x7FFFFFFF))
                segment.append(
                    TrackPoint(time,
                               s2sphere.LatLng.from_degrees(lat, lng),
                               altitude))
    # Flush the trailing segment (file may end without a stop event).
    if len(segment) != 0:
        self._segments.append(segment)
def __init__(self, filename):
    """Load track points ('record' messages) from *filename*.

    On any parse failure self.fit_data is set to None (distinguishing
    "unreadable file" from "readable but empty"). Tracks with no movement at
    their ends are trimmed afterwards.
    """
    self.fit_filepath = None
    self.fit_data = []
    if filename is not None and len(filename) > 0:
        self.fit_filepath = filename
        try:
            fit_file = fitparse.FitFile(self.fit_filepath)
            for record in fit_file.get_messages("record"):
                point = self._get_record_data(record)
                if point is not None:
                    self.fit_data.append(point)
        # BUG FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception (deliberate best-effort —
        # any parse error just marks the data as unavailable).
        except Exception:
            self.fit_data = None
        if self.get_num_of_tracks() > 0:
            self._strip_ends_for_no_movement_tracks()
def main():
    """Convert every .fit file in fit_data_path to CSV under ../fit_data/,
    skipping files whose CSV already exists."""
    files = os.listdir(fit_data_path)
    fit_files = [file for file in files if file[-4:].lower() == '.fit']
    for file in fit_files:
        new_filename = file[:-4] + '.csv'
        out_path = '{}/fit_data/{}'.format(os.path.pardir, new_filename)
        # BUG FIX: the existence check used the bare new_filename, which
        # resolves against the CWD, while the CSV is written to out_path —
        # so already-converted files were never skipped.
        if os.path.exists(out_path):
            # print('%s already exists. skipping.' % new_filename)
            continue
        fitfile = fitparse.FitFile(
            '{}/fit_data/{}'.format(os.path.pardir, file),
            data_processor=fitparse.StandardUnitsDataProcessor())
        print('Converting {}'.format(file))
        write_fitfile_to_csv(fitfile, out_path)
    print('Convert Finished.')
def main():
    """Convert every .fit file in *datadir* (sorted order), then write the
    accumulated timestamp/heart-rate data to op.csv in the same directory.

    Relies on module-level `datadir`, `timestamp`, `hrdata`,
    `output_messages` and `output_csv`.
    """
    # get the fit files in order (glob order is not guaranteed, sort it)
    files = sorted(Path(datadir).glob('*.fit'))
    for f in files:
        # Simplified: glob already yields the full path — no need to re-join
        # datadir with f.name by string concatenation.
        fitfile = fitparse.FitFile(str(f))
        print("converting {}".format(f.name))
        output_messages(fitfile)
    output_csv(timestamp, hrdata, str(Path(datadir) / 'op.csv'))
def main(args=None):
    """Entry point: parse CLI options, read the FIT file and emit its
    messages either as JSON or in human-readable form."""
    options = parse_args(args)
    fitfile = fitparse.FitFile(
        options.infile,
        data_processor=fitparse.StandardUnitsDataProcessor(),
        check_crc=not options.ignore_crc,
    )
    records = fitfile.get_messages(name=options.name,
                                   with_definitions=options.with_defs,
                                   as_dict=options.as_dict)
    if options.type == "json":
        json.dump(records, fp=options.output, cls=RecordJSONEncoder)
    elif options.type == "readable":
        formatted = (format_message(n, record, options)
                     for n, record in enumerate(records, 1))
        options.output.writelines(formatted)
def main(
        fit_target_dir,
        fit_processed_csv_dir,
        fit_overwrite,
        fit_ignore_splits_and_laps,
):
    """Convert .fit files in *fit_target_dir* to CSVs in
    *fit_processed_csv_dir*, tracking processed files in a log.

    Args:
        fit_target_dir: directory holding the input .fit files.
        fit_processed_csv_dir: output directory (also holds the log file).
        fit_overwrite: when True, re-convert files already in the log.
        fit_ignore_splits_and_laps: forwarded to write_fitfile_to_csv.
    """
    ALT_LOG = os.path.join(fit_processed_csv_dir, ALT_LOG_)
    files = os.listdir(fit_target_dir)
    fit_files = [file for file in files if file.lower().endswith('.fit')]
    overwritten_files = []
    if not os.path.exists(ALT_LOG):
        # PORTABILITY FIX: was os.system('touch %s' % ALT_LOG) — a shell-out
        # that fails on Windows and on paths with spaces; create the empty
        # file directly instead.
        with open(ALT_LOG, 'a'):
            pass
        file_list = []
    else:
        file_list = read_log(fit_processed_csv_dir)
    for file in fit_files:
        is_overwritten = False
        if file in file_list and not fit_overwrite:
            continue
        elif file in file_list:
            is_overwritten = True
        new_filename = file[:-4] + '.csv'
        fitfile = fitparse.FitFile(
            os.path.join(fit_target_dir, file),
            data_processor=fitparse.StandardUnitsDataProcessor()
        )
        print('converting %s' % os.path.join(fit_target_dir, file))
        write_fitfile_to_csv(
            fitfile,
            new_filename,
            file,
            fit_target_dir,
            fit_processed_csv_dir,
            is_overwritten,
            fit_ignore_splits_and_laps,
        )
    print('finished conversions')
def read_fitparse(input_file):
    """Print each 'record' message's pace (min/km) with its Rome-local
    timestamp. FIT timestamps are UTC and converted via fit_timezone."""
    log.info(f'opening {input_file}')
    fitfile = fitparse.FitFile(input_file)
    records = list(fitfile.get_messages('record'))
    local_tz = pytz.timezone('Europe/Rome')
    fmt = "%Y-%m-%d %H:%M:%S"
    for record in records:
        # all timestamps are in UTC
        utc_ts = fit_timezone.localize(record.get('timestamp').value)
        local_timestamp = utc_ts.astimezone(local_tz)
        speed_mt_sec = record.get('enhanced_speed').value
        # m/s -> min/km; guard the division for a stationary sample.
        if speed_mt_sec == 0.0:
            speed_min_km = 0.0
        else:
            speed_min_km = 1000.0 / 60.0 * 1.0 / speed_mt_sec
        # speed_units = record.get('enhanced_speed').units
        print(f'[{local_timestamp.strftime(fmt)}] {speed_min_km}')
def parse_fit_to_gpx(fitname) -> gpxpy.gpx.GPX:
    """Convert a FIT file into a GPX object with one track, splitting track
    segments at each 'lap' message.

    Args:
        fitname: path to the .fit file.

    Returns:
        gpxpy.gpx.GPX: the converted track.
    """
    fitfile = fitparse.FitFile(fitname)
    gpx = gpxpy.gpx.GPX()
    gpxTrack = gpxpy.gpx.GPXTrack()
    gpx.tracks.append(gpxTrack)
    gpxSegment = gpxpy.gpx.GPXTrackSegment()
    for message in fitfile.messages:
        if message.name == "lap":
            # append the current TrackSegment to the GPX
            gpxTrack.segments.append(gpxSegment)
            # and start a new one for the records to follow
            gpxSegment = gpxpy.gpx.GPXTrackSegment()
        if message.name == "record":
            gpxTrackPoint = gpxpy.gpx.GPXTrackPoint()
            for data in message.fields:
                if data.name == "position_long" and data.value is not None:
                    gpxTrackPoint.longitude = semicircles_to_degrees(data.value)
                if data.name == "position_lat" and data.value is not None:
                    gpxTrackPoint.latitude = semicircles_to_degrees(data.value)
                if data.name == "timestamp":
                    gpxTrackPoint.time = data.value
                if data.name == "altitude":
                    gpxTrackPoint.elevation = round(data.value, 8)
            # Drop points without a usable position fix.
            if not (gpxTrackPoint.latitude and gpxTrackPoint.longitude):
                continue
            gpxSegment.points.append(gpxTrackPoint)
    # BUG FIX (resolves the old TODO): records after the final 'lap' message
    # were silently dropped; flush the trailing segment if it has points.
    if gpxSegment.points:
        gpxTrack.segments.append(gpxSegment)
    return gpx
def read_fit_file(fit_input, allowed_fields, required_fields):
    """Read a FIT file and return a list of dicts, one per message, keeping
    only *allowed_fields* and dropping messages missing any *required_fields*.

    Args:
        fit_input: path (or file-like object) of the .fit input.
        allowed_fields: iterable of field names to keep.
        required_fields: iterable of field names a message must have.

    Returns:
        list[dict]: filtered message data, timestamps converted UTC -> CST.
    """
    # Open file using fitparse library and assign messages to variable
    fitfile = fitparse.FitFile(
        fit_input,
        data_processor=fitparse.StandardUnitsDataProcessor())
    messages = fitfile.messages
    data = []
    for m in messages:
        # create an empty dict to collect data in
        mdata = {}
        # check for important data types
        for field in m.fields:
            # Only read allowed fields
            if field.name in allowed_fields:
                if field.name == 'timestamp':
                    # 'timestamp' gets special treatment by converting to
                    # local time zone
                    mdata[field.name] = UTC.localize(
                        field.value).astimezone(CST)
                elif field.name not in mdata or field.value is not None:
                    # Zwift files have duplicate fields, one with value other
                    # with 'None'. Only add fields that either don't exist
                    # yet or carry a value other than None.
                    # IDIOM FIX: was `field.value != None`; None comparisons
                    # use identity.
                    mdata[field.name] = field.value
        # Make sure all required fields have been read; skip the item if not.
        if not any(rf not in mdata for rf in required_fields):
            data.append(mdata)
    return data
def main():
    """Convert each .fit file in the current directory to CSV.

    When ALT_FILENAME mode is enabled, previously-processed files are tracked
    in the ALT_LOG file and skipped; otherwise an existing CSV of the same
    name causes a skip.
    """
    files = os.listdir()
    fit_files = [file for file in files if file[-4:].lower() == '.fit']
    if ALT_FILENAME:
        if not os.path.exists(ALT_LOG):
            # BUG FIX: the existence check is on ALT_LOG but the code touched
            # ALT_FILENAME (a flag, not the log path) — create ALT_LOG, and
            # do it portably instead of shelling out to `touch`.
            with open(ALT_LOG, 'a'):
                pass
            file_list = []
        else:
            file_list = read_log()
    for file in fit_files:
        if ALT_FILENAME:
            # Already recorded in the log from a previous run.
            if file in file_list:
                continue
        new_filename = file[:-4] + '.csv'
        if os.path.exists(new_filename) and not ALT_FILENAME:
            # print('%s already exists. skipping.' % new_filename)
            continue
        fitfile = fitparse.FitFile(
            file, data_processor=fitparse.StandardUnitsDataProcessor())
        print('converting %s' % file)
        write_fitfile_to_csv(fitfile, new_filename, file)
    print('finished conversions')