Exemple #1
0
def _preprocess_fitfile(file_path):
    """Parse a FIT file into a list of per-record dicts.

    Records missing any required key are skipped. Latitude/longitude are
    converted from semicircles to degrees, and a fixed activityId/type
    pair is attached to every kept record.

    :param file_path: path to the .fit file
    :return: (list of record dicts, mapping of FIT names to export names)
    """
    fit = FitFile(file_path)
    # FIT field name -> export column name
    key_mapping = {
        "position_lat": "Latitude",
        "position_long": "Longitude",
        "heart_rate": "Heading",
        "enhanced_speed": "Speed",
        "timestamp": "Gentime"
    }
    rows = []
    for message in fit.get_messages('record'):
        # Keep the raw value for timestamps, the converted value otherwise.
        fields = {}
        for metric in message:
            fields[metric.name] = (
                metric.raw_value if metric.name == "timestamp" else metric.value
            )
        if any(required not in fields for required in key_mapping):
            print("skipping")
            continue
        fields['activityId'] = 7
        fields['type'] = 'fitfile_upload'
        fields["position_lat"] = semicircles_to_degrees(fields["position_lat"])
        fields["position_long"] = semicircles_to_degrees(fields["position_long"])
        rows.append(fields)
    return rows, key_mapping
Exemple #2
0
def load_workout(workout_file):
    """
    Load a FIT file and transform it into a pandas DataFrame.

    NaN values are forward-filled, then backward-filled so leading
    gaps are covered as well.

    :param workout_file: path or file-like object accepted by FitFile
    :return: pandas DataFrame with one row per 'record' message
    """
    # Load the fitfile
    fitfile = FitFile(workout_file)

    # Ugly hack to avoid timing issues: accessing .messages can raise
    # KeyError while parsing is still settling, so retry until it works.
    # NOTE(review): this can spin forever on a truly broken file —
    # consider bounding the retries.
    while True:
        try:
            fitfile.messages
            break
        except KeyError:
            continue

    # Collect every data message of type 'record'.
    workout = []
    for record in fitfile.get_messages('record'):
        row = {record_data.name: record_data.value for record_data in record}
        workout.append(row)

    workout_df = pd.DataFrame(workout)
    # fillna(method=...) is deprecated in modern pandas; the dedicated
    # ffill()/bfill() methods are the supported equivalent.
    workout_df = workout_df.ffill().bfill()
    return workout_df
Exemple #3
0
def get_points_from_fit(file_list, local_time=False, verbose=False):
    '''
    Read location and time stamps from a track in a FIT file.

    Returns a list of tuples (time, lat, lon, altitude)
    '''
    data = []
    for file in file_list:
        try:
            fit = FitFile(file)
            messages = fit.get_messages('gps_metadata')
            for record in tqdm(messages,
                               desc='Extracting GPS data from .FIT file'):
                timestamp = record.get('utc_timestamp').value
                if local_time:
                    timestamp = utc_to_localtime(timestamp)
                lat = semicircle_to_degrees(record.get('position_lat').value)
                lon = semicircle_to_degrees(record.get('position_long').value)
                altitude = record.get('altitude').value
                # BUG FIX: the original concatenated every record into a
                # single flat tuple per file; append one 4-tuple per record
                # as the docstring promises.
                data.append((timestamp, lat, lon, altitude))
        except ValueError:
            # Raised by fitparse on malformed files; skip them.
            if verbose:
                print("File {} not formatted properly".format(file))
    return data
Exemple #4
0
def fig(fn):
    """Plot speed and heart rate versus distance for a FIT activity file."""
    records = [msg.get_values() for msg in FitFile(fn).get_messages('record')]
    df = pd.DataFrame(records)
    print(df.iloc[-1])

    try:
        # Semicircles -> degrees: 2 << 30 == 2**31 semicircles per 180 deg.
        df['lat'] = df.position_lat.map(lambda s: s * 180 / (2 << 30))
        df['long'] = df.position_long.map(lambda s: s * 180 / (2 << 30))
    except AttributeError as err:
        # Files without position columns land here.
        print(fn)
        print(err)
        return None

    print(df)
    # print(df.timestamp)

    figure, axes = plt.subplots(nrows=4, ncols=1)
    mpl.rcParams["figure.figsize"] = [240, 200]

    # df.plot(x='long', y='lat', ax=axes[0])
    df.plot(x='distance', y='speed', color='b', ax=axes[1])
    df.plot(x='distance', y='heart_rate', color='c', ax=axes[2])
    # df.plot.bar(x='timestamp', y=['heart_rate', 'cadence'], color=['b', 'c'])
    plt.tick_params(axis='x', which='both', bottom=False, top=False,
                    labelbottom=False)
    plt.show()
    def parseFit(self):
        """Parse self.fileName and store sampled [lat, lon] pairs on
        self.coordinateList."""
        try:
            fit_file = FitFile(self.fileName, check_crc=True)
        except Exception as e:
            logging.debug("Error while parsing {} ".format(self.fileName))
            logging.debug(str(e))
            raise e

        coordinates = []
        count = 0
        for message in fit_file.get_messages('record'):
            count += 1
            # Keep only every modValue-th record for sampling.
            if count % self.modValue != 0:
                continue

            values = message.get_values()
            logging.debug("Message data: {}".format(values))
            # must convert lat and long to degrees as well
            lat = self.convertToDegrees(values.get("position_lat"))
            lon = self.convertToDegrees(values.get("position_long"))
            logging.debug("Position: ({}, {})".format(lat, lon))

            if lat is not None and lon is not None:
                coordinates.append([lat, lon])

        logging.debug("Found {} coordinate pairs".format(len(coordinates)))
        self.coordinateList = coordinates
Exemple #6
0
    def test_component_field_accumulaters(self):
        # TODO: abstract CSV parsing
        # Use a context manager so the CSV handle is closed even when an
        # assertion fails (the original leaked it on failure).
        with open(testfile('compressed-speed-distance-records.csv'), 'r') as csv_fp:
            csv_file = csv.reader(csv_fp)
            next(csv_file)  # Consume header

            f = FitFile(testfile('compressed-speed-distance.fit'))
            f.parse()

            records = f.get_messages(name='record')
            empty_record = next(records)  # Skip empty record for now (sets timestamp via header)

            # File's timestamp record is < 0x10000000, so field returns seconds
            self.assertEqual(empty_record.get_value('timestamp'), 17217864)

            # TODO: update using local_timestamp as offset, since we have this value as 2012 date

            for count, (record, (timestamp, heartrate, speed, distance, cadence)) in enumerate(zip(records, csv_file)):
                # No fancy datetime stuff, since timestamp record is < 0x10000000
                fit_ts = record.get_value('timestamp')
                self.assertIsInstance(fit_ts, int)
                self.assertLess(fit_ts, 0x10000000)
                self.assertEqual(fit_ts, int(timestamp))

                self.assertEqual(record.get_value('heart_rate'), int(heartrate))
                self.assertEqual(record.get_value('cadence'), int(cadence) if cadence != 'null' else None)
                self.assertAlmostEqual(record.get_value('speed'), float(speed))
                self.assertAlmostEqual(record.get_value('distance'), float(distance))

            self.assertEqual(count, 753)  # TODO: confirm size(records) = size(csv)
Exemple #7
0
def extract(filename, mode):
    """Extract timestamp, distance and speed series from an activity file.

    :param filename: path to a CSV (header row + ts,dist,speed columns)
        or a .fit file
    :param mode: truthy to parse `filename` as CSV, falsy to parse as FIT
    :return: (timestamp list, distance list, speed list in km/h)
    """
    timestamp = []
    distance = []
    speed = []

    # BUG FIX: the original tested an undefined global `csvmode` instead
    # of the `mode` parameter it was given.
    if mode:
        with open(filename) as csvfile:
            alldata = csv.reader(csvfile, delimiter=',')
            next(alldata, None)  # discard line containing column title text
            for row in alldata:
                timestamp.append(int(float(row[0])))
                distance.append(float(row[1]))
                speed.append(float(row[2]) * 3.6)  # convert m/s to km/h here
    else:
        fitfile = FitFile(filename)
        for record in fitfile.get_messages('record'):
            for field in record:
                if field.name == 'timestamp':
                    timestamp.append(field.value)
                elif field.name == 'distance':
                    distance.append(field.value)
                elif field.name == 'speed':
                    speed.append(field.value * 3.6)  # convert m/s to km/h here

    return timestamp, distance, speed
Exemple #8
0
def compute_activity_stats(path_to_file):
    """Compute total distance (km) and elapsed time (minutes) for a FIT
    activity by walking consecutive record pairs.

    :param path_to_file: path to the .fit file
    :return: (distance_in_km, total_time_in_minutes)
    :raises AssertionError: when the file has fewer than two records
    """
    fitfile = FitFile(path_to_file)
    records = list(fitfile.get_messages('record'))
    assert len(records) >= 2, (
        path_to_file + ": activity must contain at least two records")
    dist_in_meters = 0
    total_activity_time_in_seconds = 0
    for first, second in zip(records, records[1:]):
        record_data1 = {r.name: r.value for r in first}
        record_data2 = {r.name: r.value for r in second}
        # Only accumulate distance when BOTH records carry a position;
        # the original dereferenced record2's position unconditionally
        # and crashed on a None/missing value.
        if (record_data1.get("position_lat") is not None
                and record_data2.get("position_lat") is not None):
            # Semicircles -> degrees: 2**31 semicircles == 180 degrees.
            lat1 = float(record_data1["position_lat"] * 180 / math.pow(2, 31))
            long1 = float(record_data1["position_long"] * 180 / math.pow(2, 31))
            lat2 = float(record_data2["position_lat"] * 180 / math.pow(2, 31))
            long2 = float(record_data2["position_long"] * 180 / math.pow(2, 31))
            dist_in_meters += exporter.haversine((lat1, long1), (lat2, long2))
        if record_data1.get("timestamp"):
            # timedelta between consecutive records, accumulated in seconds.
            elapsed_time = record_data2["timestamp"] - record_data1["timestamp"]
            total_activity_time_in_seconds += elapsed_time.total_seconds()
    return (dist_in_meters / 1000), (total_activity_time_in_seconds / 60)
Exemple #9
0
def parse_fit_file(file_path, rename=False):
    """Parse a .fit ride file into heart-rate/power/cadence arrays.

    Only records carrying all three of power, cadence and heart_rate
    are kept.

    :param file_path: path to the .fit file
    :param rename: when True, rename the file to "<start datetime>.fit"
    :return: (hr array, power array, cadence array, start datetime or None)
    """
    fitfile = FitFile(file_path)
    hrs = []
    pwrs = []
    cads = []
    times = []
    stime = None
    try:
        for record in fitfile.get_messages('record'):
            rec_dict = record.get_values()
            if stime is None:  # was `== None`; identity test is the idiom
                # Store the initial datetime for the ride.
                stime = rec_dict['timestamp']
                if rename:
                    # Rename the file to the ride's start datetime.
                    new_fname = stime.strftime("%Y-%m-%dT%H_%M_%S") + '.fit'
                    folder = file_path[:file_path.rfind('/') + 1]
                    os.rename(file_path, folder + new_fname)
            # Only include timestamps that have all 3 values; use boolean
            # `and` rather than bitwise `&` (clearer, short-circuits).
            if ('power' in rec_dict and 'cadence' in rec_dict
                    and 'heart_rate' in rec_dict):
                if (rec_dict['power'] is not None
                        and rec_dict['cadence'] is not None
                        and rec_dict['heart_rate'] is not None):
                    pwrs.append(rec_dict['power'])
                    cads.append(rec_dict['cadence'])
                    hrs.append(rec_dict['heart_rate'])
                    times.append(rec_dict['timestamp'])
    except AttributeError:  # for corrupt files
        pass
    return (np.array(hrs), np.array(pwrs), np.array(cads), stime)
Exemple #10
0
def select_activities_to_upload(conf, date_last_activity):
    """Return (path, start-time string) pairs for every activity in the
    Garmin folder that started after date_last_activity."""
    last_date = datetime.datetime.strptime(date_last_activity,
                                           '%Y-%m-%dT%H:%M:%SZ')
    activities_to_upload = []
    for entry in os.listdir(conf["garmin_activities_folder"]):
        file_path = os.path.join(conf["garmin_activities_folder"], entry)
        fitfile = FitFile(file_path)
        # Only the first 'record' message is needed to read the start time.
        first_record = next(fitfile.get_messages('record'), None)
        if first_record is None:
            print("No record for this activity " + file_path)
            continue
        for record_data in first_record:
            if record_data.name == "timestamp":
                starting_time = record_data.value
                if starting_time > last_date:
                    activities_to_upload.append(
                        (file_path,
                         starting_time.strftime('%Y-%m-%d %H:%M:%S')))

    # sort on activity name, older activity will be uploaded first
    activities_to_upload.sort(key=lambda elt: elt[0])
    return activities_to_upload
Exemple #11
0
def compute_trimp(file_name, minhr, maxhr, gender):
    """Compute the TRIMP (training impulse) score for a FIT activity.

    Returns 0 when any lap's sport does not match SPORT.

    :param file_name: path to the .fit file
    :param minhr: minimum (resting) heart rate
    :param maxhr: maximum heart rate
    :param gender: passed through to compute_frac_trimp
    :return: accumulated TRIMP value
    """
    ret = 0
    try:
        from fitparse import FitFile
    except ImportError:
        raise Exception("fitparse not found, please install python-fitparse from https://github.com/dtcooper/python-fitparse.git")

    fitfile = FitFile(file_name)

    hr = None
    ts = None
    old_ts = None

    # Bail out if any lap is for a different sport.
    for record in fitfile.get_messages('lap'):
        if record.get_value('sport') != SPORT:
            return 0

    for record in fitfile.get_messages('record'):
        for record_data in record:
            if record_data.name == 'heart_rate':
                hr = record_data.value
            elif record_data.name == 'timestamp':
                old_ts = ts
                # BUG FIX: the original round-tripped the datetime through
                # str()/strptime, which fails if the value carries
                # microseconds; convert the datetime directly instead.
                ts = time.mktime(record_data.value.timetuple())
                if old_ts and hr:
                    ret += compute_frac_trimp(ts - old_ts, hr, minhr,
                                              maxhr, gender)

    return ret
Exemple #12
0
def main():
    '''Open Fit File and create lists of values depending on channel'''

    # Channel dictionary with a nested list of best efforts and a list of all the channels values.
    # [0] = 1 Minute
    # [1] = 5 Minutes
    # [2] = 10 Minutes
    # [3] = 15 Minutes
    # [4] = 20 Minutes
    channels = {
        'power': [0, 0, 0, 0, 0, []],
        'speed': [0, 0, 0, 0, 0, []],
        'heart_rate': [0, 0, 0, 0, 0, []]
    }

    # Open and parse Fit File from user input
    with open(sys.argv[1], 'rb') as handle:
        fitfile = FitFile(handle)
        # Walk every 'record' message in the file
        for message in fitfile.get_messages('record'):
            for name, slots in channels.items():
                # Missing samples come back as None; store them as 0
                value = message.get_value(name)
                slots[5].append(0 if value is None else value)

    # Start processing of different ranges using parallel processing
    parallel(channels)
Exemple #13
0
def check_fit(fit_file):
    """Look for blatant syntax errors, then read through each record in the
    .fit file to see if they can all be read properly.

    :param fit_file: object exposing .fileobj and .name
    :raises ActivityDefective: when the file is truncated or unparseable
    """
    try:
        fitfile = FitFile(fit_file.fileobj)
    except FitHeaderError:
        # Usually when the file is zero-length or truncated
        raise ActivityDefective(f'{fit_file.name} truncated')

    try:
        power_readings = []
        hr_readings = []
        for record in fitfile.get_messages('record'):
            for record_data in record:
                # Skip None samples so the averages below cannot TypeError.
                if record_data.value is None:
                    continue
                if record_data.name == 'power':
                    power_readings.append(record_data.value)
                elif record_data.name == 'heart_rate':
                    hr_readings.append(record_data.value)

        # Guard the empty case: the original raised ZeroDivisionError on
        # files with no power/HR fields at all.
        power = round(sum(power_readings) / len(power_readings)) if power_readings else 0
        hr = round(sum(hr_readings) / len(hr_readings)) if hr_readings else 0
        LOG.info(f'{fit_file.name} Average Power: {power}, Average HR: {hr}')

    except (FitCRCError, FitParseError, FitHeaderError, FitEOFError) as err:
        raise ActivityDefective(err)
 def upload_file(self,driver,file):
   """Parse a FIT file and store its total distance (miles) and elapsed
   time on self; always returns True.

   ``driver`` is unused in this body — presumably consumed by a caller or
   an overriding subclass; TODO confirm.
   """
   f = FitFile(file)
   f.parse()
   records = list(f.get_messages(name='record'))
   # 1609.347219 meters per mile: convert the final cumulative distance.
   self.distance = records[-1].get_value('distance') / 1609.347219 
   # Elapsed time split on ":" -> ["H", "MM", "SS"] strings from the timedelta.
   self.time = str(records[-1].get_value('timestamp') - records[0].get_value('timestamp')).split(":")
   return True
Exemple #15
0
    def _parse_by_pattern(self, filename, patternList):
        """Render matching fields of a FIT file as text.

        Emits a banner with the first timestamp seen, then one
        "name: value" line for every field whose name is in patternList.

        :param filename: path to the .fit file
        :param patternList: iterable of field names to include
        :return: accumulated text (possibly partial if parsing failed)
        """
        fitfile = FitFile(filename)
        lines = []
        first_timestamp = True
        try:
            for record in fitfile.get_messages():
                # Go through all the data entries in this record
                for record_data in record:
                    for pattern in patternList:
                        if record_data.name == "timestamp" and first_timestamp:
                            first_timestamp = False
                            lines.append("---------------------------------")
                            lines.append(str(record_data.value))
                            lines.append("---------------------------------")
                        if record_data.name == pattern:
                            lines.append(pattern + ": " + str(record_data.value))
        except Exception:
            # Keep the best-effort behaviour, but don't swallow
            # SystemExit/KeyboardInterrupt like the original bare `except:`.
            print("Error")

        # Join once instead of quadratic `+=` string concatenation; the
        # original terminated every line (including the last) with "\n".
        return "\n".join(lines) + ("\n" if lines else "")
Exemple #16
0
    def test_basic_file_with_one_record(self, endian='<'):
        """Parse a generated single-record FIT file and verify the file_id
        message is resolvable both by field name and by numeric def_num."""
        f = FitFile(generate_fitfile(endian=endian))
        f.parse()

        self.assertEqual(f.profile_version, 1.52)
        self.assertEqual(f.protocol_version, 1.0)

        file_id = f.messages[0]
        self.assertEqual(file_id.name, 'file_id')

        # Each loop checks lookup by field name AND by its def_num alias.
        for field in ('type', 0):
            self.assertEqual(file_id.get_value(field), 'activity')
            self.assertEqual(file_id.get(field).raw_value, 4)
        for field in ('manufacturer', 1):
            self.assertEqual(file_id.get_value(field), 'garmin')
            self.assertEqual(file_id.get(field).raw_value, 1)
        for field in ('product', 'garmin_product', 2):
            self.assertEqual(file_id.get_value(field), 'edge500')
            self.assertEqual(file_id.get(field).raw_value, 1036)
        for field in ('serial_number', 3):
            self.assertEqual(file_id.get_value(field), 558069241)
        for field in ('time_created', 4):
            # Raw value is FIT seconds; decoded value is the datetime.
            self.assertEqual(file_id.get_value(field), secs_to_dt(723842606))
            self.assertEqual(file_id.get(field).raw_value, 723842606)
        for field in ('number', 5):
            self.assertEqual(file_id.get_value(field), None)
Exemple #17
0
    def test_basic_file_with_one_record(self, endian='<'):
        """Parse a generated single-record FIT file and verify the file_id
        message is resolvable both by field name and by numeric def_num."""
        f = FitFile(generate_fitfile(endian=endian))
        f.parse()

        self.assertEqual(f.profile_version, 1.52)
        self.assertEqual(f.protocol_version, 1.0)

        file_id = f.messages[0]
        self.assertEqual(file_id.name, 'file_id')

        # Each loop checks lookup by field name AND by its def_num alias.
        for field in ('type', 0):
            self.assertEqual(file_id.get_value(field), 'activity')
            self.assertEqual(file_id.get(field).raw_value, 4)
        for field in ('manufacturer', 1):
            self.assertEqual(file_id.get_value(field), 'garmin')
            self.assertEqual(file_id.get(field).raw_value, 1)
        for field in ('product', 'garmin_product', 2):
            self.assertEqual(file_id.get_value(field), 'edge500')
            self.assertEqual(file_id.get(field).raw_value, 1036)
        for field in ('serial_number', 3):
            self.assertEqual(file_id.get_value(field), 558069241)
        for field in ('time_created', 4):
            # Raw value is FIT seconds; decoded value is the datetime.
            self.assertEqual(file_id.get_value(field), secs_to_dt(723842606))
            self.assertEqual(file_id.get(field).raw_value, 723842606)
        for field in ('number', 5):
            self.assertEqual(file_id.get_value(field), None)
Exemple #18
0
def gen_records(file_path):
    """Generator function for iterating over *.fit file messages.
    Parameters
    ----------
    file_path : str
        Path to the ANT/Garmin fit file.

    Yields
    ------
        Parsed messages from `file_path`.
    """
    fit_file = FitFile(file_path)

    lap = 0
    session = -1

    for record in filter(message_filter, fit_file.get_messages()):
        kind = record.mesg_type.name
        if kind == "record":
            message = record.get_values()
            message["lap"] = lap
            message["session"] = session
            yield message
        elif kind == "lap":
            lap += 1
        elif kind == "event":
            # An activity that is (manually or automatically) paused or
            # stopped and then resumed emits a new "start" event.
            if record.get_value("event_type") == "start":
                session += 1
        else:
            raise ValueError("Unknown message fit type {0}.".format(kind))
Exemple #19
0
def main(argv):
    """Parse CLI options and load the given FIT file.

    :param argv: unused here; argparse reads sys.argv directly
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-f",
                        "--fitfile",
                        type=str,
                        default="",
                        help="FIT file")
    parser.add_argument("-p",
                        "--ftp",
                        type=float,
                        default=4.5,
                        help="Functional Threshold Pace")
    parser.add_argument("-t",
                        "--threshold",
                        type=int,
                        default=155,
                        help="Heartrate threshold")
    args = parser.parse_args()

    user = User(args.ftp, args.threshold)

    try:
        fitfile = FitFile(args.fitfile)
        fitfile.parse()
    except FitParseError as e:
        # BUG FIX: the original used Python 2 syntax (`except E, e:` and
        # print statements), a SyntaxError under Python 3 — which the
        # rest of this file targets.
        print("Error while parsing .FIT file: %s" % e)
        sys.exit(1)
Exemple #20
0
    def test_component_field_resolves_subfield(self):
        """A component field (data16) should expand into its dynamic
        subfield (timer_trigger/data) while staying resolvable under
        every alias and def_num."""
        fit_data = generate_fitfile(
            generate_messages(
                # event (21), local message 1
                mesg_num=21, local_mesg_num=1, field_defs=[
                    # event, event_type, data16
                    (0, 'enum'), (1, 'enum'), (2, 'uint16'),
                ],
                data=[[0, 0, 2]],
            )
        )

        f = FitFile(fit_data)
        f.parse()

        event = f.messages[1]
        self.assertEqual(event.name, 'event')
        for field in ('event', 0):
            self.assertEqual(event.get_value(field), 'timer')
            self.assertEqual(event.get(field).raw_value, 0)
        for field in ('event_type', 1):
            self.assertEqual(event.get_value(field), 'start')
            self.assertEqual(event.get(field).raw_value, 0)

        # Should be able to reference by original field name,
        # component field name, subfield name, and then the field def_num of both
        # the original field and component field
        for field in ('timer_trigger', 'data', 3):
            self.assertEqual(event.get_value(field), 'fitness_equipment')
            self.assertEqual(event.get(field).raw_value, 2)

        # Component field should be left as is
        for field in ('data16', 2):
            self.assertEqual(event.get_value(field), 2)
Exemple #21
0
 def test_mismatched_field_size(self):
     """Parsing a file with misaligned field sizes should fall back to
     byte encoding with a warning instead of failing outright."""
     f = FitFile(testfile('coros-pace-2-cycling-misaligned-fields.fit'))
     with warnings.catch_warnings(record=True) as w:
         f.parse()
         # Every captured warning must mention the byte-encoding fallback.
         assert w
         assert all("falling back to byte encoding" in str(x) for x in w)
     self.assertEqual(len(f.messages), 11293)
Exemple #22
0
    def test_component_field_resolves_subfield(self):
        """A component field (data16) should expand into its dynamic
        subfield (timer_trigger/data) while staying resolvable under
        every alias and def_num."""
        fit_data = generate_fitfile(
            generate_messages(
                # event (21), local message 1
                mesg_num=21, local_mesg_num=1, field_defs=[
                    # event, event_type, data16
                    (0, 'enum'), (1, 'enum'), (2, 'uint16'),
                ],
                data=[[0, 0, 2]],
            )
        )

        f = FitFile(fit_data)
        f.parse()

        event = f.messages[1]
        self.assertEqual(event.name, 'event')
        for field in ('event', 0):
            self.assertEqual(event.get_value(field), 'timer')
            self.assertEqual(event.get(field).raw_value, 0)
        for field in ('event_type', 1):
            self.assertEqual(event.get_value(field), 'start')
            self.assertEqual(event.get(field).raw_value, 0)

        # Should be able to reference by original field name,
        # component field name, subfield name, and then the field def_num of both
        # the original field and component field
        for field in ('timer_trigger', 'data', 3):
            self.assertEqual(event.get_value(field), 'fitness_equipment')
            self.assertEqual(event.get(field).raw_value, 2)

        # Component field should be left as is
        for field in ('data16', 2):
            self.assertEqual(event.get_value(field), 2)
Exemple #23
0
def importFit(fileName, fieldName):
    """Print every field of every `fieldName` message in a FIT file.

    :param fileName: path to the .fit file; prints an error when empty
    :param fieldName: message type to iterate (e.g. 'record')
    """
    now = datetime.datetime.now()

    if fileName:
        print("importing file %s" % (fileName))
        # BUG FIX: a bare `print` is a no-op expression in Python 3; the
        # Python 2 intent was to emit a blank line, so call print().
        print()
        print("Start " + now.strftime("%H:%M:%S"))
        print()

        fitfile = FitFile(fileName)

        # Get all data messages that are of the requested type
        for record in fitfile.get_messages(fieldName):
            # Go through all the data entries in this record
            for record_data in record:
                # Print the record's name and value (and units if it has any)
                if record_data.units:
                    print(" * %s: %s %s" % (
                        record_data.name, record_data.value, record_data.units,
                    ))
                else:
                    print(" * %s: %s" % (record_data.name, record_data.value))
            print()

        now = datetime.datetime.now()

        print()
        print("End " + now.strftime("%H:%M:%S"))
        print()
    else:
        print("Missing filename")  # typo fix: message read "filname"
Exemple #24
0
def main(video_filename,
         fit_filename,
         output_filename,
         fit_offset=0,
         duration=0,
         strain=150,
         bitrate='34000000'):
    """Overlay FIT ride data onto a video and write the result.

    :param video_filename: source video path
    :param fit_filename: .fit file whose 'record' messages drive the overlay
    :param output_filename: destination video path
    :param fit_offset: offset applied when aligning FIT data to the video
    :param duration: if non-zero, only render the first `duration` seconds
    :param strain: passed through to the OVR overlay
    :param bitrate: output encoding bitrate string
    """
    v = VideoFileClip(video_filename)
    f = FitFile(fit_filename)

    fit = list()
    altgain = 0
    for msg in f.get_messages('record'):
        fit.append(dict(msg.get_values()))
        if len(fit) > 1:
            # Altitude may be missing (None) on some records; treat those
            # gaps as zero gain. Narrowed from the original bare `except:`
            # which swallowed every error, including KeyboardInterrupt.
            try:
                gain = fit[-1].get('altitude') - fit[-2].get('altitude')
            except TypeError:
                gain = 0
            if gain > 0:
                altgain += gain
            # Cumulative positive altitude gain up to this record.
            fit[-1]['altgain'] = altgain

    ovr = OVR(v, fit, fit_offset, strain)
    if duration:
        nv = v.subclip(t_end=duration).fl_image(ovr)
    else:
        nv = v.fl_image(ovr)
    nv.write_videofile(output_filename, progress_bar=True, bitrate=bitrate)
Exemple #25
0
def get_start_end(file):
    """Interactively pick start/end seconds for an activity, persist them
    via utils, and dump the parsed data to CSV."""
    print(utils.get_name(file))
    series = {}
    fitfile = FitFile(file)

    # Build {field_name: {row_index: value}} from every 'record' message.
    for idx, message in enumerate(fitfile.get_messages("record")):
        for field in message:
            key = str(field.name)
            if key not in series:
                series[key] = {}
            series[field.name][idx] = field.value
        series = fill_na(series, idx)

    df = pd.DataFrame().from_dict(series)
    accepted = False
    while not accepted:
        # Show the whole ride, ask for a window, then show the window
        # and ask for confirmation.
        df.loc[:, ["cadence", "power", "heart_rate"]].plot()
        plt.show()
        start = int(input("start? "))
        end = int(input("end? "))
        df.loc[start:end, ["cadence", "power", "heart_rate"]].plot()
        plt.show()
        accepted = input("good? (y/n) ") == "y"
    info = utils.get_info(file)
    info["start(sec)"] = str(start)
    info["end(sec)"] = str(end)
    utils.set_info(file, info)
    df.to_csv(file.replace(".fit", ".csv"))
    sns.lmplot(x="power", y="heart_rate", data=df)
Exemple #26
0
def fit_parse(fitDir):
    """
    fitDir is the file path to the Fit File to parse.
    Returns {'DataFrame': {message type: DataFrame}, 'json': JSON blob of
    per-type CSV dumps}.
    """
    print(f'Running "fit_parse" for: {fitDir}')
    fitfile = FitFile(fitDir)
    dataOutput = {}
    messageFit = ['activity', 'file_id', 'session', 'lap', 'record',
                  'device_info', 'event'
                  # ,'segment_lap'
                  ]
    for msg_type in messageFit:
        rows = []
        for message in fitfile.get_messages(msg_type):
            # BUG FIX: the original appended the same still-mutating dict
            # once per field (and updated dataOutput per field), producing
            # piles of duplicate references that drop_duplicates then had
            # to clean up. Build each row completely, append once.
            rows.append({field.name: field.value for field in message})
        # Also fixes a KeyError the original raised when a message type
        # had no rows at all. (The per-field debug print(j) is dropped.)
        frame = pd.DataFrame(rows)
        frame.drop_duplicates(inplace=True)
        dataOutput[msg_type] = frame
    return {'DataFrame': dataOutput,
            "json": json.dumps({i: dataOutput[i].to_csv() for i in dataOutput})}
Exemple #27
0
def load(fname):
    """
    This method uses the Python fitparse library to load a FIT file into a WorkoutDataFrame.
    It is tested with a Garmin FIT file but will probably work with other FIT files too.
    Columns names are translated to sweat terminology (e.g. "heart_rate" > "heartrate").

    Parameters
    ----------
    fname : str

    Returns
    -------
    wdf : WorkoutDataFrame
    """

    fitfile = FitFile(fname)

    # One dict of field values per 'record' message.
    records = [message.get_values()
               for message in fitfile.get_messages('record')]

    wdf = dataframes.WorkoutDataFrame(records)
    wdf = wdf.rename(columns={'heart_rate': 'heartrate'})

    # Index by whole seconds elapsed since the first sample.
    elapsed = (wdf.timestamp - wdf.timestamp[0]) / np.timedelta64(1, 's')
    wdf.index = elapsed.astype(int)

    return wdf
Exemple #28
0
    def test_component_field_accumulaters(self):
        # TODO: abstract CSV parsing
        # Use a context manager so the CSV handle is closed even when an
        # assertion fails (the original leaked it on failure).
        with open(testfile('compressed-speed-distance-records.csv'), 'r') as csv_fp:
            csv_file = csv.reader(csv_fp)
            next(csv_file)  # Consume header

            f = FitFile(testfile('compressed-speed-distance.fit'))
            f.parse()

            records = f.get_messages(name='record')
            empty_record = next(records)  # Skip empty record for now (sets timestamp via header)

            # File's timestamp record is < 0x10000000, so field returns seconds
            self.assertEqual(empty_record.get_value('timestamp'), 17217864)

            # TODO: update using local_timestamp as offset, since we have this value as 2012 date

            for count, (record, (timestamp, heartrate, speed, distance, cadence)) in enumerate(zip(records, csv_file)):
                # No fancy datetime stuff, since timestamp record is < 0x10000000
                fit_ts = record.get_value('timestamp')
                self.assertIsInstance(fit_ts, int)
                self.assertLess(fit_ts, 0x10000000)
                self.assertEqual(fit_ts, int(timestamp))

                self.assertEqual(record.get_value('heart_rate'), int(heartrate))
                self.assertEqual(record.get_value('cadence'), int(cadence) if cadence != 'null' else None)
                self.assertAlmostEqual(record.get_value('speed'), float(speed))
                self.assertAlmostEqual(record.get_value('distance'), float(distance))

            self.assertEqual(count, 753)  # TODO: confirm size(records) = size(csv)
Exemple #29
0
 def _parse_metadata(self):
     """Resolve the file name from self.path_to_file and open it as a
     FitFile on self.fit; FitEOFError is logged, not re-raised."""
     self.file_name = self.get_file_name_from_path(self.path_to_file)
     try:
         self.fit = FitFile(self.path_to_file)
     except FitEOFError as e:
         # Truncated/empty files raise FitEOFError; execution continues
         # with self.fit unset — NOTE(review): callers must tolerate the
         # missing attribute.
         log.error(f"Error reading fit file {self.path_to_file}: {e}",
                   exc_info=True)
Exemple #30
0
    def import_fit(self, file_data):
        """Append one {field name: value} dict per 'record' message in
        file_data onto self.records."""
        fit = FitFile(file_data)
        for message in fit.get_messages('record'):
            self.records.append(
                {field.name: field.value for field in message})
Exemple #31
0
def fit_decode(file2parse):
    """Parse *file2parse* as a FIT file and dump all of its messages.

    On a parse error, report the problem on stdout and exit with
    status 1.
    """
    try:
        fitfile = FitFile(file2parse)
        fitfile.parse()
        all_fit_show(fitfile)
    except FitParseError as e:
        # Bug fix: the %-interpolation was inside the string literal, so
        # the original printed the literal text "...: %s % e" instead of
        # the actual error.
        print("Error while parsing .FIT file: %s" % e)
        sys.exit(1)
Exemple #32
0
    def import_file(self, file):
        """Import a FIT activity into the logbook database.

        Scans the file's messages for creation date and sport metadata,
        inserts a row into the file table, then delegates the detailed
        import to the plugin registered for the detected sport.
        Finally refreshes the event list via ``self.read_events()``.
        """
        try:
            fitfile = FitFile(file)
            filename = os.path.basename(file)
            # Content hash of the parsed file; presumably used for
            # duplicate detection elsewhere -- TODO confirm.
            filehash = fitfile.digest

            creation_date = ""
            event_name = ""
            event_sport = ""
            event_subsport = ""

            #            if not result:
            self.logging.info("Importing file %s", filename)

            # Pull metadata out of the 'file_id' and 'sport' messages.
            for record in fitfile.get_messages():
                for record_data in record:
                    if record.name == "file_id" and record_data.name == "time_created":
                        creation_date = record_data.value
                    if record.name == "sport":
                        if record_data.name == "sport":
                            event_sport = str(record_data.value)
                        if record_data.name == "sub_sport":
                            event_subsport = str(record_data.value)
                        if record_data.name == "name":
                            # Sport name arrives as raw bytes in this file.
                            event_name = record_data.value.decode('utf-8')

            try:
                # Only files whose sport has a registered plugin are imported.
                if event_sport in self._plugins:
                    file_insert = self._file_table.insert()
                    f_id = file_insert.values(file_name=filename,
                                              file_hash=filehash,
                                              creation_date=creation_date,
                                              event_name=event_name,
                                              event_type=event_sport,
                                              event_subtype=event_subsport)
                    conn = self._alchemy_logbook.connect()
                    conn.execute(f_id)

                    # NOTE(review): this branch is unreachable -- the
                    # enclosing `if` already guarantees event_sport is in
                    # self._plugins, so the "default" plugin fallback
                    # never runs. Left as-is; confirm intent before
                    # removing.
                    if event_sport not in self._plugins:
                        self.logging.info(
                            "delegating the import to plugin \"default\"")
                        self._plugins["default"].import_fit(fitfile)
                    else:
                        self.logging.info(
                            "delegating the import to plugin \"%s\"" %
                            event_sport)
                        self._plugins[event_sport].import_fit(fitfile)

            except Exception as e:
                # Import errors from plugins/DB are logged at debug level
                # and deliberately do not abort the outer flow.
                self.logging.debug(e)

            self.logging.info("Import finished")

        except Exception as e:
            #            self.logging.error("Error importing file")
            print(e)

        self.read_events()
Exemple #33
0
def convert(filename):
    """Parse the FIT activity at *filename* and return it wrapped in a
    TCX-style XML document under an <Activities> element."""
    doc = create_document()
    activities = create_sub_element(doc.getroot(), "Activities")

    fit = FitFile(filename)
    fit.parse()
    add_activity(activities, fit)
    return doc
Exemple #34
0
def grab_data(filename, target_units=None):
    """Collect the record-message fields of a FIT file.

    Parameters
    ----------
    filename : str
        Path to the FIT file.
    target_units : iterable of str, optional
        If given and non-empty, only fields with these names are kept;
        otherwise every field is collected.

    Returns
    -------
    collections.defaultdict
        Mapping of field name -> list of values, in record order.
    """
    # `None` default replaces the original mutable-default `[]`
    # (backward compatible: an empty/absent filter keeps everything).
    wanted = set(target_units) if target_units else None
    fitfile = FitFile(filename)
    results = defaultdict(list)
    for record in fitfile.get_messages('record'):
        for record_data in record:
            if wanted is not None and record_data.name not in wanted:
                continue
            results[record_data.name].append(record_data.value)
    return results
Exemple #35
0
def convert(filename):
    """Build a TCX document from the FIT activity *filename*.

    The parsed activity is appended under a fresh <Activities> node of
    a new document, which is returned.
    """
    fit_activity = FitFile(filename)
    fit_activity.parse()

    tcx_doc = create_document()
    activities_node = create_sub_element(tcx_doc.getroot(), "Activities")
    add_activity(activities_node, fit_activity)
    return tcx_doc
Exemple #36
0
def date_file(file: str):
    """Return the timestamp value of the first 'record' message in
    *file* (also printing the field), or None when the file has no
    records or the first record carries no timestamp."""
    fitfile = FitFile(file)
    # Only the very first record message is inspected.
    first = next(fitfile.get_messages("record"), None)
    if first is None:
        return None
    for field in first:
        if field.name == "timestamp":
            print(field)
            return field.value
Exemple #37
0
    def test_parsing_edge_500_fit_file(self):
        """Compare every record of a Garmin Edge 500 activity against
        the ANT SDK CSV dump of the same file.

        Python 3 port of an old Python 2 test: the CSV is opened in
        text mode instead of 'rb', the ``next()`` builtin replaces the
        removed ``.next()`` method, and the removed ``long`` type is no
        longer referenced.
        """
        csv_fp = open(testfile('garmin-edge-500-activitiy-records.csv'), 'r')
        csv_messages = csv.reader(csv_fp)
        field_names = next(csv_messages)  # Consume header

        f = FitFile(testfile('garmin-edge-500-activitiy.fit'))
        messages = f.get_messages(name='record')

        # For fixups
        last_valid_lat, last_valid_long = None, None

        for message, csv_message in zip(messages, csv_messages):
            for csv_index, field_name in enumerate(field_names):
                fit_value, csv_value = message.get_value(field_name), csv_message[csv_index]
                if field_name == 'timestamp':
                    # Adjust GMT to PDT and format
                    fit_value = (fit_value - datetime.timedelta(hours=7)).strftime("%a %b %d %H:%M:%S PDT %Y")

                # Track last valid lat/longs
                if field_name == 'position_lat':
                    if fit_value is not None:
                        last_valid_lat = fit_value
                if field_name == 'position_long':
                    if fit_value is not None:
                        last_valid_long = fit_value

                # ANT FIT SDK Dump tool does a bad job of logging invalids, so fix them
                if fit_value is None:
                    # ANT FIT SDK Dump tool cadence reports invalid as 0
                    if field_name == 'cadence' and csv_value == '0':
                        csv_value = None
                    # ANT FIT SDK Dump tool invalid lat/lng reports as last valid
                    if field_name == 'position_lat':
                        fit_value = last_valid_lat
                    if field_name == 'position_long':
                        fit_value = last_valid_long

                if isinstance(fit_value, int):
                    csv_value = int(csv_value)

                if isinstance(fit_value, float):
                    # Float comparison
                    self.assertAlmostEqual(fit_value, float(csv_value))
                else:
                    self.assertEqual(fit_value, csv_value)

        try:
            next(messages)
            self.fail(".FIT file had more than csv file")
        except StopIteration:
            pass

        try:
            next(csv_messages)
            self.fail(".CSV file had more messages than .FIT file")
        except StopIteration:
            pass

        csv_fp.close()
Exemple #38
0
    def test_subfield_components(self):
        """Verify that subfields and their bit-packed components decode
        correctly for two 'event' messages (sport_point and
        front_gear_change), addressed both by name and by field number.
        """
        # score = 123 (low 16 bits), opponent_score = 456 (high 16 bits)
        sport_point_value = 123 + (456 << 16)
        # rear_gear_num = 4, rear_gear = 20, front_gear_num = 2, front_gear = 34
        # packed as successive bytes, low to high
        gear_chance_value = 4 + (20 << 8) + (2 << 16) + (34 << 24)

        fit_data = generate_fitfile(
            generate_messages(
                # event (21), local message 1
                mesg_num=21, local_mesg_num=1, field_defs=[
                    # event, data
                    (0, 'enum'), (3, 'uint32'),
                ],
                data=[
                    # sport point
                    [33, sport_point_value],
                    # front gear change
                    [42, gear_chance_value],
                ],
            )
        )

        f = FitFile(fit_data)
        f.parse()

        # First generated message: event type 33 (sport_point).
        sport_point = f.messages[1]
        self.assertEqual(sport_point.name, 'event')
        for field in ('event', 0):
            self.assertEqual(sport_point.get_value(field), 'sport_point')
            self.assertEqual(sport_point.get(field).raw_value, 33)
        for field in ('sport_point', 'data', 3):
            # Verify raw numeric value
            self.assertEqual(sport_point.get_value(field), sport_point_value)
        for field in ('score', 7):
            self.assertEqual(sport_point.get_value(field), 123)
        for field in ('opponent_score', 8):
            self.assertEqual(sport_point.get_value(field), 456)

        # Second generated message: event type 42 (front_gear_change).
        gear_change = f.messages[2]
        self.assertEqual(gear_change.name, 'event')
        for field in ('event', 0):
            self.assertEqual(gear_change.get_value(field), 'front_gear_change')
            self.assertEqual(gear_change.get(field).raw_value, 42)
        for field in ('gear_change_data', 'data', 3):
            # Verify raw numeric value
            self.assertEqual(gear_change.get_value(field), gear_chance_value)
        for field in ('front_gear_num', 9):
            self.assertEqual(gear_change.get_value(field), 2)
        for field in ('front_gear', 10):
            self.assertEqual(gear_change.get_value(field), 34)
        for field in ('rear_gear_num', 11):
            self.assertEqual(gear_change.get_value(field), 4)
        for field in ('rear_gear', 12):
            self.assertEqual(gear_change.get_value(field), 20)
Exemple #39
0
def load_power_from_fit(filename):
    """Open the power data of a FIT file as a numpy array.

    Parameters
    ----------
    filename : str
        Path to the FIT file (must end in ``.fit`` and exist).

    Returns
    -------
    ndarray
        One power value per record; records without power are 0.
    """
    # Validate the input path before touching the parser.
    if not filename.endswith('.fit'):
        raise ValueError('The file does not have the right extension.'
                         ' Expected *.fit.')
    if not isfile(filename):
        raise ValueError('The file does not exist. Please check the path.')

    activity = FitFile(filename)
    activity.parse()

    records = list(activity.get_messages(name='record'))
    if not records:
        raise ValueError('There is no data inside the FIT file.')

    power_rec = np.zeros((len(records), ))
    # Allow repeated warnings instead of one-shot filtering.
    warnings.simplefilter('always', UserWarning)
    missing = 0
    for idx, rec in enumerate(records):
        value = rec.get_value('power')
        if value is None:
            # A missing sample stays at 0 W so it does not influence
            # the record-power-profile computation; count it.
            missing += 1
        else:
            power_rec[idx] = float(value)

    # Warn when the file held no power samples at all.
    if missing == len(records):
        warnings.warn('This file does not contain any power data.'
                      'Be aware.')

    return power_rec
Exemple #40
0
def main():
    """Command-line entry point: read a FIT file, compute rolling HRV
    (RMSSD over a moving window) and render the results into an HTML
    template.

    Usage: prog [FIT input file] [HTML output file]

    Ported to Python 3: ``print`` statements became function calls and
    ``!= None`` / ``== None`` comparisons use ``is``.
    """
    if len(sys.argv) == 3:
        filename = sys.argv[1]
        output_filename = sys.argv[2]
    else:
        print('Usage: {0} [FIT input file] [HTML output file]'.format(sys.argv[0]))
        return

    moving_window = 1  # window length in minutes
    fit = FitFile(filename)
    fit.parse()

    # Gets the start timestamp from the first record message.
    start_time = None
    for message in fit.get_messages(name='record'):
        start_time = message.get_value('timestamp')
        break

    last_rmssd_time = None
    hrv_points = []
    datapoint = None
    hrv_results = [["Duration", "Avg HR", "HRV"]]
    for message in fit.messages:
        if message.mesg_num == 20:  # 'record' message
            if datapoint is not None and datapoint.has_rr():
                hrv_points.append(datapoint)

                if datapoints_duration_in_minutes(hrv_points) > moving_window:
                    # Emit a new RMSSD value at most every half window.
                    if last_rmssd_time is None or (hrv_points[-1].timestamp - last_rmssd_time).total_seconds() / 60 > 0.5 * moving_window:
                        last_rmssd_time = hrv_points[-1].timestamp
                        result = rmssd(hrv_points, moving_window)
                        if result is not None:
                            hrv_results.append([(result[0] - start_time).total_seconds(), result[1], result[2]])

            datapoint = _HrvPoint(message.get_value('timestamp'), message.get_value('heart_rate'))
        elif message.mesg_num == 78:  # 'hrv' message: RR intervals for the current point
            if datapoint is not None:
                datapoint.add_hrv(message.get_value('time'))
        elif message.name == 'event':
            print('{0} Event: {1} {2} {3}'.format(
                    message.get_value('timestamp'), message.get_value('event'), message.get_value('event_type'),
                    message.get_value('data')))
        elif message.name in ['session', 'lap']:
            print('{0} {1}'.format(message.get_value('timestamp'), message.name))
            for f in message.fields:
                if f.value is None or f.name in ['timestamp']:
                    continue
                print('        {0} : {1}'.format(f.name, f.value))

    # Substitute the computed series into the HTML template.
    with open('fit_hrv_template.html', 'r') as template:
        with open(output_filename, 'w') as output:
            output.write(template.read().replace("%HRVDATA%", json.dumps(hrv_results)))
Exemple #41
0
class Parser(object):
    """Convert a FIT file into a Track object (lazily, exactly once)."""

    track = None

    def __init__(self, path):
        """Open the FIT file at *path*."""
        self.fitfile = FitFile(path)

    def convert(self):
        """Build (on first call) and return the Track for this file."""
        if self.track:
            return self.track

        self.track = Track()
        # Bug fix: the original used map(), which is lazy in Python 3,
        # so _parse_entry was never actually invoked and the track
        # stayed empty. Iterate explicitly instead.
        for message in self.fitfile.get_messages():
            self._parse_entry(message)

        return self.track

    def _parse_entry(self, point_data):
        """Route a single FIT message into the Track being built."""
        if point_data.get_value('time_created'):
            self.track.created_at = point_data.get_value('time_created')
        elif len(self.track) == 0 and point_data.get_value('event_type') == 'start':
            self.track.lap()
        elif point_data.get_value('event') == 'lap':
            self.track.lap()
        elif point_data.get_value('event_type') == 'stop_all':
            self.track.stop(point_data.get_value('timestamp'))
        elif point_data.get_value('event_type') == 'start':
            # Track resumes when a normal point is recived.
            pass
        elif point_data.get_value('position_lat'):  # regular point
            self.track.save_entry([point_data.get_value('position_lat'), point_data.get_value('position_long')],
                                  point_data.get_value('timestamp'), point_data.get_value('distance'),
                                  point_data.get_value('altitude'), point_data.get_value('heart_rate'),
                                  point_data.get_value('cadence'), point_data.get_value('temperature'))
Exemple #42
0
def load_power_from_fit(filename):
    """Method to open the power data from FIT file into a pandas dataframe.

    Parameters
    ----------
    filename : str,
        Path to the FIT file.

    Returns
    -------
    data : DataFrame
        Power records of the ride, indexed by the first FIELDS_DATA
        field; 'heart_rate'/'altitude' are renamed to
        'heart-rate'/'elevation'.

    Raises
    ------
    IOError
        If the file contains no record data.
    """
    filename = check_filename_fit(filename)
    activity = FitFile(filename)
    activity.parse()
    records = activity.get_messages(name='record')

    data = defaultdict(list)
    for rec in records:
        values = rec.get_values()
        for key in FIELDS_DATA:
            # np.nan: the np.NaN alias was removed in NumPy 2.0.
            data[key].append(values.get(key, np.nan))

    data = pd.DataFrame(data)
    if data.empty:
        raise IOError('The file {} does not contain any data.'.format(
            filename))

    # rename the columns for consistency
    data.rename(columns={'heart_rate': 'heart-rate', 'altitude': 'elevation'},
                inplace=True)

    data.set_index(FIELDS_DATA[0], inplace=True)
    # Clear the index name by assignment; `del data.index.name` is not
    # supported on modern pandas.
    data.index.name = None

    return data
  def get_weather(self,fit_file):
    '''Fetch current weather conditions at the activity's start point.

    Reads the first record of *fit_file* for the starting
    latitude/longitude (converted from FIT semicircles to degrees) and
    queries the Weather Underground conditions API.

    NOTE(review): the API key is hard-coded in the URL below and the
    Weather Underground API has been retired -- this call will likely
    fail; confirm before relying on it.
    '''
    f = FitFile(fit_file)
    f.parse()
    records = list(f.get_messages(name='record'))
    # The first record carries the start position, in FIT "semicircles".
    lat = records[0].get_value('position_lat')
    long = records[0].get_value('position_long')
    # Semicircles -> degrees: degrees = semicircles * (180 / 2**31).
    lat  = str(lat * (180.0 / 2**31))
    long = str(long * (180.0 / 2**31))
    #Grab all the resources
    url = "http://api.wunderground.com/api/bdf13372b1f7e319/conditions/q/"+lat+","+long+".json"
    r = requests.get(url)
    j = r.json()
    # Flatten the fields of interest from the API response.
    return {'weather_string':j['current_observation']['weather']  + " " + \
                             j['current_observation']['temperature_string'] + " " +\
                             j['current_observation']['wind_string'],
            'temp_f':j['current_observation']['temp_f'],
            'temp_c':j['current_observation']['temp_c'],
            'humidity':j['current_observation']['relative_humidity'],
            'wind_speed':j['current_observation']['wind_mph'],
            'wind_gust_mph':j['current_observation']['wind_gust_mph']}
Exemple #44
0
def convert(
    filename,
    tz_is_local=False,
    dist_recalc=False,
    speed_recalc=False,
    calibrate=False,
    per_lap_cal=False,
    manual_lap_distance=None,
    current_cal_factor=100.0,
):
    """Convert a FIT file to TCX format.

    Parameters
    ----------
    filename : str
        Path of the FIT activity to convert.
    tz_is_local : bool
        If True, re-parse the file with a timezone derived from the
        first GPS fix instead of the default processor.
    dist_recalc, speed_recalc : bool
        Recalculate distance / speed from GPS data.
    calibrate : bool
        Compute a new footpod calibration factor.
    per_lap_cal : bool
        Apply calibration per lap (implied by *manual_lap_distance*).
    manual_lap_distance :
        Known lap distance(s), if available.
    current_cal_factor : float
        The calibration factor currently set on the device, in percent.

    Returns
    -------
    The TCX document; exits the process on a FIT parse error.
    """

    # Calibration with manual lap distances implies
    # per-lap calibration:
    if calibrate and manual_lap_distance is not None:
        per_lap_cal = True

    document = create_document()
    element = create_sub_element(document.getroot(), "Activities")

    try:
        activity = FitFile(filename, data_processor=MyDataProcessor())
        activity.parse()
        if tz_is_local:
            # Find the first GPS fix, then re-parse with a
            # timezone-aware data processor seeded from it.
            lat = None
            lon = None
            for trackpoint in activity.get_messages("record"):
                if lat is not None and lon is not None:
                    break
                lat = trackpoint.get_value("position_lat")
                lon = trackpoint.get_value("position_long")
            activity = FitFile(filename, data_processor=TZDataProcessor(lat=lat, lon=lon))
            activity.parse()

        session = next(activity.get_messages("session"))
        total_activity_distance = session.get_value("total_distance")
        total_calculated_distance = sum_distance(activity)
        activity_scaling_factor = total_calculated_distance / total_activity_distance
        new_cal_factor = activity_scaling_factor * current_cal_factor

        actelem, total_distance = add_activity(
            element,
            session,
            activity,
            dist_recalc,
            speed_recalc,
            calibrate,
            current_cal_factor,
            per_lap_cal,
            manual_lap_distance,
            activity_scaling_factor,
        )
    except FitParseError as e:
        sys.stderr.write(str("Error while parsing .FIT file: %s" % e) + "\n")
        sys.exit(1)

    if dist_recalc:
        distance_used = total_calculated_distance
    elif calibrate:
        distance_used = total_distance
    else:
        distance_used = total_activity_distance

    method = ""
    if dist_recalc or speed_recalc or calibrate:
        parts = []
        # Bug fix: `reference` was unbound when calibrating without
        # manual lap distances and without any recalculation, causing
        # an UnboundLocalError in the f-string below.
        reference = ""

        if calibrate:
            if per_lap_cal:
                parts.append("calibration applied per lap")
            else:
                parts.append("calibration applied")
        if dist_recalc and speed_recalc:
            parts.append("speed and distance recalculated")
        elif dist_recalc:
            parts.append("distance recalculated")
        elif speed_recalc:
            parts.append("speed recalculated")

        if calibrate and manual_lap_distance is not None:
            reference = " from known distance and/or GPS"
        elif dist_recalc or speed_recalc:
            reference = " from GPS"

        method = "(" + ", ".join(parts) + reference + ")"

    # Human-readable summary stored in the TCX notes element.
    notes = (
        "{distance_used:.3f} km in {total_time!s} {dist_method:s}\n"
        "Distance in FIT file: {fit_dist:.3f} km; "
        "calculated via GPS: {gps_dist:.3f} km "
        "(precision: {precision:.1f}%)\n"
        "Footpod calibration factor setting: {old_cf:.1f}%; "
        "new factor based on recomputed distance: {new_cf:.1f}%"
    ).format(
        distance_used=distance_used / 1000,
        total_time=timedelta(seconds=int(session.get_value("total_timer_time"))),
        fit_dist=total_activity_distance / 1000,
        gps_dist=total_calculated_distance / 1000,
        precision=(1 - (abs(total_calculated_distance - total_activity_distance) / total_calculated_distance)) * 100,
        old_cf=current_cal_factor,
        new_cf=new_cal_factor,
        dist_method=method,
    )
    add_notes(actelem, notes)
    add_creator(
        actelem, activity.messages[0].get_value("manufacturer").capitalize(), activity.messages[0].get_value("product")
    )
    add_author(document)
    return document
# Script setup: parse a .fit file given on the command line and walk its
# record messages, converting positions to degrees.
# SECURITY NOTE(review): API keys are hard-coded below -- they should be
# moved to environment variables / a secrets store and rotated.
FORECAST_API_KEY='8261b9ec237c8876cbd21d05c6447452'
GOOGLE_API_KEY='AIzaSyCBFTk2JuESrKlpab-LYNwG56zXI6eUka8'
# We don't really use WU anymore
#WEATHERUNDERGROUND_API_KEY='01c440c2fae9b2b2'

parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('file', type=argparse.FileType('r'), nargs=1,
                    help='File to be parsed (.fit)')

args = parser.parse_args()

# argparse already opened the file; only its name is used here.
input_file_path = Path(args.file[0].name)

with open(input_file_path, 'rb') as input_file:
  try:
    # check_crc=False: tolerate files with bad/missing CRC trailers.
    fit_file = FitFile(input_file, check_crc=False)
    fit_file.parse()
  except FitParseError as err:
    # NOTE(review): file objects have no .relpath(); this error path
    # would itself raise AttributeError -- confirm and fix upstream.
    print('Error while parsing {}: {}'.format(input_file.relpath(), err))
    sys.exit(1)

# Build our api instances
gmaps = googlemaps.Client(key=GOOGLE_API_KEY)
tf = TimezoneFinder()

moments = []
j=0
for i, record in enumerate(fit_file.messages):
  # mesg_num 20 is the 'record' (trackpoint) message type.
  if record.mesg_num == 20:
    lat = convert_to_degrees(record.get_value('position_lat'))
    lng = convert_to_degrees(record.get_value('position_long'))
Exemple #46
0
# Dump every message, and each message's field values, from a sample
# FIT file.
from fitparse import FitFile

name = 'testfile.FIT'

parsed = FitFile(name)
parsed.parse()

for message in parsed.get_messages():
    print('==> {}'.format(message))
    for field_name, field_value in message.get_values().items():
        print('  {} = {}'.format(field_name, field_value))
Exemple #47
0
 def __init__(self, path):
     """Open the FIT file at *path* and keep the handle on the instance."""
     self.fitfile = FitFile(path)
Exemple #48
0
def convert(filename,
            time_zone="auto",
            dist_recalc=False,
            speed_recalc=False,
            calibrate=False,
            per_lap_cal=False,
            manual_lap_distance=None,
            current_cal_factor=100.0):
    """Convert a FIT file to TCX format.

    Parameters
    ----------
    filename : str
        Path of the FIT activity to convert.
    time_zone : str
        "auto" derives the timezone from the first GPS fix; any other
        value is passed to TZDataProcessor as a timezone name.
    dist_recalc, speed_recalc : bool
        Recalculate distance / speed from GPS or footpod data.
    calibrate : bool
        Compute a new footpod calibration factor (forces distance
        recalculation unless manual lap distances are supplied).
    per_lap_cal : bool
        Apply calibration per lap (implied by *manual_lap_distance*).
    manual_lap_distance :
        Known lap distance(s), if available.
    current_cal_factor : float
        The calibration factor currently set on the device, in percent.

    Returns
    -------
    The TCX document; exits the process on a FIT parse error.
    """

    # Calibration requires either GPS recalculation or manual lap distance(s):
    if calibrate and not dist_recalc and manual_lap_distance is None:
        sys.stderr.write("Calibration requested, enabling distance recalculation from GPS/footpod.\n")
        dist_recalc = True

    # Calibration with manual lap distances implies
    # per-lap calibration:
    if calibrate and manual_lap_distance is not None:
        per_lap_cal = True

    document = create_document()
    element = create_sub_element(document.getroot(), "Activities")

    try:
        if time_zone == "auto":
            # We need activity object to be able to get trackpoints,
            # before re-creating activity again with timezone info
            activity = FitFile(filename,
                            check_crc=False,
                            data_processor=MyDataProcessor())
            activity.parse()
            lat = None
            lon = None
            for trackpoint in activity.get_messages('record'):
                if lat is not None and lon is not None:
                    break
                lat = trackpoint.get_value("position_lat")
                lon = trackpoint.get_value("position_long")
            if lat is not None and lon is not None:
                activity = FitFile(filename,
                                   check_crc=False,
                                   data_processor=TZDataProcessor(lat=lat,
                                                                  lon=lon))
        else:
            activity = FitFile(filename,
                               check_crc=False,
                               data_processor=TZDataProcessor(tzname=time_zone))
        activity.parse()

        session = next(activity.get_messages('session'))
        total_activity_distance = session.get_value('total_distance')
        total_calculated_distance = sum_distance(activity)
        activity_scaling_factor = (total_calculated_distance /
                                   total_activity_distance)
        new_cal_factor = activity_scaling_factor * current_cal_factor

        actelem, total_distance = add_activity(element,
                                               session,
                                               activity,
                                               dist_recalc,
                                               speed_recalc,
                                               calibrate,
                                               current_cal_factor,
                                               per_lap_cal,
                                               manual_lap_distance,
                                               activity_scaling_factor)
    except FitParseError as e:
        sys.stderr.write(str("Error while parsing .FIT file: %s" % e) + "\n")
        sys.exit(1)

    if dist_recalc:
        distance_used = total_calculated_distance
    elif calibrate:
        distance_used = total_distance
    else:
        distance_used = total_activity_distance

    method = ""
    if dist_recalc or speed_recalc or calibrate:
        parts = []
        # Defensive default so `reference` can never be unbound below.
        reference = ""

        if calibrate:
            if per_lap_cal:
                parts.append("calibration applied per lap")
            else:
                parts.append("calibration applied")
        if dist_recalc and speed_recalc:
            parts.append("speed and distance recalculated")
        elif dist_recalc:
            parts.append("distance recalculated")
        elif speed_recalc:
            parts.append("speed recalculated")

        if calibrate and manual_lap_distance is not None:
            reference = " from known distance (with GPS fill-in)"
        elif dist_recalc or speed_recalc:
            reference = " from GPS/footpod"

        method = "(" + ", ".join(parts) + reference + ")"

    # Human-readable summary stored in the TCX notes element.
    notes = ("{total_laps:d} laps: {distance_used:.3f} km in {total_time!s} {dist_method:s}\n"
             "Distance in FIT file: {fit_dist:.3f} km; "
             "calculated via GPS/footpod: {gps_dist:.3f} km "
             "(precision: {precision:.1f}%)\n"
             "Footpod calibration factor setting: {old_cf:.1f}%; "
             "new factor based on recomputed distance: {new_cf:.1f}%"
             ).format(total_laps=session.get_value('num_laps'),
                      distance_used=distance_used / 1000,
                      total_time=timedelta(seconds=int(session.get_value(
                          'total_timer_time'))),
                      fit_dist=total_activity_distance / 1000,
                      gps_dist=total_calculated_distance / 1000,
                      precision=(1 - (abs(total_calculated_distance -
                                          total_activity_distance) /
                                      total_calculated_distance)) * 100,
                      old_cf=current_cal_factor,
                      new_cf=new_cal_factor,
                      dist_method=method)
    add_notes(actelem, notes)
    try:
        # Prefer the richer device_info message for creator metadata.
        dinfo = next(activity.get_messages('device_info'))
        manufacturer = dinfo.get_value('manufacturer').title().replace('_', ' ')
        product_name = dinfo.get_value('descriptor').replace('_', ' ')
        product_id = dinfo.get_value('product')
        serial_number = dinfo.get_value('serial_number')
    except StopIteration:
        # Bug fix: this was a bare `except:` that swallowed every
        # exception; only the absence of a device_info message
        # (StopIteration from next()) should fall back to file_id.
        fid = next(activity.get_messages('file_id'))
        manufacturer = fid.get_value('manufacturer').title().replace('_', ' ')
        product_id = fid.get_value('product')
        product_name = PRODUCT_MAP[product_id] if product_id in PRODUCT_MAP else product_id
        serial_number = fid.get_value('serial_number')
    add_creator(actelem,
                manufacturer,
                product_name,
                product_id,
                serial_number
                )
    add_author(document)
    return document
Exemple #49
0
 def test_speed(self):
     """The session message's avg_speed should decode as 5.86."""
     fit = FitFile(testfile('2019-02-17-062644-ELEMNT-297E-195-0.fit'))
     session = next(fit.get_messages('session'))
     self.assertEqual(session.get_values().get('avg_speed'), 5.86)
Exemple #50
0
    def _csv_test_helper(self, fit_file, csv_file):
        """Compare every 'record' message of *fit_file* against the
        reference CSV dump *csv_file*, field by field, and verify both
        sources contain the same number of messages.
        """
        csv_fp = open(testfile(csv_file), 'r')
        csv_messages = csv.reader(csv_fp)
        field_names = next(csv_messages)  # Consume header

        f = FitFile(testfile(fit_file))
        messages = f.get_messages(name='record')

        # For fixups
        last_valid_lat, last_valid_long = None, None

        for message, csv_message in zip(messages, csv_messages):
            for csv_index, field_name in enumerate(field_names):
                fit_value, csv_value = message.get_value(field_name), csv_message[csv_index]
                if field_name == 'timestamp':
                    # Adjust GMT to PDT and format
                    fit_value = (fit_value - datetime.timedelta(hours=7)).strftime("%a %b %d %H:%M:%S PDT %Y")

                # Track last valid lat/longs
                if field_name == 'position_lat':
                    if fit_value is not None:
                        last_valid_lat = fit_value
                if field_name == 'position_long':
                    if fit_value is not None:
                        last_valid_long = fit_value

                # ANT FIT SDK Dump tool does a bad job of logging invalids, so fix them
                if fit_value is None:
                    # ANT FIT SDK Dump tool cadence reports invalid as 0
                    if field_name == 'cadence' and csv_value == '0':
                        csv_value = None
                    # ANT FIT SDK Dump tool invalid lat/lng reports as last valid
                    if field_name == 'position_lat':
                        fit_value = last_valid_lat
                    if field_name == 'position_long':
                        fit_value = last_valid_long

                # Normalize empty CSV cells before any int conversion.
                if csv_value == '':
                    csv_value = None
                # Bug fix: the original did `csv_value = int(fit_value)`,
                # overwriting the CSV value with the FIT value and making
                # the equality assertion below pass trivially.
                if isinstance(fit_value, int) and csv_value is not None:
                    csv_value = int(csv_value)

                if isinstance(fit_value, float):
                    # Float comparison
                    self.assertAlmostEqual(fit_value, float(csv_value))
                else:
                    self.assertEqual(fit_value, csv_value,
                        msg="For %s, FIT value '%s' did not match CSV value '%s'" % (field_name, fit_value, csv_value))

        try:
            next(messages)
            self.fail(".FIT file had more than csv file")
        except StopIteration:
            pass

        try:
            next(csv_messages)
            self.fail(".CSV file had more messages than .FIT file")
        except StopIteration:
            pass

        csv_fp.close()