def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Fetch the user's DecathlonCoach activities, one ~six-month window at a time.

    :param svcRecord: service record carrying ExternalID and auth state.
    :param exhaustive: when True, walk back 20 additional six-month windows
                       instead of just the most recent one.
    :return: (activities, exclusions) tuple of UploadedActivity and
             APIExcludeActivity lists.
    :raises APIException: on HTTP 400/401/403 from the activities endpoint.
    """
    activities = []
    exclusions = []
    now = datetime.now()
    # timedelta(6 * 365 / 12) ~= 182.5 days, i.e. roughly six months.
    prev = now - timedelta(6 * 365 / 12)
    # Build the "YYYYMM-YYYYMM" date-interval query strings, newest first.
    period = []
    aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month)
    period.append(aperiod)
    if exhaustive:
        for _ in range(20):
            now = prev
            prev = now - timedelta(6 * 365 / 12)
            aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month)
            period.append(aperiod)

    for dateInterval in period:
        headers = self._getAuthHeaders(svcRecord)
        resp = requests.get(
            DECATHLONCOACH_API_BASE_URL + "/users/" + str(svcRecord.ExternalID)
            + "/activities.xml?date=" + dateInterval,
            headers=headers)
        # 400, 401 and 403 were previously handled by three identical
        # branches - they all mean "re-auth needed", so handle them once.
        if resp.status_code in (400, 401, 403):
            logger.info(resp.content)
            raise APIException(
                "No authorization to retrieve activity list",
                block=True,
                user_exception=UserException(UserExceptionType.Authorization,
                                             intervention_required=True))

        root = xml.fromstring(resp.content)
        logger.info("\t\t nb activity : " + str(len(root.findall('.//ID'))))
        for ride in root.iter('ACTIVITY'):
            activity = UploadedActivity()
            activity.TZ = pytz.timezone("UTC")
            # STARTDATE + TIMEZONE concatenate into a parseable datetime string.
            startdate = ride.find('.//STARTDATE').text + ride.find('.//TIMEZONE').text
            datebase = parse(startdate)
            activity.StartTime = datebase  # pytz.utc.localize(datebase)
            activity.ServiceData = {
                "ActivityID": ride.find('ID').text,
                "Manual": ride.find('MANUAL').text
            }
            logger.info("\t\t DecathlonCoach Activity ID : " + ride.find('ID').text)

            sport_id = ride.find('SPORTID').text
            if sport_id not in self._reverseActivityTypeMappings:
                # Unmapped sport - exclude rather than sync a wrong type.
                exclusions.append(
                    APIExcludeActivity("Unsupported activity type %s" % sport_id,
                                       activity_id=ride.find('ID').text,
                                       user_exception=UserException(
                                           UserExceptionType.Other)))
                logger.info("\t\tDecathlonCoach Unknown activity, sport id "
                            + sport_id + " is not mapped")
                continue
            activity.Type = self._reverseActivityTypeMappings[sport_id]

            # Stats arrive as <VALUE id="..."> elements keyed by unit ids.
            for val in ride.iter('VALUE'):
                if val.get('id') == self._unitMap["duration"]:
                    activity.EndTime = activity.StartTime + timedelta(0, int(val.text))
                if val.get('id') == self._unitMap["distance"]:
                    activity.Stats.Distance = ActivityStatistic(
                        ActivityStatisticUnit.Meters, value=int(val.text))
                if val.get('id') == self._unitMap["kcal"]:
                    activity.Stats.Energy = ActivityStatistic(
                        ActivityStatisticUnit.Kilocalories, value=int(val.text))
                if val.get('id') == self._unitMap["speedaverage"]:
                    # The API reports meters/hour; convert to meters/second.
                    meterperhour = int(val.text)
                    meterpersecond = meterperhour / 3600
                    activity.Stats.Speed = ActivityStatistic(
                        ActivityStatisticUnit.MetersPerSecond,
                        avg=meterpersecond,
                        max=None)

            # Fall back to a generated name when LIBELLE is missing or empty
            # (None checked first so the empty-string comparison is safe).
            if ride.find('LIBELLE').text is None or ride.find('LIBELLE').text == "":
                txtdate = startdate.split(' ')
                activity.Name = "Sport DecathlonCoach " + txtdate[0]
            else:
                activity.Name = ride.find('LIBELLE').text
            activity.Private = False
            # NOTE(review): MANUAL's text is a *string*, so any non-empty value
            # (including "false") is truthy here - confirm intended semantics.
            activity.Stationary = ride.find('MANUAL').text
            activity.GPS = ride.find('ABOUT').find('TRACK').text
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Endomondo workouts API and build the activity list.

    :param serviceRecord: service record used to build the OAuth session.
    :param exhaustive: when True, follow "paging.next" links until exhausted;
                       otherwise only the first page is fetched.
    :return: (activities, exclusions) tuple.
    :raises APIException: when a page body cannot be decoded as JSON.
    """
    oauthSession = self._oauthSession(serviceRecord)
    activities = []
    exclusions = []
    page_url = "https://api.endomondo.com/api/1/workouts"
    while True:
        resp = oauthSession.get(page_url)
        # Parse the body exactly once; the original code re-parsed it later
        # to read "paging", and that second parse was unguarded.
        try:
            respData = resp.json()
        except ValueError:
            self._rateLimitBailout(resp)
            raise APIException("Error decoding activity list resp %s %s" %
                               (resp.status_code, resp.text))
        respList = respData["data"]
        for actInfo in respList:
            activity = UploadedActivity()
            activity.StartTime = self._parseDate(actInfo["start_time"])
            logger.debug("Activity s/t %s" % activity.StartTime)
            # Live-tracked workouts aren't complete yet - retry them later.
            if "is_tracking" in actInfo and actInfo["is_tracking"]:
                exclusions.append(
                    APIExcludeActivity(
                        "Not complete",
                        activity_id=actInfo["id"],
                        permanent=False,
                        user_exception=UserException(
                            UserExceptionType.LiveTracking)))
                continue

            if "end_time" in actInfo:
                activity.EndTime = self._parseDate(actInfo["end_time"])

            if actInfo["sport"] in self._activityMappings:
                activity.Type = self._activityMappings[actInfo["sport"]]

            # "duration" is timer time
            if "duration_total" in actInfo:
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Seconds,
                    value=float(actInfo["duration_total"]))

            if "distance_total" in actInfo:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Kilometers,
                    value=float(actInfo["distance_total"]))

            if "calories_total" in actInfo:
                activity.Stats.Energy = ActivityStatistic(
                    ActivityStatisticUnit.Kilocalories,
                    value=float(actInfo["calories_total"]))

            # Elevation: fill in whichever of max/min/gain/loss are present.
            activity.Stats.Elevation = ActivityStatistic(
                ActivityStatisticUnit.Meters)
            if "altitude_max" in actInfo:
                activity.Stats.Elevation.Max = float(actInfo["altitude_max"])
            if "altitude_min" in actInfo:
                activity.Stats.Elevation.Min = float(actInfo["altitude_min"])
            if "total_ascent" in actInfo:
                activity.Stats.Elevation.Gain = float(actInfo["total_ascent"])
            if "total_descent" in actInfo:
                activity.Stats.Elevation.Loss = float(actInfo["total_descent"])

            activity.Stats.Speed = ActivityStatistic(
                ActivityStatisticUnit.KilometersPerHour)
            if "speed_max" in actInfo:
                activity.Stats.Speed.Max = float(actInfo["speed_max"])

            if "heart_rate_avg" in actInfo:
                activity.Stats.HR = ActivityStatistic(
                    ActivityStatisticUnit.BeatsPerMinute,
                    avg=float(actInfo["heart_rate_avg"]))
            if "heart_rate_max" in actInfo:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      max=float(actInfo["heart_rate_max"])))

            if "cadence_avg" in actInfo:
                activity.Stats.Cadence = ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    avg=int(actInfo["cadence_avg"]))
            if "cadence_max" in actInfo:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        max=int(actInfo["cadence_max"])))

            if "title" in actInfo:
                activity.Name = actInfo["title"]

            activity.ServiceData = {
                "WorkoutID": int(actInfo["id"]),
                "Sport": actInfo["sport"]
            }

            activity.CalculateUID()
            activities.append(activity)

        paging = respData["paging"]
        if "next" not in paging or not paging["next"] or not exhaustive:
            break
        else:
            page_url = paging["next"]
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List SportTracks (OpenFit) activities, fixing up their timezones.

    Maintains a per-user cache (cachedb.sporttracks_meta_cache) of
    activity-URI -> UTC-offset-in-minutes, because the API sometimes reports
    UTC for activities that clearly aren't; the cache avoids re-downloading
    the first trackpoint of those activities on every sync.

    :param serviceRecord: service record with ExternalID and auth state.
    :param exhaustive: when True, follow "next" page links until exhausted.
    :return: (activities, exclusions) tuple.
    :raises APIException: when a page body cannot be decoded as JSON.
    """
    headers = self._getAuthHeaders(serviceRecord)
    activities = []
    exclusions = []
    pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
    # Load the cached TZ offsets; fall back to an empty cache document.
    activity_tz_cache_raw = cachedb.sporttracks_meta_cache.find_one(
        {"ExternalID": serviceRecord.ExternalID})
    activity_tz_cache_raw = activity_tz_cache_raw if activity_tz_cache_raw else {
        "Activities": []
    }
    activity_tz_cache = dict([(x["ActivityURI"], x["TZ"])
                              for x in activity_tz_cache_raw["Activities"]
                              ])
    while True:
        logger.debug("Req against " + pageUri)
        res = requests.get(pageUri, headers=headers)
        try:
            res = res.json()
        except ValueError:
            raise APIException(
                "Could not decode activity list response %s %s" %
                (res.status_code, res.text))
        for act in res["items"]:
            activity = UploadedActivity()
            activity.ServiceData = {"ActivityURI": act["uri"]}

            if len(act["name"].strip()):
                activity.Name = act["name"]

            # Longstanding ST.mobi bug causes it to return negative partial-hour timezones as "-2:-30" instead of "-2:30"
            fixed_start_time = re.sub(r":-(\d\d)", r":\1", act["start_time"])
            activity.StartTime = dateutil.parser.parse(fixed_start_time)
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(
                    activity.StartTime.tzinfo.utcoffset(
                        activity.StartTime).total_seconds() /
                    60)  # Convert the dateutil lame timezones into pytz awesome timezones.

            activity.StartTime = activity.StartTime.replace(
                tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(
                seconds=float(act["duration"]))
            activity.Stats.TimerTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds,
                value=float(act["duration"]
                            ))  # OpenFit says this excludes paused times.

            # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
            if activity.TZ == pytz.utc:
                if act["uri"] in activity_tz_cache:
                    # Cache hit: reuse the previously-computed offset.
                    activity.TZ = pytz.FixedOffset(
                        activity_tz_cache[act["uri"]])
                else:
                    # So, we get the first location in the activity and calculate the TZ from that.
                    try:
                        firstLocation = self._downloadActivity(
                            serviceRecord,
                            activity,
                            returnFirstLocation=True)
                    except APIExcludeActivity:
                        # Can't download it - leave the reported (UTC) TZ.
                        pass
                    else:
                        try:
                            activity.CalculateTZ(firstLocation,
                                                 recalculate=True)
                        except:
                            # We tried! (bare except is deliberate best-effort)
                            pass
                        else:
                            activity.AdjustTZ()
                        finally:
                            # Cache whatever offset we ended up with, even
                            # if the recalculation failed.
                            activity_tz_cache[
                                act["uri"]] = activity.StartTime.utcoffset(
                                ).total_seconds() / 60
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Meters,
                value=float(act["total_distance"]))

            types = [x.strip().lower() for x in act["type"].split(":")]
            types.reverse(
            )  # The incoming format is like "walking: hiking" and we want the most specific first

            activity.Type = None
            for type_key in types:
                if type_key in self._activityMappings:
                    activity.Type = self._activityMappings[type_key]
                    break

            if not activity.Type:
                exclusions.append(
                    APIExcludeActivity("Unknown activity type %s" %
                                       act["type"],
                                       activity_id=act["uri"],
                                       user_exception=UserException(
                                           UserExceptionType.Other)))
                continue

            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or "next" not in res or not len(res["next"]):
            break
        else:
            pageUri = res["next"]
    logger.debug("Writing back meta cache")
    # Persist the (possibly updated) URI -> offset cache for next sync.
    cachedb.sporttracks_meta_cache.update(
        {"ExternalID": serviceRecord.ExternalID}, {
            "ExternalID": serviceRecord.ExternalID,
            "Activities": [{
                "ActivityURI": k,
                "TZ": v
            } for k, v in activity_tz_cache.items()]
        },
        upsert=True)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Garmin Connect legacy activity-search service.

    Applies a pile of Garmin-specific fixups: pace reported as speed,
    HR given as %-of-max or zones, doubled run cadence, and stats whose
    min can exceed their max.

    :param serviceRecord: record used to obtain session cookies.
    :param exhaustive: when True, keep paging until totalPages is reached.
    :return: (activities, exclusions) tuple.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({
            "start": (page - 1) * pageSz,
            "limit": pageSz
        }))
        self._rate_limit()

        res = requests.get(
            "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
            params={
                "start": (page - 1) * pageSz,
                "limit": pageSz
            },
            cookies=cookies)
        res = res.json()["results"]
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            if "sumDistance" not in act:
                exclusions.append(
                    APIExcludeActivity("No distance",
                                       activityId=act["activityId"],
                                       userException=UserException(
                                           UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()

            # Presence of sample counts is used as a proxy for "has a track".
            if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act:
                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = True
            else:
                activity.Stationary = False

            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Fall back to a fixed offset when the zone name is unknown.
                activity.TZ = pytz.FixedOffset(
                    float(act["activityTimeZone"]["offset"]) * 60)

            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip(
            )) and act["activityName"][
                    "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]

            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]

            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(
                    float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # "minutesSeconds" is an "M:S" string.
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) +
                         " on page " + str(page))
            activity.AdjustTZ()

            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(
                self._unitMap[act["sumDistance"]["uom"]],
                value=float(act["sumDistance"]["value"]))

            def mapStat(gcKey, statKey, type, useSourceUnits=False):
                # Copy one GC stat field into activity.Stats.<statKey>.<type>,
                # converting via the unit map unless useSourceUnits is set.
                nonlocal activity, act
                if gcKey in act:
                    value = float(act[gcKey]["value"])
                    if math.isinf(value):
                        return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                    activity.Stats.__dict__[statKey].update(
                        ActivityStatistic(self._unitMap[act[gcKey]["uom"]],
                                          **({
                                              type: value
                                          })))
                    if useSourceUnits:
                        activity.Stats.__dict__[
                            statKey] = activity.Stats.__dict__[
                                statKey].asUnits(
                                    self._unitMap[act[gcKey]["uom"]])

            if "sumMovingDuration" in act:
                activity.Stats.MovingTime = ActivityStatistic(
                    ActivityStatisticUnit.Time,
                    value=timedelta(
                        seconds=float(act["sumMovingDuration"]["value"])))
            if "sumDuration" in act:
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Time,
                    value=timedelta(
                        minutes=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[1])))
            mapStat(
                "minSpeed", "Speed", "min", useSourceUnits=True
            )  # We need to suppress conversion here, so we can fix the pace-speed issue below
            mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
            mapStat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
            mapStat("minAirTemperature", "Temperature", "min")
            mapStat("maxAirTemperature", "Temperature", "max")
            mapStat("weightedMeanAirTemperature", "Temperature", "avg")
            mapStat("sumEnergy", "Energy", "value")
            mapStat("maxHeartRate", "HR", "max")
            mapStat("weightedMeanHeartRate", "HR", "avg")
            mapStat("maxRunCadence", "RunCadence", "max")
            mapStat("weightedMeanRunCadence", "RunCadence", "avg")
            mapStat("maxBikeCadence", "Cadence", "max")
            mapStat("weightedMeanBikeCadence", "Cadence", "avg")
            mapStat("minPower", "Power", "min")
            mapStat("maxPower", "Power", "max")
            mapStat("weightedMeanPower", "Power", "avg")
            mapStat("minElevation", "Elevation", "min")
            mapStat("maxElevation", "Elevation", "max")
            mapStat("gainElevation", "Elevation", "gain")
            mapStat("lossElevation", "Elevation", "loss")

            # In Garmin Land, max can be smaller than min for this field :S
            if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
                activity.Stats.Power.Min = None

            # To get it to match what the user sees in GC.
            if activity.Stats.RunCadence.Max is not None:
                activity.Stats.RunCadence.Max *= 2
            if activity.Stats.RunCadence.Average is not None:
                activity.Stats.RunCadence.Average *= 2

            # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
            # (a ":" in the display abbreviation marks a pace value; invert it).
            if "minSpeed" in act:
                if ":" in act["minSpeed"][
                        "withUnitAbbr"] and activity.Stats.Speed.Min:
                    activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
            if "maxSpeed" in act:
                if ":" in act["maxSpeed"][
                        "withUnitAbbr"] and activity.Stats.Speed.Max:
                    activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
            if "weightedMeanSpeed" in act:
                if ":" in act["weightedMeanSpeed"][
                        "withUnitAbbr"] and activity.Stats.Speed.Average:
                    activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average

            # Similarly, they do weird stuff with HR at times - %-of-max and zones
            # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
            recalcHR = False
            if "maxHeartRate" in act:
                if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act[
                        "maxHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Max = None
                    recalcHR = True
            if "weightedMeanHeartRate" in act:
                if "%" in act["weightedMeanHeartRate"][
                        "withUnitAbbr"] or "z" in act[
                            "weightedMeanHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Average = None
                    recalcHR = True

            activity.Type = self._resolveActivityType(
                act["activityType"]["key"])

            activity.CalculateUID()

            activity.ServiceData = {
                "ActivityID": act["activityId"],
                "RecalcHR": recalcHR
            }

            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " +
                     str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def _downloadActivitySummary(self, serviceRecord, activity):
    """Populate activity.Stats and activity.Laps from the modern GC API.

    Fetches the summary DTO and the splits list for the activity referenced
    by activity.ServiceData["ActivityID"], mapping GC's flat numeric fields
    onto ActivityStatistic objects via a shared stat_map.

    :raises APIException: when either response body isn't valid JSON.
    :raises APIExcludeActivity: when a lap has no usable start/end time.
    """
    activityID = activity.ServiceData["ActivityID"]
    summary_resp = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-service/activity/" + str(activityID)), serviceRecord)
    try:
        summary_data = summary_resp.json()
    except ValueError:
        raise APIException("Failure downloading activity summary %s:%s" % (summary_resp.status_code, summary_resp.text))

    stat_map = {}

    def mapStat(gcKey, statKey, type, units):
        # Register one GC field -> (Stats attribute, min/max/avg/value, unit).
        stat_map[gcKey] = {
            "key": statKey,
            "attr": type,
            "units": units
        }

    def applyStats(gc_dict, stats_obj):
        # Copy every registered field present in gc_dict onto stats_obj.
        for gc_key, stat in stat_map.items():
            if gc_key in gc_dict:
                value = float(gc_dict[gc_key])
                if math.isinf(value):
                    continue  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                getattr(stats_obj, stat["key"]).update(ActivityStatistic(stat["units"], **({stat["attr"]: value})))

    mapStat("movingDuration", "MovingTime", "value", ActivityStatisticUnit.Seconds)
    mapStat("duration", "TimerTime", "value", ActivityStatisticUnit.Seconds)
    mapStat("distance", "Distance", "value", ActivityStatisticUnit.Meters)
    mapStat("maxSpeed", "Speed", "max", ActivityStatisticUnit.MetersPerSecond)
    mapStat("averageSpeed", "Speed", "avg", ActivityStatisticUnit.MetersPerSecond)
    mapStat("calories", "Energy", "value", ActivityStatisticUnit.Kilocalories)
    mapStat("maxHR", "HR", "max", ActivityStatisticUnit.BeatsPerMinute)
    mapStat("averageHR", "HR", "avg", ActivityStatisticUnit.BeatsPerMinute)
    mapStat("minElevation", "Elevation", "min", ActivityStatisticUnit.Meters)
    mapStat("maxElevation", "Elevation", "max", ActivityStatisticUnit.Meters)
    mapStat("elevationGain", "Elevation", "gain", ActivityStatisticUnit.Meters)
    mapStat("elevationLoss", "Elevation", "loss", ActivityStatisticUnit.Meters)
    mapStat("averageBikeCadence", "Cadence", "avg", ActivityStatisticUnit.RevolutionsPerMinute)
    mapStat("averageCadence", "Cadence", "avg", ActivityStatisticUnit.StepsPerMinute)

    applyStats(summary_data["summaryDTO"], activity.Stats)

    laps_resp = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-service/activity/%s/splits" % str(activityID)), serviceRecord)
    try:
        laps_data = laps_resp.json()
    except ValueError:
        raise APIException("Failure downloading activity laps summary %s:%s" % (laps_resp.status_code, laps_resp.text))

    for lap_data in laps_data["lapDTOs"]:
        lap = Lap()
        if "startTimeGMT" in lap_data:
            lap.StartTime = pytz.utc.localize(datetime.strptime(lap_data["startTimeGMT"], "%Y-%m-%dT%H:%M:%S.0"))

        elapsed_duration = None
        if "elapsedDuration" in lap_data:
            elapsed_duration = timedelta(seconds=round(float(lap_data["elapsedDuration"])))
        elif "duration" in lap_data:
            elapsed_duration = timedelta(seconds=round(float(lap_data["duration"])))

        if lap.StartTime and elapsed_duration:
            # Always recalculate end time based on duration, if we have the start time
            lap.EndTime = lap.StartTime + elapsed_duration

        if not lap.StartTime and lap.EndTime and elapsed_duration:
            # Sometimes calculate start time based on duration
            lap.StartTime = lap.EndTime - elapsed_duration

        if not lap.StartTime or not lap.EndTime:
            # Garmin Connect is weird.
            raise APIExcludeActivity("Activity lap has no BeginTimestamp or EndTimestamp", user_exception=UserException(UserExceptionType.Corrupt))

        applyStats(lap_data, lap.Stats)
        activity.Laps.append(lap)

    # In Garmin Land, max can be smaller than min for this field :S
    # (NOTE(review): Power is never registered in stat_map above, so this
    # only fires if Power was populated elsewhere - confirm intent.)
    if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
        activity.Stats.Power.Min = None
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the user's Strava activities via the v3 API, paging by "before".

    :param svcRecord: service record carrying ExternalID and API auth.
    :param exhaustive: when True, keep requesting older activities (using the
                       earliest start time seen as the "before" cursor) until
                       the API returns an empty page.
    :return: (activities, exclusions) tuple.
    :raises APIException: on HTTP 401 or an undecodable response body.
    """
    activities = []
    exclusions = []
    before = earliestDate = None

    while True:
        if before is not None and before < 0:
            break  # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        self._globalRateLimit()
        resp = requests.get("https://www.strava.com/api/v3/athletes/" +
                            str(svcRecord.ExternalID) + "/activities",
                            headers=self._apiHeaders(svcRecord),
                            params={"before": before})
        if resp.status_code == 401:
            raise APIException(
                "No authorization to retrieve activity list",
                block=True,
                user_exception=UserException(UserExceptionType.Authorization,
                                             intervention_required=True))

        earliestDate = None

        try:
            reqdata = resp.json()
        except ValueError:
            raise APIException("Failed parsing strava list response %s - %s" %
                               (resp.status_code, resp.text))

        if not len(reqdata):
            break  # No more activities to see

        for ride in reqdata:
            activity = UploadedActivity()
            # Raw string for the regex - the original non-raw "^\(..." relied
            # on invalid escape sequences (a SyntaxWarning in modern Python).
            activity.TZ = pytz.timezone(
                re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"])
            )  # Comes back as "(GMT -13:37) The Stuff/We Want"
            activity.StartTime = pytz.utc.localize(
                datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t %s: %s" %
                         (activity.StartTime, ride["name"]))
            # Track the earliest activity seen to use as the next page cursor.
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(
                    activity.StartTime.astimezone(pytz.utc).timetuple())

            activity.EndTime = activity.StartTime + timedelta(
                0, ride["elapsed_time"])
            activity.ServiceData = {
                "ActivityID": ride["id"],
                "Manual": ride["manual"]
            }

            if ride["type"] not in self._reverseActivityTypeMappings:
                exclusions.append(
                    APIExcludeActivity("Unsupported activity type %s" %
                                       ride["type"],
                                       activity_id=ride["id"],
                                       user_exception=UserException(
                                           UserExceptionType.Other)))
                logger.debug("\t\tUnknown activity")
                continue

            activity.Type = self._reverseActivityTypeMappings[ride["type"]]
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Meters, value=ride["distance"])
            if "max_speed" in ride or "average_speed" in ride:
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=ride["average_speed"] if "average_speed" in ride else None,
                    max=ride["max_speed"] if "max_speed" in ride else None)
            activity.Stats.MovingTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds,
                value=ride["moving_time"]
                if "moving_time" in ride and ride["moving_time"] > 0 else None
            )  # They don't let you manually enter this, and I think it returns 0 for those activities.
            # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?

            if "average_watts" in ride:
                activity.Stats.Power = ActivityStatistic(
                    ActivityStatisticUnit.Watts, avg=ride["average_watts"])

            if "average_heartrate" in ride:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      avg=ride["average_heartrate"]))
            if "max_heartrate" in ride:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      max=ride["max_heartrate"]))
            if "average_cadence" in ride:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        avg=ride["average_cadence"]))
            if "average_temp" in ride:
                activity.Stats.Temperature.update(
                    ActivityStatistic(ActivityStatisticUnit.DegreesCelcius,
                                      avg=ride["average_temp"]))
            if "calories" in ride:
                activity.Stats.Energy = ActivityStatistic(
                    ActivityStatisticUnit.Kilocalories,
                    value=ride["calories"])
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.Stationary = ride["manual"]
            activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"]
                                                         is not None)
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)

        if not exhaustive or not earliestDate:
            break

    return activities, exclusions
def _downloadActivitySummary(self, serviceRecord, activity):
    """Populate activity.Stats and activity.Laps from the legacy GC 1.3 API.

    Unlike the modern endpoint, fields here are {"value": ..., "uom": ...}
    objects, so units are resolved at apply time via self._unitMap.

    :raises APIException: when the response body isn't valid JSON.
    :raises APIExcludeActivity: when a lap has no usable start/end time.
    """
    activityID = activity.ServiceData["ActivityID"]

    session = self._get_session(record=serviceRecord)
    self._rate_limit()
    res = session.get("http://connect.garmin.com/proxy/activity-service-1.3/json/activity/" + str(activityID))

    try:
        raw_data = res.json()
    except ValueError:
        raise APIException("Failure downloading activity summary %s:%s" % (res.status_code, res.text))
    stat_map = {}

    def mapStat(gcKey, statKey, type):
        # Register one GC field -> (Stats attribute, min/max/avg/value slot).
        stat_map[gcKey] = {
            "key": statKey,
            "attr": type
        }

    def applyStats(gc_dict, stats_obj):
        # Copy every registered field present in gc_dict onto stats_obj,
        # resolving units from the field's own "uom" entry.
        for gc_key, stat in stat_map.items():
            if gc_key in gc_dict:
                value = float(gc_dict[gc_key]["value"])
                units = self._unitMap[gc_dict[gc_key]["uom"]]
                if math.isinf(value):
                    continue  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                getattr(stats_obj, stat["key"]).update(ActivityStatistic(units, **({stat["attr"]: value})))

    mapStat("SumMovingDuration", "MovingTime", "value")
    mapStat("SumDuration", "TimerTime", "value")
    mapStat("SumDistance", "Distance", "value")
    mapStat("MinSpeed", "Speed", "min")
    mapStat("MaxSpeed", "Speed", "max")
    mapStat("WeightedMeanSpeed", "Speed", "avg")
    mapStat("MinAirTemperature", "Temperature", "min")
    mapStat("MaxAirTemperature", "Temperature", "max")
    mapStat("WeightedMeanAirTemperature", "Temperature", "avg")
    mapStat("SumEnergy", "Energy", "value")
    mapStat("MaxHeartRate", "HR", "max")
    mapStat("WeightedMeanHeartRate", "HR", "avg")
    mapStat("MaxDoubleCadence", "RunCadence", "max")
    mapStat("WeightedMeanDoubleCadence", "RunCadence", "avg")
    mapStat("MaxBikeCadence", "Cadence", "max")
    mapStat("WeightedMeanBikeCadence", "Cadence", "avg")
    mapStat("MinPower", "Power", "min")
    mapStat("MaxPower", "Power", "max")
    mapStat("WeightedMeanPower", "Power", "avg")
    mapStat("MinElevation", "Elevation", "min")
    mapStat("MaxElevation", "Elevation", "max")
    mapStat("GainElevation", "Elevation", "gain")
    mapStat("LossElevation", "Elevation", "loss")

    applyStats(raw_data["activity"]["activitySummary"], activity.Stats)

    for lap_data in raw_data["activity"]["totalLaps"]["lapSummaryList"]:
        lap = Lap()
        if "BeginTimestamp" in lap_data:
            # Timestamps are epoch milliseconds in UTC.
            lap.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(lap_data["BeginTimestamp"]["value"]) / 1000))
        if "EndTimestamp" in lap_data:
            lap.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(lap_data["EndTimestamp"]["value"]) / 1000))

        elapsed_duration = None
        if "SumElapsedDuration" in lap_data:
            elapsed_duration = timedelta(seconds=round(float(lap_data["SumElapsedDuration"]["value"])))
        elif "SumDuration" in lap_data:
            elapsed_duration = timedelta(seconds=round(float(lap_data["SumDuration"]["value"])))

        if lap.StartTime and elapsed_duration:
            # Always recalculate end time based on duration, if we have the start time
            lap.EndTime = lap.StartTime + elapsed_duration

        if not lap.StartTime and lap.EndTime and elapsed_duration:
            # Sometimes calculate start time based on duration
            lap.StartTime = lap.EndTime - elapsed_duration

        if not lap.StartTime or not lap.EndTime:
            # Garmin Connect is weird.
            raise APIExcludeActivity("Activity lap has no BeginTimestamp or EndTimestamp", user_exception=UserException(UserExceptionType.Corrupt))

        applyStats(lap_data, lap.Stats)
        activity.Laps.append(lap)

    # In Garmin Land, max can be smaller than min for this field :S
    if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
        activity.Stats.Power.Min = None
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List the user's Ride With GPS trips.

    :param serviceRecord: service record carrying ExternalID and auth params.
    :param exhaustive: accepted for interface parity; the RWGPS endpoint
                       doesn't support paging, so it has no effect.
    :return: (activities, exclusions) tuple.
    """
    def mapStatTriple(act, stats_obj, key, units):
        # Apply the max/min/avg variants of `key` when present and truthy
        # (a single .get per variant instead of the double lookup).
        for suffix in ("max", "min", "avg"):
            raw = act.get("%s_%s" % (key, suffix))
            if raw:
                stats_obj.update(ActivityStatistic(units, **{suffix: float(raw)}))

    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported

    activities = []
    exclusions = []
    # They don't actually support paging right now, for whatever reason
    params = self._add_auth_params({}, record=serviceRecord)

    res = requests.get("https://ridewithgps.com/users/{}/trips.json".format(
        serviceRecord.ExternalID), params=params)
    res = res.json()

    # Apparently some API users are seeing this new result format - I'm not
    if isinstance(res, dict):
        res = res.get("results", [])

    if not res:
        return [], []  # No activities

    for act in res:
        if "distance" not in act:
            exclusions.append(
                APIExcludeActivity("No distance", activity_id=act["id"],
                                   user_exception=UserException(UserExceptionType.Corrupt)))
            continue
        if "duration" not in act or not act["duration"]:
            exclusions.append(
                APIExcludeActivity("No duration", activity_id=act["id"],
                                   user_exception=UserException(UserExceptionType.Corrupt)))
            continue

        activity = UploadedActivity()

        logger.debug("Name " + act["name"] + ":")
        if act.get("name", None) and len(act["name"].strip()):
            activity.Name = act["name"]

        if act.get("description", None) and len(act["description"].strip()):
            activity.Notes = act["description"]

        activity.GPS = act["is_gps"]
        activity.Stationary = not activity.GPS  # I think

        # 0 = public, 1 = private, 2 = friends
        activity.Private = act["visibility"] == 1

        activity.StartTime = dateutil.parser.parse(act["departed_at"])

        try:
            activity.TZ = pytz.timezone(act["time_zone"])
        except pytz.exceptions.UnknownTimeZoneError:
            # Sometimes the time_zone returned isn't quite what we'd like it
            # So, just pull the offset from the datetime
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(
                    activity.StartTime.tzinfo.utcoffset(
                        activity.StartTime).total_seconds() / 60)

        activity.StartTime = activity.StartTime.replace(
            tzinfo=activity.TZ)  # Overwrite dateutil's sillyness

        activity.EndTime = activity.StartTime + timedelta(
            seconds=self._duration_to_seconds(act["duration"]))
        logger.debug("Activity s/t " + str(activity.StartTime))

        activity.AdjustTZ()

        # value= keyword added for consistency with the other adapters.
        activity.Stats.Distance = ActivityStatistic(
            ActivityStatisticUnit.Meters, value=float(act["distance"]))

        mapStatTriple(act, activity.Stats.Power, "watts", ActivityStatisticUnit.Watts)
        mapStatTriple(act, activity.Stats.Speed, "speed", ActivityStatisticUnit.KilometersPerHour)
        mapStatTriple(act, activity.Stats.Cadence, "cad", ActivityStatisticUnit.RevolutionsPerMinute)
        mapStatTriple(act, activity.Stats.HR, "hr", ActivityStatisticUnit.BeatsPerMinute)

        if act.get("elevation_gain"):
            activity.Stats.Elevation.update(
                ActivityStatistic(ActivityStatisticUnit.Meters,
                                  gain=float(act["elevation_gain"])))

        if act.get("elevation_loss"):
            activity.Stats.Elevation.update(
                ActivityStatistic(ActivityStatisticUnit.Meters,
                                  loss=float(act["elevation_loss"])))

        # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
        activity.Type = ActivityType.Cycling

        activity.CalculateUID()
        activity.ServiceData = {"ActivityID": act["id"]}
        activities.append(activity)
    return activities, exclusions
def DownloadActivity(self, svcRecord, activity):
    """Download full stream data for one Strava activity and build its Lap/Waypoints.

    :param svcRecord: connected-service record (ExternalID + API auth).
    :param activity: UploadedActivity from DownloadActivityList; its
        ServiceData carries "ActivityID" and the "Manual" flag.
    :return: the same activity, with Laps/Waypoints populated.
    :raises APIException: on auth failure, non-JSON payload, missing record,
        or a Strava-reported stream error.
    :raises APIExcludeActivity: when no waypoint has a usable location.
    """
    if activity.ServiceData["Manual"]:  # I should really add a param to DownloadActivity for this value as opposed to constantly doing this
        # We've got as much information as we're going to get - we need to copy it into a Lap though.
        activity.Laps = [Lap(startTime=activity.StartTime, endTime=activity.EndTime, stats=activity.Stats)]
        return activity
    activityID = activity.ServiceData["ActivityID"]
    streamdata = requests.get(
        "https://www.strava.com/api/v3/activities/" + str(activityID) + "/streams/time,altitude,heartrate,cadence,watts,temp,moving,latlng",
        headers=self._apiHeaders(svcRecord))
    if streamdata.status_code == 401:
        self._logAPICall("download", (svcRecord.ExternalID, str(activity.StartTime)), "auth")
        raise APIException("No authorization to download activity", block=True,
                           user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
    try:
        streamdata = streamdata.json()
    except ValueError:  # narrowed from bare except: .json() raises ValueError on bad payloads
        raise APIException("Stream data returned is not JSON")
    if "message" in streamdata and streamdata["message"] == "Record Not Found":
        self._logAPICall("download", (svcRecord.ExternalID, str(activity.StartTime)), "missing")
        raise APIException("Could not find activity")
    # Re-key the list of streams by stream type for easy lookup.
    ridedata = {}
    for stream in streamdata:
        ridedata[stream["type"]] = stream["data"]
    lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)  # Strava doesn't support laps, but we need somewhere to put the waypoints.
    activity.Laps = [lap]
    lap.Waypoints = []
    hasHR = "heartrate" in ridedata and len(ridedata["heartrate"]) > 0
    hasCadence = "cadence" in ridedata and len(ridedata["cadence"]) > 0
    hasTemp = "temp" in ridedata and len(ridedata["temp"]) > 0
    hasPower = ("watts" in ridedata and len(ridedata["watts"]) > 0)
    hasAltitude = "altitude" in ridedata and len(ridedata["altitude"]) > 0
    hasMovingData = "moving" in ridedata and len(ridedata["moving"]) > 0
    moving = True
    if "error" in ridedata:
        self._logAPICall("download", (svcRecord.ExternalID, str(activity.StartTime)), "data")
        raise APIException("Strava error " + ridedata["error"])
    hasLocation = False
    waypointCt = len(ridedata["time"])
    # NOTE(review): the loop stops at waypointCt - 1 (last sample dropped) and
    # marks index waypointCt - 2 as the End waypoint - preserved as-is.
    for idx in range(0, waypointCt - 1):
        waypoint = Waypoint(activity.StartTime + timedelta(0, ridedata["time"][idx]))
        latlng = ridedata["latlng"][idx]  # BUGFIX: was assigned twice; once is enough
        waypoint.Location = Location(latlng[0], latlng[1], None)
        if waypoint.Location.Longitude == 0 and waypoint.Location.Latitude == 0:
            waypoint.Location.Longitude = None
            waypoint.Location.Latitude = None
        else:  # strava only returns 0 as invalid coords, so no need to check for null (update: ??)
            hasLocation = True
        if hasAltitude:
            waypoint.Location.Altitude = float(ridedata["altitude"][idx])
        if idx == 0:
            waypoint.Type = WaypointType.Start
        elif idx == waypointCt - 2:
            waypoint.Type = WaypointType.End
        elif hasMovingData and not moving and ridedata["moving"][idx] is True:
            waypoint.Type = WaypointType.Resume
            moving = True
        elif hasMovingData and ridedata["moving"][idx] is False:
            waypoint.Type = WaypointType.Pause
            moving = False
        if hasHR:
            waypoint.HR = ridedata["heartrate"][idx]
        if hasCadence:
            waypoint.Cadence = ridedata["cadence"][idx]
        if hasTemp:
            waypoint.Temp = ridedata["temp"][idx]
        if hasPower:
            waypoint.Power = ridedata["watts"][idx]
        lap.Waypoints.append(waypoint)
    if not hasLocation:
        self._logAPICall("download", (svcRecord.ExternalID, str(activity.StartTime)), "faulty")
        # BUGFIX: keyword names were activityId=/userException=, which do not
        # match APIExcludeActivity's signature (activity_id=/user_exception=)
        # as used everywhere else in this file - the raise itself would TypeError.
        raise APIExcludeActivity("No waypoints with location", activity_id=activityID,
                                 user_exception=UserException(UserExceptionType.Corrupt))
    self._logAPICall("download", (svcRecord.ExternalID, str(activity.StartTime)), None)
    return activity
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List workouts from TrainingPeaks as UploadedActivity objects.

    TrainingPeaks only supports querying by date range, so exhaustive mode
    walks backwards-to-forwards from 1980, advancing the window start past
    the newest workout seen until a query returns nothing new.

    :param svcRecord: connected-service record used for auth.
    :param exhaustive: when True, fetch the full history instead of ~20 days.
    :return: (activities, exclusions) tuple.
    """
    ns = self._tp_ns
    activities = []
    exclusions = []
    reqData = self._authData(svcRecord)
    limitDateFormat = "%d %B %Y"
    if exhaustive:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = datetime(day=1, month=1, year=1980)  # The beginning of time
    else:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = listEnd - timedelta(days=20)  # Doesn't really matter
    lastActivityDay = None
    discoveredWorkoutIds = []
    while True:
        reqData.update({
            "startDate": listStart.strftime(limitDateFormat),
            "endDate": listEnd.strftime(limitDateFormat)
        })
        # (removed a leftover debug print of the requested date range)
        resp = requests.post(
            "https://www.trainingpeaks.com/tpwebservices/service.asmx/GetWorkoutsForAthlete",
            data=reqData)
        xresp = etree.XML(resp.content)
        for xworkout in xresp:
            activity = UploadedActivity()
            workoutId = xworkout.find("tpw:WorkoutId", namespaces=ns).text
            workoutDayEl = xworkout.find("tpw:WorkoutDay", namespaces=ns)
            startTimeEl = xworkout.find("tpw:StartTime", namespaces=ns)
            workoutDay = dateutil.parser.parse(workoutDayEl.text)
            startTime = dateutil.parser.parse(startTimeEl.text) if startTimeEl is not None and startTimeEl.text else None
            # Track the newest workout day so exhaustive paging can advance.
            if lastActivityDay is None or workoutDay.replace(tzinfo=None) > lastActivityDay:
                lastActivityDay = workoutDay.replace(tzinfo=None)
            if startTime is None:
                continue  # Planned but not executed yet.
            activity.StartTime = startTime
            endTimeEl = xworkout.find("tpw:TimeTotalInSeconds", namespaces=ns)
            # Guard the element itself as well as its text - a missing element
            # used to raise AttributeError here instead of being excluded.
            if endTimeEl is None or not endTimeEl.text:
                exclusions.append(APIExcludeActivity("Activity has no duration", activity_id=workoutId,
                                                     user_exception=UserException(UserExceptionType.Corrupt)))
                continue
            activity.EndTime = activity.StartTime + timedelta(seconds=float(endTimeEl.text))
            distEl = xworkout.find("tpw:DistanceInMeters", namespaces=ns)
            if distEl is not None and distEl.text:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(distEl.text))
            # PWX is damn near comprehensive, no need to fill in any of the other statisitcs here, really
            if workoutId in discoveredWorkoutIds:
                continue  # There's the possibility of query overlap, if there are multiple activities on a single day that fall across the query return limit
            discoveredWorkoutIds.append(workoutId)
            workoutTypeEl = xworkout.find("tpw:WorkoutTypeDescription", namespaces=ns)
            if workoutTypeEl.text:
                if workoutTypeEl.text == "Day Off":
                    continue
                # TrainingPeaks has some weird activity types...
                if workoutTypeEl.text not in self._workoutTypeMappings:
                    exclusions.append(APIExcludeActivity("Activity type %s unknown" % workoutTypeEl.text,
                                                         activity_id=workoutId,
                                                         user_exception=UserException(UserExceptionType.Corrupt)))
                    continue
                activity.Type = self._workoutTypeMappings[workoutTypeEl.text]
            activity.ServiceData = {"WorkoutID": workoutId}
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive:
            break
        # Since TP only lets us query by date range, to get full activity history we need to query successively smaller ranges
        if len(xresp):
            if listStart == lastActivityDay:
                break  # This wouldn't work if you had more than #MaxQueryReturn activities on that day - but that number is probably 50+
            listStart = lastActivityDay
        else:
            break  # We're done
    return activities, exclusions