def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List this account's activities.

    Returns (activities, exclusions). Activities whose type has no entry in
    self._activityMappings are reported as exclusions and skipped.
    """
    activities = []
    exclusions = []
    for act in self._getActivities(serviceRecord, exhaustive=exhaustive):
        activity = UploadedActivity()
        activity.StartTime = dateutil.parser.parse(act['startDateTimeLocal'])
        activity.EndTime = activity.StartTime + timedelta(seconds=act['duration'])
        _type = self._activityMappings.get(act['activityType'])
        if not _type:
            exclusions.append(APIExcludeActivity("Unsupported activity type %s" % act['activityType'], activity_id=act["activityId"], user_exception=UserException(UserExceptionType.Other)))
            # BUG FIX: previously fell through after recording the exclusion,
            # appending the activity with Type=None anyway; an excluded
            # activity must be skipped.
            continue
        activity.ServiceData = {"ActivityID": act['activityId']}
        activity.Type = _type
        activity.Notes = act['notes']
        # A non-zero/non-missing start latitude is taken as evidence of GPS data.
        activity.GPS = bool(act.get('startLatitude'))
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=act['distance'])
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=act['calories'])
        if 'heartRateMin' in act:
            activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, min=act['heartRateMin'], max=act['heartRateMax'], avg=act['heartRateAverage'])
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=act['duration'])
        if 'temperature' in act:
            activity.Stats.Temperature = ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=act['temperature'])
        activity.CalculateUID()
        logger.debug("\tActivity s/t %s", activity.StartTime)
        activities.append(activity)
    return activities, exclusions
def _populateActivity(self, rawRecord):
    ''' Populate the 1st level of the activity object with all details required for UID from pulsstory API data '''
    activity = UploadedActivity()
    # can stay local + naive here, recipient services can calculate TZ as required
    activity.Name = rawRecord["Name"] if "Name" in rawRecord else None
    activity.StartTime = datetime.strptime(rawRecord["StartTime"], "%Y-%m-%d %H:%M:%S")
    activity.Stats.MovingTime = ActivityStatistic(
        ActivityStatisticUnit.Seconds, value=float(rawRecord["Duration"]))
    activity.EndTime = activity.StartTime + timedelta(
        seconds=float(rawRecord["Duration"]))
    activity.Stats.Distance = ActivityStatistic(
        ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
    # Average speed is derived from distance/elapsed time; guard against
    # zero-length activities to avoid division by zero.
    if (activity.EndTime - activity.StartTime).total_seconds() > 0:
        activity.Stats.Speed = ActivityStatistic(
            ActivityStatisticUnit.KilometersPerHour,
            avg=activity.Stats.Distance.asUnits(
                ActivityStatisticUnit.Kilometers).Value /
            ((activity.EndTime - activity.StartTime).total_seconds() / 60 / 60))
    activity.Stats.Energy = ActivityStatistic(
        ActivityStatisticUnit.Kilocalories,
        value=rawRecord["Energy"] if "Energy" in rawRecord else None)
    # Unmapped types leave activity.Type at its default.
    if rawRecord["Type"] in self._activityMappings:
        activity.Type = self._activityMappings[rawRecord["Type"]]
    activity.GPS = rawRecord["HasPath"] if "HasPath" in rawRecord else False
    # NOTE(review): Stationary is copied directly from "HasPoints" — having
    # trackpoints would normally imply the activity is NOT stationary; confirm
    # whether this should be inverted.
    activity.Stationary = rawRecord[
        "HasPoints"] if "HasPoints" in rawRecord else True
    activity.Notes = rawRecord["Notes"] if "Notes" in rawRecord else None
    activity.Private = rawRecord[
        "Private"] if "Private" in rawRecord else True
    activity.CalculateUID()
    return activity
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
    """List activities from the modern Garmin Connect activity-list service.

    Pages through the search endpoint (100 per request) and returns
    (activities, exclusions). Only the first page is fetched unless
    `exhaustive` is set.
    """
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)
        try:
            res = res.json()
        except ValueError:
            res_txt = res.text # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        for act in res:
            activity = UploadedActivity()
            # stationary activities have movingDuration = None while non-gps static activities have 0.0
            activity.Stationary = act["movingDuration"] is None
            activity.GPS = act["hasPolyline"]
            activity.Private = act["privacy"]["typeKey"] == "private"

            activity_name = act["activityName"]
            # BUG FIX: parenthesize the conditional expression — previously the
            # ternary bound to the whole concatenation, so named activities were
            # logged without the trailing ":".
            logger.debug("Name " + (activity_name if activity_name is not None else "Untitled") + ":")
            if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled": # This doesn't work for internationalized accounts, oh well.
                activity.Name = activity_name

            activity_description = act["description"]
            if activity_description is not None and len(activity_description.strip()):
                activity.Notes = activity_description

            activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
            if act["elapsedDuration"] is not None:
                # elapsedDuration is in milliseconds
                activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"])/1000)
            elif act["duration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))
            else:
                # somehow duration is not defined. Set 1 second then.
                activity.EndTime = activity.StartTime + timedelta(0, 1)
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))

            if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))

            activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page))
        if not exhaustive or len(res) == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
    """List activities from the modern Garmin Connect activity-list service.

    Pages through the search endpoint (100 per request) and returns
    (activities, exclusions). Only the first page is fetched unless
    `exhaustive` is set.
    """
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)
        try:
            res = res.json()
        except ValueError:
            res_txt = res.text # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        for act in res:
            activity = UploadedActivity()
            # stationary activities have movingDuration = None while non-gps static activities have 0.0
            activity.Stationary = act["movingDuration"] is None
            activity.GPS = act["hasPolyline"]
            activity.Private = act["privacy"]["typeKey"] == "private"

            activity_name = act["activityName"]
            # BUG FIX: parenthesize the conditional expression — previously the
            # ternary bound to the whole concatenation, so named activities were
            # logged without the trailing ":".
            logger.debug("Name " + (activity_name if activity_name is not None else "Untitled") + ":")
            if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled": # This doesn't work for internationalized accounts, oh well.
                activity.Name = activity_name

            activity_description = act["description"]
            if activity_description is not None and len(activity_description.strip()):
                activity.Notes = activity_description

            activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
            if act["elapsedDuration"] is not None:
                # elapsedDuration is in milliseconds
                activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"])/1000)
            elif act["duration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))
            else:
                # ROBUSTNESS: "duration" can also be null; previously
                # float(act["duration"]) raised TypeError. Fall back to a
                # 1-second activity instead of crashing.
                activity.EndTime = activity.StartTime + timedelta(0, 1)
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))

            if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))

            activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page))
        if not exhaustive or len(res) == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return ([UploadedActivity], []) for every TrainerRoad career workout.

    The list endpoint omits start/end times, so per-workout metadata is
    fetched individually and memoized in mongo (cachedb.trainerroad_meta),
    keyed by the service record's ExternalID.
    """
    session = self._get_session(record=serviceRecord)
    session.headers.update({"Accept": "application/json"})

    workouts_resp = session.get("https://api.trainerroad.com/api/careerworkouts")
    if workouts_resp.status_code != 200:
        if workouts_resp.status_code == 401:
            raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        raise APIException("Workout listing error")

    cache_doc = cachedb.trainerroad_meta.find_one({"ExternalID": serviceRecord.ExternalID})
    workout_meta_by_id = cache_doc["Workouts"] if cache_doc else {}

    activities = []
    for workout in workouts_resp.json():
        # Un/f their API doesn't provide the start/end times in the list response
        # So we need to pull the extra data, if it's not already cached
        workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
        if workout_id in workout_meta_by_id:
            meta = workout_meta_by_id[workout_id]
        else:
            detail_resp = session.get("https://api.trainerroad.com/api/careerworkouts?guid=%s" % workout["Guid"])
            # We don't need everything
            full_detail = detail_resp.json()
            wanted_keys = ["WorkoutDate", "WorkoutName", "WorkoutNotes", "TotalMinutes", "TotalKM", "AvgWatts", "Kj"]
            meta = {key: full_detail[key] for key in wanted_keys}
            workout_meta_by_id[workout_id] = meta

        activity = UploadedActivity()
        activity.ServiceData = {"ID": int(workout_id)}
        activity.Name = meta["WorkoutName"]
        activity.Notes = meta["WorkoutNotes"]
        activity.Type = ActivityType.Cycling
        # Everything's in UTC
        activity.StartTime = dateutil.parser.parse(meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
        activity.EndTime = activity.StartTime + timedelta(minutes=meta["TotalMinutes"])
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
        activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=meta["Kj"])
        activity.Stationary = False
        activity.GPS = False
        activity.CalculateUID()
        activities.append(activity)

    # Persist any newly-fetched metadata back to the cache document.
    cachedb.trainerroad_meta.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Workouts": workout_meta_by_id}, upsert=True)
    return activities, []
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the user's rides from the Singletracker API.

    Returns (activities, exclusions); exclusions is always empty here.
    """
    activities = []
    exclusions = []
    url = self.SingletrackerDomain + "getRidesByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # BUG FIX: previously referenced the undefined name `resp`, which
        # raised NameError instead of the intended APIException.
        raise APIException("Failed parsing Singletracker list response %s - %s" % (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # startTime/stopTime are unix timestamps; round-trip through strftime
        # drops sub-second precision and keeps the result naive (UTC).
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
        if "stopTime" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["rideId"], "Manual": "False"}
        activity.Name = ride["trackName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.MountainBiking
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["totalDistance"])
        if "avgSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["avgSpeed"])
        activity.Notes = None
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def _populateActivity(self, rawRecord):
    ''' Populate the 1st level of the activity object with all details required for UID from pulsstory API data '''
    activity = UploadedActivity()
    # can stay local + naive here, recipient services can calculate TZ as required
    activity.Name = rawRecord["Name"] if "Name" in rawRecord else None
    activity.StartTime = datetime.strptime(rawRecord["StartTime"], "%Y-%m-%d %H:%M:%S")
    activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(rawRecord["Duration"]))
    activity.EndTime = activity.StartTime + timedelta(seconds=float(rawRecord["Duration"]))
    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
    # Average speed is derived from distance/elapsed time; guard against
    # zero-length activities to avoid division by zero.
    if (activity.EndTime - activity.StartTime).total_seconds() > 0:
        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=activity.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value / ((activity.EndTime - activity.StartTime).total_seconds() / 60 / 60))
    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord["Energy"] if "Energy" in rawRecord else None)
    # Unmapped types leave activity.Type at its default.
    if rawRecord["Type"] in self._activityMappings:
        activity.Type = self._activityMappings[rawRecord["Type"]]
    activity.GPS = rawRecord["HasPath"] if "HasPath" in rawRecord else False
    # NOTE(review): Stationary is copied directly from "HasPoints" — having
    # trackpoints would normally imply the activity is NOT stationary; confirm
    # whether this should be inverted.
    activity.Stationary = rawRecord["HasPoints"] if "HasPoints" in rawRecord else True
    activity.Notes = rawRecord["Notes"] if "Notes" in rawRecord else None
    activity.Private = rawRecord["Private"] if "Private" in rawRecord else True
    activity.CalculateUID()
    return activity
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    """List activities via Garmin Connect's legacy activity-search service (modern proxy).

    Pages through the search results (100 per request) and returns
    (activities, exclusions). Only the first page is fetched unless
    `exhaustive` is set.
    """
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(serviceRecord, lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}))
        try:
            res = res.json()["results"]
        except ValueError:
            # json() raised, so `res` is still the HTTP response object here.
            res_txt = res.text # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res.text))
        if "activities" not in res:
            break # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric UTC offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled": # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # sumDuration comes as "MM:SS".
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))
            if "device" in act and act["device"]["key"] != "unknown":
                devId = DeviceIdentifier.FindMatchingIdentifierOfType(DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                ver_split = act["device"]["key"].split(".")
                ver_maj = None
                ver_min = None
                if len(ver_split) == 4:
                    # 2.90.0.0
                    ver_maj = int(ver_split[0])
                    ver_min = int(ver_split[1])
                activity.Device = Device(devId, verMaj=ver_maj, verMin=ver_min)
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive_start_time=None):
    """List completed workouts from the TrainingPeaks v1 API.

    Walks backwards through 45-day windows starting ~1.5 days in the future,
    stopping at `exhaustive_start_time` when given, or after a single ~20-day
    window otherwise. Returns (activities, exclusions).
    """
    activities = []
    exclusions = []
    headers = self._apiHeaders(svcRecord)
    limitDateFormat = "%Y-%m-%d"
    if exhaustive_start_time:
        totalListEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
        totalListStart = exhaustive_start_time - timedelta(days=1.5)
    else:
        totalListEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
        totalListStart = totalListEnd - timedelta(days=20) # Doesn't really matter
    listStep = timedelta(days=45)
    listEnd = totalListEnd
    listStart = max(totalListStart, totalListEnd - listStep)
    while True:
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        resp = requests.get(
            TRAININGPEAKS_API_BASE_URL + "/v1/workouts/%s/%s" % (
                listStart.strftime(limitDateFormat),
                listEnd.strftime(limitDateFormat)),
            headers=headers)
        for act in resp.json():
            # Skip planned-but-not-completed workouts.
            if not act.get("completed", True):
                continue
            activity = UploadedActivity()
            # Timestamps are kept naive; the TZ field is stripped.
            activity.StartTime = dateutil.parser.parse(act["StartTime"]).replace(tzinfo=None)
            logger.debug("Activity s/t " + str(activity.StartTime))
            # TotalTime is in hours.
            activity.EndTime = activity.StartTime + timedelta(hours=act["TotalTime"])
            activity.Name = act.get("Title", None)
            activity.Notes = act.get("Description", None)
            activity.Type = self._workoutTypeMappings.get(act.get("WorkoutType", "").lower(), ActivityType.Other)
            # Missing stats come through as None and are tolerated by ActivityStatistic.
            activity.Stats.Cadence = ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=act.get("CadenceAverage", None), max=act.get("CadenceMaximum", None))
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=act.get("Distance", None))
            activity.Stats.Elevation = ActivityStatistic(ActivityStatisticUnit.Meters, avg=act.get("ElevationAverage", None), min=act.get("ElevationMinimum", None), max=act.get("ElevationMaximum", None), gain=act.get("ElevationGain", None), loss=act.get("ElevationLoss", None))
            activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=act.get("Energy", None))
            activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=act.get("HeartRateAverage", None), min=act.get("HeartRateMinimum", None), max=act.get("HeartRateMaximum", None))
            activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=act.get("PowerAverage", None), max=act.get("PowerMaximum", None))
            activity.Stats.Temperature = ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=act.get("TemperatureAverage", None), min=act.get("TemperatureMinimum", None), max=act.get("TemperatureMaximum", None))
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=act.get("VelocityAverage", None), max=act.get("VelocityMaximum", None))
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive_start_time:
            break
        # Slide the window one step further back in time.
        listStart -= listStep
        listEnd -= listStep
        if listEnd < totalListStart:
            break
    return activities, exclusions
def _populate_sbr_activity(self, api_sbr_activity, usersettings):
    """Build an UploadedActivity from one basic SBR (swim/bike/run) feed entry.

    Basic entries carry no GPS or sensor streams; when a device file is
    attached, DownloadActivity fills those in later.
    """
    # Example JSON feed (unimportant fields have been removed)
    # [{
    #     "EventId": 63128401,  # Internal ID
    #     "EventType": 3,  # Swim (3), bike (1), or run (2)
    #     "EventDate": "4/22/2016",
    #     "EventTime": "7:44 AM",  # User's time, time zone not specified
    #     "Planned": false,  # Training plan or actual data
    #     "TotalMinutes": 34.97,
    #     "TotalKilometers": 1.55448,
    #     "AverageHeartRate": 125,
    #     "MinimumHeartRate": 100,
    #     "MaximumHeartRate": 150,
    #     "MemberId": 999999,
    #     "MemberUsername": "******",
    #     "HasDeviceUpload": true,
    #     "DeviceUploadFile": "http://beginnertriathlete.com/discussion/storage/workouts/555555/abcd-123.fit",
    #     "RouteName": "",  # Might contain a description of the event
    #     "Comments": "",  # User supplied notes
    # }, ... ]
    activity = UploadedActivity()
    workout_id = api_sbr_activity["EventId"]
    eventType = api_sbr_activity["EventType"]
    eventDate = api_sbr_activity["EventDate"]
    eventTime = api_sbr_activity["EventTime"]
    totalMinutes = api_sbr_activity["TotalMinutes"]
    totalKms = api_sbr_activity["TotalKilometers"]
    averageHr = api_sbr_activity["AverageHeartRate"]
    minimumHr = api_sbr_activity["MinimumHeartRate"]
    maximumHr = api_sbr_activity["MaximumHeartRate"]
    deviceUploadFile = api_sbr_activity["DeviceUploadFile"]
    comments = api_sbr_activity["Comments"]

    # Basic SBR data does not include GPS or sensor data. If this event originated from a device upload,
    # DownloadActivity will find it.
    activity.Stationary = True
    # Same as above- The data might be there, but it's not supplied in the basic activity feed.
    activity.GPS = False
    activity.Notes = comments
    activity.Private = usersettings["Privacy"]
    activity.Type = self._workoutTypeMappings[str(eventType)]

    # Get the user's timezone from their profile; if DownloadActivity finds
    # device data it may overwrite this with something more accurate.
    try:
        activity.TZ = pytz.timezone(usersettings["TimeZone"])
    except pytz.exceptions.UnknownTimeZoneError:
        activity.TZ = pytz.timezone(self._serverDefaultTimezone)

    # BUG FIX: datetime.replace(tzinfo=<pytz zone>) attaches the zone's raw
    # (often LMT) offset; pytz zones must be attached with localize() so the
    # correct UTC offset/DST rule for that date is applied.
    activity.StartTime = activity.TZ.localize(
        dateutil.parser.parse(eventDate + " " + eventTime, dayfirst=False))
    activity.EndTime = activity.StartTime + timedelta(minutes=totalMinutes)

    # We can calculate some metrics from the supplied data.
    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=totalKms)
    activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(averageHr), min=float(minimumHr), max=float(maximumHr))
    activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(totalMinutes * 60))
    activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(totalMinutes * 60))

    # While BT does support laps, the current API doesn't report on them - a limitation that may need to be
    # corrected in a future update. For now, treat manual entries as a single lap. As more and more people upload
    # workouts using devices anyway, this probably matters much less than it once did.
    lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)
    activity.Laps = [lap]

    # Not 100% positive how this is utilized, but it is common for all providers. Detects duplicate downloads?
    activity.CalculateUID()

    # If a device file is attached, we'll get more details about this event in DownloadActivity
    activity.ServiceData = {
        "ID": int(workout_id),
        "DeviceUploadFile": deviceUploadFile
    }
    return activity
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the user's runs from the Setio API.

    Fetches the run list, then a per-run comment; returns
    (activities, exclusions).
    """
    activities = []
    exclusions = []
    url = self.SetioDomain + "getRunsByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # BUG FIX: previously referenced the undefined name `resp`, which
        # raised NameError instead of the intended APIException.
        raise APIException("Failed parsing Setio list response %s - %s" % (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Unix timestamps; round-trip through strftime drops sub-second
        # precision and keeps the result naive (UTC).
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
        if "stopTimeStamp" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["runId"], "Manual": "False"}
        activity.Name = ride["programName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.Running
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["totalDistance"])
        if "averageCadence" in ride:
            activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["averageCadence"]))
        if "averageSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["averageSpeed"])
        # get comment
        url = self.SetioDomain + "getRunComment"
        payload = {"userId": extID, "runId": activity.ServiceData["ActivityID"]}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        streamdata = requests.post(url, data=json.dumps(payload), headers=headers)
        if streamdata.status_code == 500:
            raise APIException("Internal server error")
        if streamdata.status_code == 403:
            raise APIException("No authorization to download activity", block=True,
                               user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        activity.Notes = None
        if streamdata.status_code == 200:  # Ok
            try:
                commentdata = streamdata.json()
            except ValueError:
                # BUG FIX: narrowed from a bare `except:` — only a JSON decode
                # failure means the payload is not JSON.
                raise APIException("Stream data returned is not JSON")
            if "comment" in commentdata:
                activity.Notes = commentdata["comment"]
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    """List activities via Garmin Connect's legacy activity-search service.

    Pages through the search results (100 per request), mapping GC's summary
    stats into activity.Stats and working around several GC quirks (pace
    reported as speed, HR reported in %/zones, -Infinity speeds, inverted
    power min/max). Returns (activities, exclusions); activities without a
    distance figure are excluded as corrupt. Only the first page is fetched
    unless `exhaustive` is set.
    """
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({
            "start": (page - 1) * pageSz,
            "limit": pageSz
        }))
        self._rate_limit()
        res = requests.get(
            "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
            params={
                "start": (page - 1) * pageSz,
                "limit": pageSz
            },
            cookies=cookies)
        res = res.json()["results"]
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            if "sumDistance" not in act:
                exclusions.append(
                    APIExcludeActivity("No distance",
                                       activityId=act["activityId"],
                                       userException=UserException(
                                           UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act:
                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = True
            else:
                activity.Stationary = False
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric UTC offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(
                    float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(
                    float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # sumDuration comes as "MM:SS".
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(
                self._unitMap[act["sumDistance"]["uom"]],
                value=float(act["sumDistance"]["value"]))

            def mapStat(gcKey, statKey, type, useSourceUnits=False):
                # Copy one GC summary stat (if present) into activity.Stats,
                # mapping GC's unit key through self._unitMap.
                nonlocal activity, act
                if gcKey in act:
                    value = float(act[gcKey]["value"])
                    if math.isinf(value):
                        return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                    activity.Stats.__dict__[statKey].update(
                        ActivityStatistic(self._unitMap[act[gcKey]["uom"]],
                                          **({
                                              type: value
                                          })))
                    if useSourceUnits:
                        # Convert the merged stat back into GC's source units.
                        activity.Stats.__dict__[statKey] = activity.Stats.__dict__[statKey].asUnits(
                            self._unitMap[act[gcKey]["uom"]])

            if "sumMovingDuration" in act:
                activity.Stats.MovingTime = ActivityStatistic(
                    ActivityStatisticUnit.Time,
                    value=timedelta(
                        seconds=float(act["sumMovingDuration"]["value"])))
            if "sumDuration" in act:
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Time,
                    value=timedelta(
                        minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1])))
            mapStat("minSpeed", "Speed", "min", useSourceUnits=True)  # We need to suppress conversion here, so we can fix the pace-speed issue below
            mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
            mapStat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
            mapStat("minAirTemperature", "Temperature", "min")
            mapStat("maxAirTemperature", "Temperature", "max")
            mapStat("weightedMeanAirTemperature", "Temperature", "avg")
            mapStat("sumEnergy", "Energy", "value")
            mapStat("maxHeartRate", "HR", "max")
            mapStat("weightedMeanHeartRate", "HR", "avg")
            mapStat("maxRunCadence", "RunCadence", "max")
            mapStat("weightedMeanRunCadence", "RunCadence", "avg")
            mapStat("maxBikeCadence", "Cadence", "max")
            mapStat("weightedMeanBikeCadence", "Cadence", "avg")
            mapStat("minPower", "Power", "min")
            mapStat("maxPower", "Power", "max")
            mapStat("weightedMeanPower", "Power", "avg")
            mapStat("minElevation", "Elevation", "min")
            mapStat("maxElevation", "Elevation", "max")
            mapStat("gainElevation", "Elevation", "gain")
            mapStat("lossElevation", "Elevation", "loss")
            # In Garmin Land, max can be smaller than min for this field :S
            if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
                activity.Stats.Power.Min = None  # To get it to match what the user sees in GC.
            # GC reports per-foot cadence; double to get strides per minute.
            if activity.Stats.RunCadence.Max is not None:
                activity.Stats.RunCadence.Max *= 2
            if activity.Stats.RunCadence.Average is not None:
                activity.Stats.RunCadence.Average *= 2
            # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
            # (a ":" in the display string marks a pace value; invert it back to speed).
            if "minSpeed" in act:
                if ":" in act["minSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Min:
                    activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
            if "maxSpeed" in act:
                if ":" in act["maxSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Max:
                    activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
            if "weightedMeanSpeed" in act:
                if ":" in act["weightedMeanSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Average:
                    activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average
            # Similarly, they do weird stuff with HR at times - %-of-max and zones
            # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
            recalcHR = False
            if "maxHeartRate" in act:
                if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act["maxHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Max = None
                    recalcHR = True
            if "weightedMeanHeartRate" in act:
                if "%" in act["weightedMeanHeartRate"]["withUnitAbbr"] or "z" in act["weightedMeanHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Average = None
                    recalcHR = True
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {
                "ActivityID": act["activityId"],
                "RecalcHR": recalcHR
            }
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return ``(activities, exclusions)`` for a TrainerRoad account.

    Lists career workouts from the TrainerRoad API, then fills in per-workout
    metadata (start time, distance, power, ...) that the list endpoint omits,
    caching that metadata in Mongo keyed by the user's external ID so repeat
    syncs don't re-fetch every workout.

    Raises:
        APIException: on a non-200 list response; 401 is flagged as an
            authorization problem requiring user intervention.
    """
    activities = []
    session = self._get_session(record=serviceRecord)
    session.headers.update({"Accept": "application/json"})
    workouts_resp = session.get(
        "https://api.trainerroad.com/api/careerworkouts")
    if workouts_resp.status_code != 200:
        if workouts_resp.status_code == 401:
            # Credentials no longer valid - block further syncs until reauth.
            raise APIException("Invalid login",
                               block=True,
                               user_exception=UserException(
                                   UserExceptionType.Authorization,
                                   intervention_required=True))
        raise APIException("Workout listing error")
    # Load the cached per-workout metadata for this user, if any.
    cached_record = cachedb.trainerroad_meta.find_one(
        {"ExternalID": serviceRecord.ExternalID})
    if not cached_record:
        cached_workout_meta = {}
    else:
        cached_workout_meta = cached_record["Workouts"]
    workouts = workouts_resp.json()
    for workout in workouts:
        # Un/f their API doesn't provide the start/end times in the list response
        # So we need to pull the extra data, if it's not already cached
        workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
        if workout_id not in cached_workout_meta:
            meta_resp = session.get(
                "https://api.trainerroad.com/api/careerworkouts?guid=%s" %
                workout["Guid"])
            # We don't need everything
            full_meta = meta_resp.json()
            meta = {
                key: full_meta[key]
                for key in [
                    "WorkoutDate", "WorkoutName", "WorkoutNotes",
                    "TotalMinutes", "TotalKM", "AvgWatts", "Kj"
                ]
            }
            cached_workout_meta[workout_id] = meta
        else:
            meta = cached_workout_meta[workout_id]
        activity = UploadedActivity()
        activity.ServiceData = {"ID": int(workout_id)}
        activity.Name = meta["WorkoutName"]
        activity.Notes = meta["WorkoutNotes"]
        activity.Type = ActivityType.Cycling
        # Everything's in UTC
        activity.StartTime = dateutil.parser.parse(
            meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
        activity.EndTime = activity.StartTime + timedelta(
            minutes=meta["TotalMinutes"])
        activity.Stats.Distance = ActivityStatistic(
            ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
        activity.Stats.Power = ActivityStatistic(
            ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
        activity.Stats.Energy = ActivityStatistic(
            ActivityStatisticUnit.Kilojoules, value=meta["Kj"])
        # Trainer workouts: sensor data exists (not stationary) but no GPS track.
        activity.Stationary = False
        activity.GPS = False
        activity.CalculateUID()
        activities.append(activity)
    # Persist any newly-fetched workout metadata back into the cache.
    cachedb.trainerroad_meta.update(
        {"ExternalID": serviceRecord.ExternalID}, {
            "ExternalID": serviceRecord.ExternalID,
            "Workouts": cached_workout_meta
        },
        upsert=True)
    return activities, []
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return ``(activities, exclusions)`` for a MapMyFitness account.

    Pages through ``/v7.1/workout/`` (following ``next`` links only when
    *exhaustive*), then builds one UploadedActivity per workout from the
    aggregate statistics included in the list response.

    Fix applied: the ``*_avg`` branches for HR/cadence/power previously
    *assigned* a brand-new ActivityStatistic, which discarded the min/max
    values merged via ``.update(...)`` just above. They now merge with
    ``.update(...)`` too, so min/max/avg coexist.

    Raises:
        APIException: on a non-200 list response; 401/403 are flagged as
            authorization problems requiring user intervention.
    """
    logger.debug("DownloadActivityList")
    allItems = []
    headers = self._apiHeaders(serviceRecord)
    nextRequest = '/v7.1/workout/?user=' + str(serviceRecord.ExternalID)
    while True:
        response = requests.get("https://api.mapmyfitness.com" + nextRequest,
                                headers=headers)
        if response.status_code != 200:
            if response.status_code == 401 or response.status_code == 403:
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))
            raise APIException(
                "Unable to retrieve activity list " + str(response),
                serviceRecord)
        data = response.json()
        allItems += data["_embedded"]["workouts"]
        nextLink = data["_links"].get("next")
        if not exhaustive or not nextLink:
            break
        nextRequest = nextLink[0]["href"]
    activities = []
    exclusions = []
    for act in allItems:
        # TODO catch exception and add to exclusions
        activity = UploadedActivity()
        activityID = act["_links"]["self"][0]["id"]
        activity.StartTime = datetime.strptime(act["start_datetime"],
                                               "%Y-%m-%dT%H:%M:%S%z")
        activity.Notes = act["notes"] if "notes" in act else None
        # aggregate
        aggregates = act["aggregates"]
        elapsed_time_total = aggregates[
            "elapsed_time_total"] if "elapsed_time_total" in aggregates else "0"
        activity.EndTime = activity.StartTime + timedelta(
            0, round(float(elapsed_time_total)))
        activity.Stats.TimerTime = ActivityStatistic(
            ActivityStatisticUnit.Seconds, value=float(elapsed_time_total))
        # Default MovingTime to the full elapsed time; prefer the explicit
        # active_time_total when the service supplies it.
        activity.Stats.MovingTime = ActivityStatistic(
            ActivityStatisticUnit.Seconds, value=float(elapsed_time_total))
        if "active_time_total" in aggregates:
            activity.Stats.MovingTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds,
                value=float(aggregates["active_time_total"]))
        if "distance_total" in aggregates:
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Meters,
                value=float(aggregates["distance_total"]))
        if "speed_min" in aggregates:
            activity.Stats.Speed.Min = float(aggregates["speed_min"])
        if "speed_max" in aggregates:
            activity.Stats.Speed.Max = float(aggregates["speed_max"])
        if "speed_avg" in aggregates:
            activity.Stats.Speed.Average = float(aggregates["speed_avg"])
        if "heartrate_min" in aggregates:
            activity.Stats.HR.update(
                ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                  min=float(aggregates["heartrate_min"])))
        if "heartrate_max" in aggregates:
            activity.Stats.HR.update(
                ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                  max=float(aggregates["heartrate_max"])))
        if "heartrate_avg" in aggregates:
            # BUGFIX: merge, don't replace - assignment dropped min/max above.
            activity.Stats.HR.update(
                ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                  avg=float(aggregates["heartrate_avg"])))
        if "cadence_min" in aggregates:
            activity.Stats.Cadence.update(
                ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    min=int(aggregates["cadence_min"])))
        if "cadence_max" in aggregates:
            activity.Stats.Cadence.update(
                ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    max=int(aggregates["cadence_max"])))
        if "cadence_avg" in aggregates:
            # BUGFIX: merge, don't replace - assignment dropped min/max above.
            activity.Stats.Cadence.update(
                ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    avg=int(aggregates["cadence_avg"])))
        if "power_min" in aggregates:
            activity.Stats.Power.update(
                ActivityStatistic(ActivityStatisticUnit.Watts,
                                  min=int(aggregates["power_min"])))
        if "power_max" in aggregates:
            activity.Stats.Power.update(
                ActivityStatistic(ActivityStatisticUnit.Watts,
                                  max=int(aggregates["power_max"])))
        if "power_avg" in aggregates:
            # BUGFIX: merge, don't replace - assignment dropped min/max above.
            activity.Stats.Power.update(
                ActivityStatistic(ActivityStatisticUnit.Watts,
                                  avg=int(aggregates["power_avg"])))
        activityTypeLink = act["_links"].get("activity_type")
        activityTypeID = activityTypeLink[0][
            "id"] if activityTypeLink is not None else None
        privacyLink = act["_links"].get("privacy")
        privacyID = privacyLink[0][
            "id"] if privacyLink is not None else None
        # NOTE(review): privacy id "0" is presumed to mean "private" here -
        # confirm against the MapMyFitness privacy endpoint.
        activity.Private = privacyID == "0"
        activity.Type = self._resolveActivityType(activityTypeID, headers)
        activity.ServiceData = {
            "ActivityID": activityID,
            "activityTypeID": activityTypeID,
            "privacyID": privacyID
        }
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return ``(activities, exclusions)`` for a Garmin Connect account.

    Pages the legacy activity-search-service (100 per page, all pages when
    *exhaustive*), mapping each result's summary statistics onto an
    UploadedActivity. Includes several work-arounds for Garmin quirks:
    pace-vs-speed unit confusion, %-of-max / zone heart rates, doubled run
    cadence, and min > max power.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()
        res = requests.get(
            "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
            params={"start": (page - 1) * pageSz, "limit": pageSz},
            cookies=cookies,
        )
        res = res.json()["results"]
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            # Distance-less activities can't be synced meaningfully - exclude them.
            if "sumDistance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                continue
            activity = UploadedActivity()
            if (
                "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            ):  # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = True
            else:
                activity.Stationary = False
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric UTC offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if (
                len(act["activityName"]["value"].strip())
                and act["activityName"]["value"] != "Untitled"
            ):  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000)
            )
            # Prefer elapsed duration, then mm:ss duration, then the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"]))
                )
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                )
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000)
                )
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(
                self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"])
            )

            # Helper: copy one Garmin summary field onto activity.Stats.<statKey>
            # as the given bound (min/max/avg/value), converting via _unitMap.
            def mapStat(gcKey, statKey, type, useSourceUnits=False):
                nonlocal activity, act
                if gcKey in act:
                    value = float(act[gcKey]["value"])
                    if math.isinf(value):
                        return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                    activity.Stats.__dict__[statKey].update(
                        ActivityStatistic(self._unitMap[act[gcKey]["uom"]], **({type: value}))
                    )
                    if useSourceUnits:
                        activity.Stats.__dict__[statKey] = activity.Stats.__dict__[statKey].asUnits(
                            self._unitMap[act[gcKey]["uom"]]
                        )

            if "sumMovingDuration" in act:
                activity.Stats.MovingTime = ActivityStatistic(
                    ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["sumMovingDuration"]["value"]))
                )
            if "sumDuration" in act:
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Time,
                    value=timedelta(
                        minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                    ),
                )
            mapStat(
                "minSpeed", "Speed", "min", useSourceUnits=True
            )  # We need to suppress conversion here, so we can fix the pace-speed issue below
            mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
            mapStat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
            mapStat("minAirTemperature", "Temperature", "min")
            mapStat("maxAirTemperature", "Temperature", "max")
            mapStat("weightedMeanAirTemperature", "Temperature", "avg")
            mapStat("sumEnergy", "Energy", "value")
            mapStat("maxHeartRate", "HR", "max")
            mapStat("weightedMeanHeartRate", "HR", "avg")
            mapStat("maxRunCadence", "RunCadence", "max")
            mapStat("weightedMeanRunCadence", "RunCadence", "avg")
            mapStat("maxBikeCadence", "Cadence", "max")
            mapStat("weightedMeanBikeCadence", "Cadence", "avg")
            mapStat("minPower", "Power", "min")
            mapStat("maxPower", "Power", "max")
            mapStat("weightedMeanPower", "Power", "avg")
            mapStat("minElevation", "Elevation", "min")
            mapStat("maxElevation", "Elevation", "max")
            mapStat("gainElevation", "Elevation", "gain")
            mapStat("lossElevation", "Elevation", "loss")
            # In Garmin Land, max can be smaller than min for this field :S
            if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
                activity.Stats.Power.Min = None
            # To get it to match what the user sees in GC.
            if activity.Stats.RunCadence.Max is not None:
                activity.Stats.RunCadence.Max *= 2
            if activity.Stats.RunCadence.Average is not None:
                activity.Stats.RunCadence.Average *= 2
            # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
            # (a ":" in the display string means it was really a pace - invert to get speed).
            if "minSpeed" in act:
                if ":" in act["minSpeed"][
                    "withUnitAbbr"] and activity.Stats.Speed.Min:
                    activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
            if "maxSpeed" in act:
                if ":" in act["maxSpeed"][
                    "withUnitAbbr"] and activity.Stats.Speed.Max:
                    activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
            if "weightedMeanSpeed" in act:
                if ":" in act["weightedMeanSpeed"][
                    "withUnitAbbr"] and activity.Stats.Speed.Average:
                    activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average
            # Similarly, they do weird stuff with HR at times - %-of-max and zones
            # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
            recalcHR = False
            if "maxHeartRate" in act:
                if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act[
                    "maxHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Max = None
                    recalcHR = True
            if "weightedMeanHeartRate" in act:
                if "%" in act["weightedMeanHeartRate"][
                    "withUnitAbbr"] or "z" in act[
                        "weightedMeanHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Average = None
                    recalcHR = True
            activity.Type = self._resolveActivityType(
                act["activityType"]["key"])
            activity.CalculateUID()
            # RecalcHR tells the downloader to derive HR stats from the stream later.
            activity.ServiceData = {
                "ActivityID": act["activityId"],
                "RecalcHR": recalcHR
            }
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def _populate_sbr_activity(self, api_sbr_activity, usersettings):
    """Build an UploadedActivity from one BeginnerTriathlete SBR feed entry.

    Basic SBR entries carry only summary data (times, distance, HR); any GPS
    or sensor detail is recovered later by DownloadActivity via the attached
    device-upload file, if present.

    Fix applied: the start time was localized with
    ``datetime.replace(tzinfo=<pytz zone>)``, which pytz documents as
    producing wrong (LMT-based) offsets; it now uses ``tz.localize(...)``.

    Args:
        api_sbr_activity: one entry from the SBR JSON feed (see example below).
        usersettings: the user's profile settings; "Privacy" and "TimeZone"
            keys are read here.

    Returns:
        UploadedActivity with stats, a single synthetic lap, and ServiceData
        holding the event ID and device-upload URL.
    """
    # Example JSON feed (unimportant fields have been removed)
    # [{
    #   "EventId": 63128401,             # Internal ID
    #   "EventType": 3,                  # Swim (3), bike (1), or run (2)
    #   "EventDate": "4/22/2016",
    #   "EventTime": "7:44 AM",          # User's time, time zone not specified
    #   "Planned": false,                # Training plan or actual data
    #   "TotalMinutes": 34.97,
    #   "TotalKilometers": 1.55448,
    #   "AverageHeartRate": 125,
    #   "MinimumHeartRate": 100,
    #   "MaximumHeartRate": 150,
    #   "MemberId": 999999,
    #   "MemberUsername": "******",
    #   "HasDeviceUpload": true,
    #   "DeviceUploadFile": "http://beginnertriathlete.com/discussion/storage/workouts/555555/abcd-123.fit",
    #   "RouteName": "",                 # Might contain a description of the event
    #   "Comments": "",                  # User supplied notes
    # }, ... ]
    activity = UploadedActivity()
    workout_id = api_sbr_activity["EventId"]
    eventType = api_sbr_activity["EventType"]
    eventDate = api_sbr_activity["EventDate"]
    eventTime = api_sbr_activity["EventTime"]
    totalMinutes = api_sbr_activity["TotalMinutes"]
    totalKms = api_sbr_activity["TotalKilometers"]
    averageHr = api_sbr_activity["AverageHeartRate"]
    minimumHr = api_sbr_activity["MinimumHeartRate"]
    maximumHr = api_sbr_activity["MaximumHeartRate"]
    deviceUploadFile = api_sbr_activity["DeviceUploadFile"]
    comments = api_sbr_activity["Comments"]

    # Basic SBR data does not include GPS or sensor data. If this event originated from a device upload,
    # DownloadActivity will find it.
    activity.Stationary = True
    # Same as above- The data might be there, but it's not supplied in the basic activity feed.
    activity.GPS = False
    activity.Notes = comments
    activity.Private = usersettings["Privacy"]
    activity.Type = self._workoutTypeMappings[str(eventType)]

    # Get the user's timezone from their profile; fall back to the server default
    # if the profile value isn't a recognized zone name.
    try:
        activity.TZ = pytz.timezone(usersettings["TimeZone"])
    except pytz.exceptions.UnknownTimeZoneError:
        activity.TZ = pytz.timezone(self._serverDefaultTimezone)

    # BUGFIX: use pytz's localize() - attaching a pytz zone with
    # datetime.replace(tzinfo=...) yields historical LMT offsets.
    activity.StartTime = activity.TZ.localize(
        dateutil.parser.parse(eventDate + " " + eventTime, dayfirst=False))
    activity.EndTime = activity.StartTime + timedelta(minutes=totalMinutes)

    # We can calculate some metrics from the supplied data.
    activity.Stats.Distance = ActivityStatistic(
        ActivityStatisticUnit.Kilometers, value=totalKms)
    activity.Stats.HR = ActivityStatistic(
        ActivityStatisticUnit.BeatsPerMinute,
        avg=float(averageHr),
        min=float(minimumHr),
        max=float(maximumHr))
    activity.Stats.MovingTime = ActivityStatistic(
        ActivityStatisticUnit.Seconds, value=float(totalMinutes * 60))
    activity.Stats.TimerTime = ActivityStatistic(
        ActivityStatisticUnit.Seconds, value=float(totalMinutes * 60))

    # While BT does support laps, the current API doesn't report on them - a limitation that may need to be
    # corrected in a future update. For now, treat manual entries as a single lap. As more and more people upload
    # workouts using devices anyway, this probably matters much less than it once did.
    lap = Lap(stats=activity.Stats,
              startTime=activity.StartTime,
              endTime=activity.EndTime)
    activity.Laps = [lap]

    # UID lets the sync engine detect duplicates across services.
    activity.CalculateUID()

    # If a device file is attached, we'll get more details about this event in DownloadActivity
    activity.ServiceData = {
        "ID": int(workout_id),
        "DeviceUploadFile": deviceUploadFile
    }
    return activity
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return ``(activities, exclusions)`` for a Garmin Connect account.

    Pages the legacy activity-search-service with a session, retrying once
    with a fresh (uncached) login on HTTP 403. Only summary fields are
    mapped; stream data is left to DownloadActivity.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    session = self._get_session(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()
        retried_auth = False
        while True:
            res = session.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz})
            # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
            if res.status_code == 403 and not retried_auth:
                retried_auth = True
                session = self._get_session(serviceRecord, skip_cache=True)
            else:
                break
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s" % res.status_code)
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric UTC offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
            # Prefer elapsed duration, then mm:ss duration, then the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive_start_time=None):
    """Return ``(activities, exclusions)`` for a TrainingPeaks account.

    The TP workouts endpoint is queried by date range, so listing walks
    backwards in 45-day windows: from ~tomorrow back to
    *exhaustive_start_time* (minus slack) when given, otherwise a single
    ~20-day window. Planned-but-uncompleted workouts are skipped.
    """
    activities = []
    exclusions = []
    headers = self._apiHeaders(svcRecord)
    limitDateFormat = "%Y-%m-%d"
    # 1.5-day slack on each bound because the API dates' timezone is unknown.
    if exhaustive_start_time:
        totalListEnd = datetime.now() + timedelta(
            days=1.5)  # Who knows which TZ it's in
        totalListStart = exhaustive_start_time - timedelta(days=1.5)
    else:
        totalListEnd = datetime.now() + timedelta(
            days=1.5)  # Who knows which TZ it's in
        totalListStart = totalListEnd - timedelta(
            days=20)  # Doesn't really matter
    listStep = timedelta(days=45)
    listEnd = totalListEnd
    listStart = max(totalListStart, totalListEnd - listStep)
    while True:
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        resp = requests.get(TRAININGPEAKS_API_BASE_URL +
                            "/v1/workouts/%s/%s" %
                            (listStart.strftime(limitDateFormat),
                             listEnd.strftime(limitDateFormat)),
                            headers=headers)
        for act in resp.json():
            # Skip training-plan entries that were never actually performed.
            if not act.get("completed", True):
                continue
            activity = UploadedActivity()
            # Localized timestamp; the tz marker is stripped and left naive here.
            activity.StartTime = dateutil.parser.parse(
                act["StartTime"]).replace(tzinfo=None)
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.EndTime = activity.StartTime + timedelta(
                hours=act["TotalTime"])
            activity.Name = act.get("Title", None)
            activity.Notes = act.get("Description", None)
            activity.Type = self._workoutTypeMappings.get(
                act.get("WorkoutType", "").lower(), ActivityType.Other)
            activity.Stats.Cadence = ActivityStatistic(
                ActivityStatisticUnit.RevolutionsPerMinute,
                avg=act.get("CadenceAverage", None),
                max=act.get("CadenceMaximum", None))
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Meters,
                value=act.get("Distance", None))
            activity.Stats.Elevation = ActivityStatistic(
                ActivityStatisticUnit.Meters,
                avg=act.get("ElevationAverage", None),
                min=act.get("ElevationMinimum", None),
                max=act.get("ElevationMaximum", None),
                gain=act.get("ElevationGain", None),
                loss=act.get("ElevationLoss", None))
            activity.Stats.Energy = ActivityStatistic(
                ActivityStatisticUnit.Kilojoules,
                value=act.get("Energy", None))
            activity.Stats.HR = ActivityStatistic(
                ActivityStatisticUnit.BeatsPerMinute,
                avg=act.get("HeartRateAverage", None),
                min=act.get("HeartRateMinimum", None),
                max=act.get("HeartRateMaximum", None))
            activity.Stats.Power = ActivityStatistic(
                ActivityStatisticUnit.Watts,
                avg=act.get("PowerAverage", None),
                max=act.get("PowerMaximum", None))
            activity.Stats.Temperature = ActivityStatistic(
                ActivityStatisticUnit.DegreesCelcius,
                avg=act.get("TemperatureAverage", None),
                min=act.get("TemperatureMinimum", None),
                max=act.get("TemperatureMaximum", None))
            activity.Stats.Speed = ActivityStatistic(
                ActivityStatisticUnit.MetersPerSecond,
                avg=act.get("VelocityAverage", None),
                max=act.get("VelocityMaximum", None))
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive_start_time:
            break
        # Slide the window one step further into the past.
        listStart -= listStep
        listEnd -= listStep
        if listEnd < totalListStart:
            break
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Return ``(activities, exclusions)`` for a Setio account.

    Fetches the user's runs via getRunsByUserId, then fetches each run's
    comment via getRunComment to fill Notes.

    Fix applied: the list-parse error handler referenced an undefined name
    ``resp`` (NameError); it now uses the actual ``response`` variable. The
    bare ``except:`` around the comment JSON parse is narrowed to
    ``ValueError``, which is what ``.json()`` raises on bad JSON.

    Raises:
        APIException: on unparseable list responses, or 500/403 from the
            comment endpoint (403 flagged as an authorization problem).
    """
    activities = []
    exclusions = []
    url = self.SetioDomain + "getRunsByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # BUGFIX: was "resp.status_code"/"resp.text" - a NameError.
        raise APIException("Failed parsing Setio list response %s - %s" %
                           (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Timestamps are epoch seconds; round-trip through a formatted string
        # yields a naive UTC datetime truncated to whole seconds.
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(
                ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
            "%Y-%m-%d %H:%M:%S")
        if "stopTimeStamp" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(
                    ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {
            "ActivityID": ride["runId"],
            "Manual": "False"
        }
        activity.Name = ride["programName"]
        logger.debug("\tActivity s/t %s: %s" %
                     (activity.StartTime, activity.Name))
        activity.Type = ActivityType.Running
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Meters, value=ride["totalDistance"])
        if "averageCadence" in ride:
            activity.Stats.Cadence.update(
                ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    avg=ride["averageCadence"]))
        if "averageSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(
                ActivityStatisticUnit.MetersPerSecond,
                avg=ride["averageSpeed"])
        # get comment
        url = self.SetioDomain + "getRunComment"
        payload = {
            "userId": extID,
            "runId": activity.ServiceData["ActivityID"]
        }
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        streamdata = requests.post(url,
                                   data=json.dumps(payload),
                                   headers=headers)
        if streamdata.status_code == 500:
            raise APIException("Internal server error")
        if streamdata.status_code == 403:
            raise APIException("No authorization to download activity",
                               block=True,
                               user_exception=UserException(
                                   UserExceptionType.Authorization,
                                   intervention_required=True))
        activity.Notes = None
        if streamdata.status_code == 200:  # Ok
            try:
                commentdata = streamdata.json()
            except ValueError:  # narrowed from a bare except
                raise APIException("Stream data returned is not JSON")
            if "comment" in commentdata:
                activity.Notes = commentdata["comment"]
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return ``(activities, exclusions)`` for a Garmin Connect account.

    Cookie-authenticated variant of the legacy activity-search-service
    lister: pages 100 activities at a time (all pages when *exhaustive*),
    excluding distance-less activities as corrupt.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()
        res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s" % res.status_code)
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            # Distance-less activities are treated as corrupt and excluded.
            if "sumDistance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"], userException=UserException(UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act:  # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = True
            else:
                activity.Stationary = False
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric UTC offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
            # Prefer elapsed duration, then mm:ss duration, then the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": act["activityId"]}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return ``(activities, exclusions)`` for a Garmin Connect account.

    Modern-proxy variant: pages the activity-search-service through
    ``_request_with_reauth`` (which handles session refresh), and also
    resolves the recording device into ``activity.Device`` when reported.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({
            "start": (page - 1) * pageSz,
            "limit": pageSz
        }))
        res = self._request_with_reauth(
            serviceRecord,
            lambda session: session.get(
                "https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities",
                params={
                    "start": (page - 1) * pageSz,
                    "limit": pageSz
                }))
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" %
                               (res.status_code, res.text))
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric UTC offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(
                    float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip(
            )) and act["activityName"][
                    "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(
                    float(act["beginTimestamp"]["millis"]) / 1000))
            # Prefer elapsed duration, then mm:ss duration, then the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) +
                         " on page " + str(page))
            activity.AdjustTZ()
            if "sumDistance" in act and float(
                    act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]],
                    value=float(act["sumDistance"]["value"]))
            # Resolve the recording device; version is parsed from a dotted
            # 4-part key (e.g. "2.90.0.0" -> major 2, minor 90).
            if "device" in act and act["device"]["key"] != "unknown":
                devId = DeviceIdentifier.FindMatchingIdentifierOfType(
                    DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                ver_split = act["device"]["key"].split(".")
                ver_maj = None
                ver_min = None
                if len(ver_split) == 4:
                    # 2.90.0.0
                    ver_maj = int(ver_split[0])
                    ver_min = int(ver_split[1])
                activity.Device = Device(devId,
                                         verMaj=ver_maj,
                                         verMin=ver_min)
            activity.Type = self._resolveActivityType(
                act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " +
                     str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Fetch the user's trips from Ride With GPS.

    Returns (activities, exclusions); trips lacking a distance or a
    duration are excluded as corrupt.
    """

    def fold_stat_triple(src, stat_obj, prefix, unit):
        # Merge any present (and truthy) <prefix>_max/_min/_avg fields into stat_obj.
        for bound in ("max", "min", "avg"):
            field = "%s_%s" % (prefix, bound)
            if src.get(field):
                stat_obj.update(ActivityStatistic(unit, **{bound: float(src[field])}))

    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    # They don't actually support paging right now, for whatever reason
    activities = []
    exclusions = []
    params = self._add_auth_params({}, record=serviceRecord)
    response = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
    trips = response.json()
    # Apparently some API users are seeing this new result format - I'm not
    if type(trips) is dict:
        trips = trips.get("results", [])
    if trips == []:
        return [], []  # No activities

    for trip in trips:
        # Guard clauses: a usable trip needs both a distance and a non-empty duration.
        if "distance" not in trip:
            exclusions.append(APIExcludeActivity("No distance", activity_id=trip["id"], user_exception=UserException(UserExceptionType.Corrupt)))
            continue
        if "duration" not in trip or not trip["duration"]:
            exclusions.append(APIExcludeActivity("No duration", activity_id=trip["id"], user_exception=UserException(UserExceptionType.Corrupt)))
            continue

        activity = UploadedActivity()
        logger.debug("Name " + trip["name"] + ":")
        if len(trip["name"].strip()):
            activity.Name = trip["name"]
        if len(trip["description"].strip()):
            activity.Notes = trip["description"]

        activity.GPS = trip["is_gps"]
        activity.Stationary = not activity.GPS  # I think
        # 0 = public, 1 = private, 2 = friends
        activity.Private = trip["visibility"] == 1

        activity.StartTime = dateutil.parser.parse(trip["departed_at"])
        try:
            activity.TZ = pytz.timezone(trip["time_zone"])
        except pytz.exceptions.UnknownTimeZoneError:
            # Sometimes the time_zone returned isn't quite what we'd like it to be.
            # So, just pull the offset from the datetime.
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)
        activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)  # Overwrite dateutil's sillyness
        activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(trip["duration"]))
        logger.debug("Activity s/t " + str(activity.StartTime))
        activity.AdjustTZ()

        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, float(trip["distance"]))
        fold_stat_triple(trip, activity.Stats.Power, "watts", ActivityStatisticUnit.Watts)
        fold_stat_triple(trip, activity.Stats.Speed, "speed", ActivityStatisticUnit.KilometersPerHour)
        fold_stat_triple(trip, activity.Stats.Cadence, "cad", ActivityStatisticUnit.RevolutionsPerMinute)
        fold_stat_triple(trip, activity.Stats.HR, "hr", ActivityStatisticUnit.BeatsPerMinute)
        if trip.get("elevation_gain"):
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, gain=float(trip["elevation_gain"])))
        if trip.get("elevation_loss"):
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, loss=float(trip["elevation_loss"])))

        # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" would be correct
        activity.Type = ActivityType.Cycling
        activity.CalculateUID()
        activity.ServiceData = {"ActivityID": trip["id"]}
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Fetch the user's rides from Singletracker.

    Returns (activities, exclusions).

    Raises APIException when the list response is not valid JSON.
    """
    activities = []
    exclusions = []
    url = self.SingletrackerDomain + "getRidesByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }

    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # BUGFIX: the handler previously referenced an undefined name `resp`
        # (resp.status_code / resp.text), so a parse failure raised NameError
        # instead of the intended APIException.
        raise APIException(
            "Failed parsing Singletracker list response %s - %s" %
            (response.status_code, response.text))

    for ride in reqdata:
        activity = UploadedActivity()
        # startTime/stopTime are unix timestamps; keep naive UTC datetimes,
        # truncated to whole seconds (equivalent to the old strftime/strptime
        # round-trip, minus the string detour).
        activity.StartTime = datetime.utcfromtimestamp(ride["startTime"]).replace(microsecond=0)
        if "stopTime" in ride:
            activity.EndTime = datetime.utcfromtimestamp(ride["stopTime"]).replace(microsecond=0)

        activity.ServiceData = {
            "ActivityID": ride["rideId"],
            "Manual": "False"
        }
        activity.Name = ride["trackName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.MountainBiking
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Meters, value=ride["totalDistance"])
        if "avgSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(
                ActivityStatisticUnit.MetersPerSecond, avg=ride["avgSpeed"])
        activity.Notes = None
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return (activities, exclusions) from Garmin Connect's legacy search API.

    Pages 100 records at a time; only the first page is fetched unless
    ``exhaustive`` is truthy. Activities with no distance are excluded as
    corrupt. Retries once with a fresh session on HTTP 403.

    Raises APIException when a page response is not parseable JSON.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    session = self._get_session(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))

        self._rate_limit()
        retried_auth = False
        while True:
            res = session.get(
                "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                params={"start": (page - 1) * pageSz, "limit": pageSz})
            # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
            if res.status_code == 403 and not retried_auth:
                retried_auth = True
                session = self._get_session(serviceRecord, skip_cache=True)
            else:
                break
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s" % res.status_code)
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            if "sumDistance" not in act:
                # BUGFIX: the keyword names were activityId/userException,
                # which do not match the activity_id/user_exception names used
                # by every other APIExcludeActivity call in this file.
                exclusions.append(
                    APIExcludeActivity("No distance",
                                       activity_id=act["activityId"],
                                       user_exception=UserException(
                                           UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"

            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the raw offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]

            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # Split "MM:SS" once instead of twice.
                mins, secs = act["sumDuration"]["minutesSeconds"].split(":")
                activity.EndTime = activity.StartTime + timedelta(minutes=float(mins), seconds=float(secs))
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))

            activity.AdjustTZ()

            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(
                self._unitMap[act["sumDistance"]["uom"]],
                value=float(act["sumDistance"]["value"]))

            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)

        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions