def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Fetch the user's activity list from the OpenFit endpoint.

    Pages through ``fitnessActivities.json`` (following ``next`` links only
    when ``exhaustive``) and returns ``(activities, exclusions)``.
    """
    cookies = self._get_cookies(serviceRecord)
    activities = []
    exclusions = []
    pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
    while True:
        logger.debug("Req against " + pageUri)
        res = requests.get(pageUri, cookies=cookies)
        res = res.json()
        for act in res["items"]:
            activity = UploadedActivity()
            if len(act["name"].strip()):
                activity.Name = act["name"]
            activity.StartTime = dateutil.parser.parse(act["start_time"])
            # Convert the dateutil timezone into a pytz fixed-offset timezone.
            # BUGFIX: use the public utcoffset() instead of the private
            # tzinfo._offset — dateutil's tzutc has no _offset attribute, so
            # the old code raised AttributeError on UTC timestamps.
            activity.TZ = pytz.FixedOffset(activity.StartTime.utcoffset().total_seconds() / 60)
            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Distance = float(act["total_distance"])
            activity.Type = self._activityMappings[act["type"].lower()]
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityURI": act["uri"]}]
            activities.append(activity)
        # Stop unless we're doing an exhaustive sync and there is a next page.
        if not exhaustive or "next" not in res or not len(res["next"]):
            break
        else:
            pageUri = res["next"]
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Fetch the user's activity list from the OpenFit endpoint.

    Pages through ``fitnessActivities.json`` (following ``next`` links only
    when ``exhaustive``) and returns ``(activities, exclusions)``.
    Activities whose type cannot be mapped are reported as exclusions.
    """
    cookies = self._get_cookies(record=serviceRecord)
    activities = []
    exclusions = []
    pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
    while True:
        logger.debug("Req against " + pageUri)
        res = requests.get(pageUri, cookies=cookies)
        res = res.json()
        for act in res["items"]:
            activity = UploadedActivity()
            activity.ServiceData = {"ActivityURI": act["uri"]}
            # Only set a Name when the service provided a non-blank one.
            if len(act["name"].strip()):
                activity.Name = act["name"]
            activity.StartTime = dateutil.parser.parse(act["start_time"])
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.
            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
            activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["duration"])))  # OpenFit says this excludes paused times.
            # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
            if activity.TZ == pytz.utc:
                # So, we get the first location in the activity and calculate the TZ from that.
                try:
                    firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                except APIExcludeActivity:
                    # Couldn't pull the activity detail — leave the TZ as UTC.
                    pass
                else:
                    activity.CalculateTZ(firstLocation)
                    activity.AdjustTZ()
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["total_distance"]))
            types = [x.strip().lower() for x in act["type"].split(":")]
            types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
            activity.Type = None
            # Take the first (most specific) type we know how to map.
            for type_key in types:
                if type_key in self._activityMappings:
                    activity.Type = self._activityMappings[type_key]
                    break
            if not activity.Type:
                exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"]))
                continue
            activity.CalculateUID()
            activities.append(activity)
        # Stop unless we're doing an exhaustive sync and there is a next page.
        if not exhaustive or "next" not in res or not len(res["next"]):
            break
        else:
            pageUri = res["next"]
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Fetch the user's activity list from the OpenFit endpoint.

    Pages through ``fitnessActivities.json`` (following ``next`` links only
    when ``exhaustive``) and returns ``(activities, exclusions)``.
    Activities whose type cannot be mapped are reported as exclusions.
    """
    cookies = self._get_cookies(record=serviceRecord)
    activities = []
    exclusions = []
    pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
    while True:
        logger.debug("Req against " + pageUri)
        res = requests.get(pageUri, cookies=cookies)
        res = res.json()
        for act in res["items"]:
            activity = UploadedActivity()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityURI": act["uri"]}]
            # Only set a Name when the service provided a non-blank one.
            if len(act["name"].strip()):
                activity.Name = act["name"]
            activity.StartTime = dateutil.parser.parse(act["start_time"])
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.
            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
            # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
            if activity.TZ == pytz.utc:
                # So, we get the first location in the activity and calculate the TZ from that.
                try:
                    firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                except APIExcludeActivity:
                    # Couldn't pull the activity detail — leave the TZ as UTC.
                    pass
                else:
                    activity.CalculateTZ(firstLocation)
                    activity.AdjustTZ()
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Distance = float(act["total_distance"])
            types = [x.strip().lower() for x in act["type"].split(":")]
            types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
            activity.Type = None
            # Take the first (most specific) type we know how to map.
            for type_key in types:
                if type_key in self._activityMappings:
                    activity.Type = self._activityMappings[type_key]
                    break
            if not activity.Type:
                exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"]))
                continue
            activity.CalculateUID()
            activities.append(activity)
        # Stop unless we're doing an exhaustive sync and there is a next page.
        if not exhaustive or "next" not in res or not len(res["next"]):
            break
        else:
            pageUri = res["next"]
    return activities, exclusions
def _create_activity(self, activity_data):
    """Translate one activity record (dict from the service API) into an
    UploadedActivity.

    Returns the populated activity with its UID calculated.
    """
    activity = UploadedActivity()
    # NOTE(review): the inversion looks suspicious — "has-route" true would
    # yield GPS=False. Preserved as-is; confirm the flag's semantics upstream.
    activity.GPS = not activity_data["has-route"]
    if "detailed-sport-info" in activity_data and activity_data["detailed-sport-info"] in self._reverse_activity_type_mappings:
        activity.Type = self._reverse_activity_type_mappings[activity_data["detailed-sport-info"]]
    else:
        activity.Type = ActivityType.Other
    activity.StartTime = pytz.utc.localize(isodate.parse_datetime(activity_data["start-time"]))
    activity.EndTime = activity.StartTime + isodate.parse_duration(activity_data["duration"])
    distance = activity_data["distance"] if "distance" in activity_data else None
    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(distance) if distance else None)
    # BUGFIX: "heart-rate" may be missing entirely; the old code then did
    # `"average" in None`, raising TypeError. Guard before the inner lookups.
    hr_data = activity_data.get("heart-rate")
    avg_hr = hr_data.get("average") if hr_data else None
    max_hr = hr_data.get("maximum") if hr_data else None
    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(avg_hr) if avg_hr else None, max=float(max_hr) if max_hr else None))
    calories = activity_data["calories"] if "calories" in activity_data else None
    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(calories) if calories else None)
    activity.ServiceData = {"ActivityID": activity_data["id"]}
    logger.debug("\tActivity s/t {}: {}".format(activity.StartTime, activity.Type))
    activity.CalculateUID()
    return activity
def _populateActivity(self, rawRecord):
    """Build the first level of an UploadedActivity — everything needed for
    a UID — from pulsstory API data.

    Times stay local and naive; recipient services derive the TZ themselves.
    """
    act = UploadedActivity()
    act.Name = rawRecord.get("Name")
    act.StartTime = datetime.strptime(rawRecord["StartTime"], "%Y-%m-%d %H:%M:%S")
    duration_sec = float(rawRecord["Duration"])
    act.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration_sec)
    act.EndTime = act.StartTime + timedelta(seconds=duration_sec)
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
    elapsed = (act.EndTime - act.StartTime).total_seconds()
    if elapsed > 0:
        # Average speed = total distance over elapsed time.
        km = act.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        act.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=km / (elapsed / 60 / 60))
    act.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("Energy"))
    if rawRecord["Type"] in self._activityMappings:
        act.Type = self._activityMappings[rawRecord["Type"]]
    act.GPS = rawRecord.get("HasPath", False)
    act.Stationary = rawRecord.get("HasPoints", True)
    act.Notes = rawRecord.get("Notes")
    act.Private = rawRecord.get("Private", True)
    act.CalculateUID()
    return act
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Translate the service's activity listing into UploadedActivity objects.

    Returns ``(activities, exclusions)``; unsupported activity types are
    reported as exclusions and skipped.
    """
    activities = []
    exclusions = []
    for act in self._getActivities(serviceRecord, exhaustive=exhaustive):
        activity = UploadedActivity()
        activity.StartTime = dateutil.parser.parse(act['startDateTimeLocal'])
        activity.EndTime = activity.StartTime + timedelta(seconds=act['duration'])
        _type = self._activityMappings.get(act['activityType'])
        if not _type:
            exclusions.append(APIExcludeActivity("Unsupported activity type %s" % act['activityType'], activity_id=act["activityId"], user_exception=UserException(UserExceptionType.Other)))
            # BUGFIX: previously the loop fell through here and appended the
            # activity with Type=None despite having just excluded it.
            continue
        activity.ServiceData = {"ActivityID": act['activityId']}
        activity.Type = _type
        activity.Notes = act['notes']
        # Treat any non-zero/non-null start latitude as "has GPS".
        activity.GPS = bool(act.get('startLatitude'))
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=act['distance'])
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=act['calories'])
        if 'heartRateMin' in act:
            activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, min=act['heartRateMin'], max=act['heartRateMax'], avg=act['heartRateAverage'])
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=act['duration'])
        if 'temperature' in act:
            activity.Stats.Temperature = ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=act['temperature'])
        activity.CalculateUID()
        logger.debug("\tActivity s/t %s", activity.StartTime)
        activities.append(activity)
    return activities, exclusions
def _create_activity(self, data):
    """Translate one API activity record (dict) into an UploadedActivity,
    with its UID calculated."""
    act = UploadedActivity()
    act.Name = data.get("name")
    # start_at arrives as a UTC "...Z" timestamp; localize to make it tz-aware.
    act.StartTime = pytz.utc.localize(datetime.strptime(data.get("start_at"), "%Y-%m-%dT%H:%M:%SZ"))
    act.EndTime = act.StartTime + timedelta(0, float(data.get("duration")))
    sport_id = data.get("sport_id")
    if sport_id:
        act.Type = self._reverseActivityMappings.get(int(sport_id), ActivityType.Other)
    else:
        act.Type = ActivityType.Other
    distance = data.get("distance")
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(distance) if distance else None)
    act.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(data.get("total_time_in_seconds")))
    avg_speed, max_speed = data.get("average_speed"), data.get("max_speed")
    act.Stats.Speed = ActivityStatistic(
        ActivityStatisticUnit.KilometersPerHour,
        avg=float(avg_speed) if avg_speed else None,
        max=float(max_speed) if max_speed else None)
    avg_hr, max_hr = data.get("average_heart_rate"), data.get("maximum_heart_rate")
    act.Stats.HR.update(ActivityStatistic(
        ActivityStatisticUnit.BeatsPerMinute,
        avg=float(avg_hr) if avg_hr else None,
        max=float(max_hr) if max_hr else None))
    calories = data.get("calories")
    act.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(calories) if calories else None)
    act.ServiceData = {"ActivityID": data.get("id")}
    logger.debug("\tActivity s/t {}: {}".format(act.StartTime, act.Type))
    act.CalculateUID()
    return act
def _populateActivity(self, rawRecord):
    """Build the first level of an UploadedActivity — everything needed for
    a UID — from RK API data.

    Times stay local and naive; recipient services derive the TZ themselves.
    """
    act = UploadedActivity()
    act.StartTime = datetime.strptime(rawRecord["start_time"], "%a, %d %b %Y %H:%M:%S")
    duration_sec = float(rawRecord["duration"])
    act.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration_sec)  # P. sure this is moving time
    act.EndTime = act.StartTime + timedelta(seconds=duration_sec)  # inaccurate with pauses - excluded from hash
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["total_distance"])
    elapsed = (act.EndTime - act.StartTime).total_seconds()
    if elapsed > 0:
        # Average speed = total distance over elapsed time (mirrors RK's own calculation).
        km = act.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        act.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=km / (elapsed / 60 / 60))
    act.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("total_calories"))
    if rawRecord["type"] in self._activityMappings:
        act.Type = self._activityMappings[rawRecord["type"]]
    act.GPS = rawRecord["has_path"]
    act.CalculateUID()
    return act
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Garmin Connect activity-search endpoint.

    Fetches pages of ``pageSz`` activities until a page comes back empty
    (or after the first page when not ``exhaustive``); returns
    ``(activities, exclusions)``.
    """
    #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)
        try:
            res = res.json()
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        for act in res:
            activity = UploadedActivity()
            # stationary activities have movingDuration = None while non-gps static activities have 0.0
            activity.Stationary = act["movingDuration"] is None
            activity.GPS = act["hasPolyline"]
            activity.Private = act["privacy"]["typeKey"] == "private"
            activity_name = act["activityName"]
            # NOTE(review): due to operator precedence the conditional applies to
            # the whole concatenation, so this logs just "Untitled:" for unnamed
            # activities rather than prefixing "Name " — log-only quirk.
            logger.debug("Name " + activity_name if activity_name is not None else "Untitled" + ":")
            if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = activity_name
            activity_description = act["description"]
            if activity_description is not None and len(activity_description.strip()):
                activity.Notes = activity_description
            # startTimeGMT is a naive UTC timestamp.
            activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
            # Prefer elapsedDuration (milliseconds), fall back to duration (seconds).
            if act["elapsedDuration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"])/1000)
            elif act["duration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))
            else:
                # somehow duration is not defined. Set 1 second then.
                activity.EndTime = activity.StartTime + timedelta(0, 1)
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))
            activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page))
        if not exhaustive or len(res) == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Nike+ activity listing.

    Incomplete (live-tracked) activities are reported as non-permanent
    exclusions. Returns ``(activities, exclusions)``.
    """
    session = self._get_session(serviceRecord)
    list_params = self._with_auth(session, {"count": 20, "offset": 1})
    activities = []
    exclusions = []
    while True:
        list_resp = session.get("https://api.nike.com/me/sport/activities", params=list_params)
        list_resp = list_resp.json()
        for act in list_resp["data"]:
            activity = UploadedActivity()
            activity.ServiceData = {"ID": act["activityId"]}
            if act["status"] != "COMPLETE":
                # Still in progress — retry on a later sync (permanent=False).
                exclusions.append(
                    APIExcludeActivity(
                        "Not complete",
                        activity_id=act["activityId"],
                        permanent=False,
                        user_exception=UserException(
                            UserExceptionType.LiveTracking)))
                continue
            # startTime is UTC; force the tzinfo accordingly.
            activity.StartTime = dateutil.parser.parse(
                act["startTime"]).replace(tzinfo=pytz.utc)
            activity.EndTime = activity.StartTime + self._durationToTimespan(
                act["metricSummary"]["duration"])
            tz_name = act["activityTimeZone"]
            # They say these are all IANA standard names - they aren't
            if tz_name in self._timezones:
                tz_name = self._timezones[tz_name]
            activity.TZ = pytz.timezone(tz_name)
            if act["activityType"] in self._activityMappings:
                activity.Type = self._activityMappings[act["activityType"]]
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Kilometers,
                value=float(act["metricSummary"]["distance"]))
            activity.Stats.Strides = ActivityStatistic(
                ActivityStatisticUnit.Strides,
                value=int(act["metricSummary"]["steps"]))
            activity.Stats.Energy = ActivityStatistic(
                ActivityStatisticUnit.Kilocalories,
                value=float(act["metricSummary"]["calories"]))
            activity.CalculateUID()
            activities.append(activity)
        # Stop on an empty page, or after the first page when not exhaustive.
        if len(list_resp["data"]) == 0 or not exhaustive:
            break
        list_params["offset"] += list_params["count"]
    return activities, exclusions
def DownloadActivityList(self, svcRec, exhaustive=False):
    """Walk the user's Dropbox sync folder and list activity files.

    Uses a per-user Mongo cache keyed on file revision so unchanged files
    don't have to be re-downloaded and re-parsed. Returns
    ``(activities, exclusions)``.
    """
    dbcl = self._getClient(svcRec)
    if not svcRec.Authorization["Full"]:
        syncRoot = "/"
    else:
        syncRoot = svcRec.Config["SyncRoot"]
    cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
    if cache is None:
        cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
    if "Structure" not in cache:
        cache["Structure"] = []
    # Refresh the cached folder tree in place.
    self._folderRecurse(cache["Structure"], dbcl, syncRoot)
    activities = []
    exclusions = []
    for dir in cache["Structure"]:
        for file in dir["Files"]:
            path = file["Path"]
            if svcRec.Authorization["Full"]:
                relPath = path.replace(syncRoot, "", 1)
            else:
                relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api
            existing = [(k, x) for k, x in cache["Activities"].items() if x["Path"] == relPath]  # path is relative to syncroot to reduce churn if they relocate it
            existing = existing[0] if existing else None
            if existing is not None:
                existUID, existing = existing
            if existing and existing["Rev"] == file["Rev"]:
                # don't need entire activity loaded here, just UID
                act = UploadedActivity()
                act.UID = existUID
                act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                    act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
            else:
                logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                # get the full activity
                try:
                    act, rev = self._getActivity(svcRec, dbcl, path)
                except APIExcludeActivity as e:
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(e)
                    continue
                # Laps aren't needed for listing; drop them to save memory.
                del act.Laps
                act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                cache["Activities"][act.UID] = {"Rev": rev, "Path": relPath, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
            # Infer the activity type from the file's path/name tags.
            tagRes = self._tagActivity(relPath)
            act.ServiceData = {"Path": path, "Tagged": tagRes is not None}
            act.Type = tagRes if tagRes is not None else ActivityType.Other
            logger.debug("Activity s/t %s" % act.StartTime)
            activities.append(act)
    # Persist the refreshed structure + activity metadata cache.
    cachedb.dropbox_cache.update({"ExternalID": svcRec.ExternalID}, cache, upsert=True)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Garmin Connect activity-search endpoint.

    Fetches pages of ``pageSz`` activities until a page comes back empty
    (or after the first page when not ``exhaustive``); returns
    ``(activities, exclusions)``.
    """
    #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)
        try:
            res = res.json()
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        for act in res:
            activity = UploadedActivity()
            # stationary activities have movingDuration = None while non-gps static activities have 0.0
            activity.Stationary = act["movingDuration"] is None
            activity.GPS = act["hasPolyline"]
            activity.Private = act["privacy"]["typeKey"] == "private"
            activity_name = act["activityName"]
            logger.debug("Name " + activity_name if activity_name is not None else "Untitled" + ":")
            if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = activity_name
            activity_description = act["description"]
            if activity_description is not None and len(activity_description.strip()):
                activity.Notes = activity_description
            # startTimeGMT is a naive UTC timestamp.
            activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
            # Prefer elapsedDuration (milliseconds), fall back to duration (seconds).
            # BUGFIX: "duration" can also be None, which crashed float(None);
            # fall back to a 1-second span as the sibling implementation does.
            if act["elapsedDuration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"])/1000)
            elif act["duration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))
            else:
                # somehow duration is not defined. Set 1 second then.
                activity.EndTime = activity.StartTime + timedelta(0, 1)
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))
            activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page))
        if not exhaustive or len(res) == 0:
            break
        else:
            page += 1
    return activities, exclusions
def _populateActivity(self, rawRecord):
    """Populate the first level of the activity object — everything needed
    for a UID — from API data."""
    act = UploadedActivity()
    act.StartTime = dateutil.parser.parse(rawRecord["start"])
    act.EndTime = act.StartTime + timedelta(seconds=rawRecord["duration"])
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["distance"])
    # An activity without GPS data is treated as stationary.
    has_gps = rawRecord["hasGps"]
    act.GPS = has_gps
    act.Stationary = not has_gps
    act.CalculateUID()
    return act
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Page backwards through the Strava v3 activity list.

    Uses the ``before`` timestamp parameter to walk older and older pages
    (only when ``exhaustive``); returns ``(activities, exclusions)``.

    Raises:
        APIException: on a 401 from Strava (authorization revoked).
    """
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
        self._logAPICall("list", (svcRecord.ExternalID, str(earliestDate)), resp.status_code == 401)
        if resp.status_code == 401:
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            activity.TZ = pytz.timezone(re.sub("^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want""
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            # Track the oldest start time seen to page further back next loop.
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                logger.debug("\t\tNo pts")
                continue  # stationary activity - no syncing for now
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]
            # Reverse-map Strava's type string back to our activity type.
            actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
            if not len(actType):
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"]))
                logger.debug("\t\tUnknown activity")
                continue
            activity.Type = actType[0]
            activity.Distance = ride["distance"]
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        # Stop after one page unless exhaustive; earliestDate=None means the
        # page held no rides to page back from.
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Page backwards through the Strava v3 activity list.

    Uses the ``before`` timestamp parameter to walk older and older pages
    (only when ``exhaustive``); returns ``(activities, exclusions)``.
    """
    # grumble grumble strava api sucks grumble grumble
    # http://app.strava.com/api/v1/rides?athleteId=id
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            activity.TZ = pytz.timezone(re.sub("^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want""
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            # Track the oldest start time seen to page further back next loop.
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                continue  # stationary activity - no syncing for now
            if ride["start_latlng"] == ride["end_latlng"]:
                exclusions.append(APIExcludeActivity("Only one waypoint", activityId=ride["id"]))
                continue  # Only one waypoint, one would assume.
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]
            # Reverse-map Strava's type string back to our activity type.
            actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
            if not len(actType):
                exclusions.append(APIExcludeActivity("Unsupported activity type", activityId=ride["id"]))
                continue
            activity.Type = actType[0]
            activity.Distance = ride["distance"]
            activity.Name = ride["name"]
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        # Stop after one page unless exhaustive; earliestDate=None means the
        # page held no rides to page back from.
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the legacy Garmin Connect activity-search service.

    Activities without GPS points or distance are reported as exclusions.
    Returns ``(activities, exclusions)``.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 50
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
        res = res.json()["results"]
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            # NOTE(review): `is` compares identity, not equality, on the
            # lat/lng values here — preserved as written.
            if "beginLatitude" not in act or "endLatitude" not in act or (act["beginLatitude"] is act["endLatitude"] and act["beginLongitude"] is act["endLongitude"]):
                exclusions.append(APIExcludeActivity("No points", activityId=act["activityId"]))
                continue
            if "sumDistance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                continue
            activity = UploadedActivity()
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Fall back to the numeric hour offset Garmin supplies.
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                activity.Name = act["activityName"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
            # Prefer elapsed duration, then the mm:ss duration string, then the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Distance = float(act["sumDistance"]["value"]) * (1.60934 if act["sumDistance"]["uom"] == "mile" else 1) * 1000  # In meters...
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["activityId"]}]
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List TrainerRoad career workouts.

    The list endpoint lacks start/end times, so per-workout metadata is
    fetched on demand and cached in Mongo keyed by workout ID. Returns
    ``(activities, [])`` — this service produces no exclusions.

    Raises:
        APIException: on a non-200 listing response (401 blocks the user).
    """
    activities = []
    session = self._get_session(record=serviceRecord)
    session.headers.update({"Accept": "application/json"})
    workouts_resp = session.get("https://api.trainerroad.com/api/careerworkouts")
    if workouts_resp.status_code != 200:
        if workouts_resp.status_code == 401:
            raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        raise APIException("Workout listing error")
    cached_record = cachedb.trainerroad_meta.find_one({"ExternalID": serviceRecord.ExternalID})
    if not cached_record:
        cached_workout_meta = {}
    else:
        cached_workout_meta = cached_record["Workouts"]
    workouts = workouts_resp.json()
    for workout in workouts:
        # Un/f their API doesn't provide the start/end times in the list response
        # So we need to pull the extra data, if it's not already cached
        workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
        if workout_id not in cached_workout_meta:
            meta_resp = session.get("https://api.trainerroad.com/api/careerworkouts?guid=%s" % workout["Guid"])
            # We don't need everything
            full_meta = meta_resp.json()
            meta = {key: full_meta[key] for key in ["WorkoutDate", "WorkoutName", "WorkoutNotes", "TotalMinutes", "TotalKM", "AvgWatts", "Kj"]}
            cached_workout_meta[workout_id] = meta
        else:
            meta = cached_workout_meta[workout_id]
        activity = UploadedActivity()
        activity.ServiceData = {"ID": int(workout_id)}
        activity.Name = meta["WorkoutName"]
        activity.Notes = meta["WorkoutNotes"]
        activity.Type = ActivityType.Cycling
        # Everything's in UTC
        activity.StartTime = dateutil.parser.parse(meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
        activity.EndTime = activity.StartTime + timedelta(minutes=meta["TotalMinutes"])
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
        activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=meta["Kj"])
        # Trainer workouts: moving, but never GPS-tracked.
        activity.Stationary = False
        activity.GPS = False
        activity.CalculateUID()
        activities.append(activity)
    # Persist the (possibly extended) per-workout metadata cache.
    cachedb.trainerroad_meta.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Workouts": cached_workout_meta}, upsert=True)
    return activities, []
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List workouts from Motivato's sync endpoint.

    Fix: the original set a ``retried_auth`` flag and rebuilt the session on a
    403, but never re-issued the request - it went on to parse the 403 body.
    Now the POST is actually retried once with a fresh (uncached) session.

    Returns (activities, exclusions).
    """
    logger.debug("Checking motivato premium state")
    self._applyPaymentState(serviceRecord)
    logger.debug("Motivato DownloadActivityList")
    session = self._get_session(record=serviceRecord)
    activities = []
    exclusions = []
    self._rate_limit()
    #headers = {'X-App-With-Tracks': "true"}
    headers = {}
    res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)
    if res.status_code == 403:
        # Session probably expired - re-authenticate once and retry the request.
        session = self._get_session(serviceRecord, skip_cache=True)
        res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)
    try:
        respList = res.json()
    except ValueError:
        res_txt = res.text  # So it can capture in the log message
        raise APIException("Parse failure in Motivato list resp: %s" % res.status_code)
    for actInfo in respList:
        # Entries without a duration can't produce a usable activity - skip them.
        if "duration" in actInfo:
            duration = self._durationToSeconds(actInfo["duration"])
        else:
            continue
        activity = UploadedActivity()
        # Start time is a date plus an optional time-of-day meta; default to midnight.
        if "time_start" in actInfo["metas"]:
            startTimeStr = actInfo["training_at"] + " " + actInfo["metas"]["time_start"]
        else:
            startTimeStr = actInfo["training_at"] + " 00:00:00"
        activity.StartTime = self._parseDateTime(startTimeStr)
        activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
        activity.Type = self._reverseActivityMappings[actInfo["discipline_id"]]
        activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)
        if "distance" in actInfo:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance"]))
        #activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerSecond, value=1.0/float(actInfo["metas"]["pace"]))
        activity.ServiceData = {"WorkoutID": int(actInfo["id"])}
        activity.CalculateUID()
        logger.debug("Generated UID %s" % activity.UID)
        activities.append(activity)
    return activities, exclusions
def _populateActivity(self, rawRecord):
    """Populate the 1st level of the activity object with all details required for UID from RK API data.

    Times stay local and naive here; recipient services calculate TZ as required.
    """
    act = UploadedActivity()
    start = datetime.strptime(rawRecord["start_time"], "%a, %d %b %Y %H:%M:%S")
    act.StartTime = start
    # End time is inaccurate with pauses - it is excluded from the UID hash anyway.
    act.EndTime = start + timedelta(seconds=round(rawRecord["duration"]))
    act.Distance = rawRecord["total_distance"]
    mapped_type = self._activityMappings.get(rawRecord["type"])
    if mapped_type is not None:
        act.Type = mapped_type
    act.CalculateUID()
    return act
def DownloadActivityList(self, svcRec, exhaustive=False):
    """List activity files stored in the user's Dropbox.

    Walks the folder tree (via _folderRecurse) and, for each file, either
    reuses cached UID/times keyed by the file's Dropbox revision, or downloads
    and parses the full activity. Cache lives in ``cachedb.dropbox_cache``.
    Returns (activities, exclusions).
    """
    dbcl = self._getClient(svcRec)
    # App-folder-scoped tokens are always rooted at "/"; full-access tokens use the configured root.
    if not svcRec.Authorization["Full"]:
        syncRoot = "/"
    else:
        syncRoot = svcRec.Config["SyncRoot"]
    cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
    if cache is None:
        cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
    if "Structure" not in cache:
        cache["Structure"] = []
    # Refresh the cached folder listing in place.
    self._folderRecurse(cache["Structure"], dbcl, syncRoot)
    activities = []
    exclusions = []
    for dir in cache["Structure"]:
        for file in dir["Files"]:
            path = file["Path"]
            if svcRec.Authorization["Full"]:
                relPath = path.replace(syncRoot, "", 1)
            else:
                relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api
            # path is relative to syncroot to reduce churn if they relocate it
            existing = [(k, x) for k, x in cache["Activities"].items() if x["Path"] == relPath]
            existing = existing[0] if existing else None
            if existing is not None:
                existUID, existing = existing
            if existing and existing["Rev"] == file["Rev"]:
                # Unchanged since last sync: don't need entire activity loaded here, just UID
                act = UploadedActivity()
                act.UID = existUID
                act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                    act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
            else:
                # New or revised file: get the full activity
                try:
                    act, rev = self._getActivity(dbcl, path)
                except APIExcludeActivity as e:
                    exclusions.append(e)
                    continue
                cache["Activities"][act.UID] = {"Rev": rev, "Path": relPath, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
            act.UploadedTo = [{"Connection": svcRec, "Path": path}]
            # Activity type is inferred from keywords in the relative path, when possible.
            tagRes = self._tagActivity(relPath)
            act.Tagged = tagRes is not None
            act.Type = tagRes if tagRes is not None else ActivityType.Other
            activities.append(act)
    cachedb.dropbox_cache.update({"ExternalID": svcRec.ExternalID}, cache, upsert=True)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Scrape the Polar Personal Trainer calendar list view for exercises.

    Exhaustive syncs request everything back to 1961; otherwise only the
    last ~60 days. Returns (activities, exclusions).
    """
    # TODO find out polar session timeout
    session = self._get_session(serviceRecord)
    activities = []
    exclusions = []
    # Column positions within each calendar list row.
    INFO_CELL = 0
    TIME_CELL = 3
    DATE_CELL = 4
    RESULT_TYPE_CELL = 5
    SPORT_TYPE_CELL = 6
    date_format = "{d.day}.{d.month}.{d.year}"
    end_date = datetime.now() + timedelta(days=1.5)
    start_date = date(1961, 4, 12) if exhaustive else end_date - timedelta(days=60)
    res = session.get(
        "https://polarpersonaltrainer.com/user/calendar/inc/listview.ftl",
        params={
            "startDate": date_format.format(d=start_date),
            "endDate": date_format.format(d=end_date),
        },
    )
    listing = BeautifulSoup(res.text, "html.parser")
    for row in listing.select("tr[class^=listRow]"):
        cells = row.findAll("td")
        item_type = cells[INFO_CELL].find("input", {"name": "calendarItemTypes"})
        # Skip fitness data whatever
        if item_type["value"] != "OptimizedExercise":
            continue
        activity = UploadedActivity()
        activity.ExternalID = cells[INFO_CELL].find("input", {"name": "calendarItem"})["value"]
        activity.Name = cells[INFO_CELL].find("input", {"name": "calendarItemName"})["value"]
        time_text = "{} {}".format(cells[DATE_CELL].contents[0], cells[TIME_CELL].contents[0])
        activity.StartTime = pytz.utc.localize(datetime.strptime(time_text, "%d.%m.%Y %H:%M"))
        result_type_text = cells[RESULT_TYPE_CELL].contents[0]
        if "Strength Training Result" in result_type_text:
            activity.Type = ActivityType.StrengthTraining
            # This type of activity always stationary
            activity.Stationary = True
        else:
            sport_text = cells[SPORT_TYPE_CELL].contents[0]
            activity.Type = self._reverseActivityMappings.get(sport_text.lower(), ActivityType.Other)
        logger.debug("\tActivity s/t {}: {}".format(activity.StartTime, activity.Type))
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the user's Ride With GPS trips.

    Fix: the exclusion branches referenced ``act["activityId"]``, a key that
    does not exist in RWGPS trip records (the rest of this method uses
    ``act["id"]``) - excluding a trip would have raised KeyError instead.

    Returns (activities, exclusions).
    """
    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    page = 1
    pageSz = 50
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        # TODO: take advantage of their nice ETag support
        params = {"offset": (page - 1) * pageSz, "limit": pageSz}
        params = self._add_auth_params(params, record=serviceRecord)
        res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
        res = res.json()
        total_pages = math.ceil(int(res["results_count"]) / pageSz)
        for act in res["results"]:
            if "first_lat" not in act or "last_lat" not in act:
                exclusions.append(APIExcludeActivity("No points", activityId=act["id"]))
                continue
            if "distance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["id"]))
                continue
            activity = UploadedActivity()
            activity.TZ = pytz.timezone(act["time_zone"])
            logger.debug("Name " + act["name"] + ":")
            if len(act["name"].strip()):
                activity.Name = act["name"]
            # departed_at is UTC; shifted to local below via AdjustTZ.
            activity.StartTime = pytz.utc.localize(datetime.strptime(act["departed_at"], "%Y-%m-%dT%H:%M:%SZ"))
            activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            activity.Distance = float(act["distance"])  # This value is already in meters...
            # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
            activity.Type = ActivityType.Cycling
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"]}]
            activities.append(activity)
        logger.debug("Finished page {} of {}".format(page, total_pages))
        if not exhaustive or total_pages == page or total_pages == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List rides for this user from the Singletracker backend.

    Fixes: the JSON-parse error path referenced an undefined name ``resp``
    (NameError would mask the real failure) - it now uses ``response``; the
    strftime/strptime round-trip on timestamps is replaced by the equivalent
    microsecond truncation.

    Returns (activities, exclusions).
    """
    activities = []
    exclusions = []
    url = self.SingletrackerDomain + "getRidesByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        raise APIException("Failed parsing Singletracker list response %s - %s" % (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Timestamps are epoch seconds (UTC); keep them naive, truncated to whole seconds.
        activity.StartTime = datetime.utcfromtimestamp(ride["startTime"]).replace(microsecond=0)
        if "stopTime" in ride:
            activity.EndTime = datetime.utcfromtimestamp(ride["stopTime"]).replace(microsecond=0)
        activity.ServiceData = {"ActivityID": ride["rideId"], "Manual": "False"}
        activity.Name = ride["trackName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.MountainBiking
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["totalDistance"])
        if "avgSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["avgSpeed"])
        activity.Notes = None
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def _populateActivity(self, rawRecord):
    """Populate the 1st level of the activity object with all details required for UID from RK API data.

    Times stay local and naive here; recipient services calculate TZ as required.
    """
    activity = UploadedActivity()
    activity.StartTime = datetime.strptime(rawRecord["start_time"], "%a, %d %b %Y %H:%M:%S")
    moving = timedelta(seconds=float(rawRecord["duration"]))
    activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=moving)  # P. sure this is moving time
    # Inaccurate with pauses - excluded from the UID hash.
    activity.EndTime = activity.StartTime + moving
    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["total_distance"])
    elapsed_sec = (activity.EndTime - activity.StartTime).total_seconds()
    if elapsed_sec > 0:
        # I'm fairly sure this is how the RK calculation works. I remember I removed something
        # exactly like this from ST.mobi, but I trust them more than I trust myself to get the speed right.
        distance_km = activity.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=distance_km / (elapsed_sec / 60 / 60))
    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("total_calories"))
    mapped = self._activityMappings.get(rawRecord["type"])
    if mapped is not None:
        activity.Type = mapped
    activity.CalculateUID()
    return activity
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Nike+ activity feed, 20 records at a time.

    In-progress activities are excluded (revisited on a later sync).
    Returns (activities, exclusions).
    """
    session = self._get_session(serviceRecord)
    query = self._with_auth(session, {"count": 20, "offset": 1})
    activities = []
    exclusions = []
    while True:
        page = session.get("https://api.nike.com/me/sport/activities", params=query).json()
        for record in page["data"]:
            activity = UploadedActivity()
            activity.ServiceData = {"ID": record["activityId"]}
            if record["status"] != "COMPLETE":
                exclusions.append(APIExcludeActivity("Not complete", activityId=record["activityId"], permanent=False, userException=UserException(UserExceptionType.LiveTracking)))
                continue
            metrics = record["metricSummary"]
            activity.StartTime = dateutil.parser.parse(record["startTime"]).replace(tzinfo=pytz.utc)
            activity.EndTime = activity.StartTime + self._durationToTimespan(metrics["duration"])
            # They say these are all IANA standard names - they aren't
            tz_name = record["activityTimeZone"]
            tz_name = self._timezones.get(tz_name, tz_name)
            activity.TZ = pytz.timezone(tz_name)
            mapped = self._activityMappings.get(record["activityType"])
            if mapped is not None:
                activity.Type = mapped
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(metrics["distance"]))
            activity.Stats.Strides = ActivityStatistic(ActivityStatisticUnit.Strides, value=int(metrics["steps"]))
            activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(metrics["calories"]))
            activity.CalculateUID()
            activities.append(activity)
        # Stop on an empty page, or after one page for non-exhaustive syncs.
        if not page["data"] or not exhaustive:
            break
        query["offset"] += query["count"]
    return activities, exclusions
def _populateActivity(self, rawRecord):
    """Populate the 1st level of the activity object with all details required for UID from pulsstory API data.

    Times stay local and naive here; recipient services calculate TZ as required.
    """
    activity = UploadedActivity()
    activity.Name = rawRecord.get("Name")
    activity.StartTime = datetime.strptime(rawRecord["StartTime"], "%Y-%m-%d %H:%M:%S")
    seconds = float(rawRecord["Duration"])
    activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=seconds)
    activity.EndTime = activity.StartTime + timedelta(seconds=seconds)
    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
    elapsed = (activity.EndTime - activity.StartTime).total_seconds()
    if elapsed > 0:
        km = activity.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=km / (elapsed / 60 / 60))
    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("Energy"))
    mapped = self._activityMappings.get(rawRecord["Type"])
    if mapped is not None:
        activity.Type = mapped
    activity.GPS = rawRecord.get("HasPath", False)
    # NOTE(review): Stationary mirrors HasPoints directly - "has trackpoints" implying
    # "stationary" looks inverted; confirm against the upload side before changing.
    activity.Stationary = rawRecord.get("HasPoints", True)
    activity.Notes = rawRecord.get("Notes")
    activity.Private = rawRecord.get("Private", True)
    activity.CalculateUID()
    return activity
def _create_post(self, data):
    """Build a stationary Report activity from a blog-post XML fragment."""
    post_xml = data.find("post")
    post = UploadedActivity()
    post.Type = ActivityType.Report
    post.Stationary = True
    post.Name = post_xml.get("title")
    post.NotesExt = post_xml.get("formatted_body")
    created = datetime.strptime(post_xml.get("created_at"), "%Y-%m-%dT%H:%M:%SZ")
    post.StartTime = pytz.utc.localize(created)
    #need to set EndTime for consistency
    post.EndTime = post.StartTime
    post.ServiceData = {"ActivityID": post_xml.get("id")}
    if int(post_xml.get("photos_count")) > 0:
        for photo_xml in data.findall("photos/photo"):
            post.PhotoUrls.append({"id": photo_xml.get("id"), "url": photo_xml.get("image_original")})
    logger.debug("\tPost s/t {}: {}".format(post.StartTime, post.Type))
    post.CalculateUID()
    return post
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List DecathlonCoach activities in ~6-month windows.

    Fix: the three byte-identical status-code branches (400/401/403) are
    collapsed into one membership test with identical behavior.

    Returns (activities, exclusions).
    """
    activities = []
    exclusions = []
    now = datetime.now()
    prev = now - timedelta(6 * 365 / 12)  # ~6 months per query window
    period = []
    aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month)
    period.append(aperiod)
    if exhaustive:
        # Walk backwards ~10 years in 6-month windows.
        for _ in range(20):
            now = prev
            prev = now - timedelta(6 * 365 / 12)
            aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month)
            period.append(aperiod)
    for dateInterval in period:
        headers = self._getAuthHeaders(svcRecord)
        resp = requests.get(self.ApiEndpoint + "/users/" + str(svcRecord.ExternalID) + "/activities.xml?date=" + dateInterval, headers=headers)
        if resp.status_code in (400, 401, 403):
            logger.info(resp.content)
            raise APIException(
                "No authorization to retrieve activity list",
                block=True,
                user_exception=UserException(
                    UserExceptionType.Authorization,
                    intervention_required=True))
        root = xml.fromstring(resp.content)
        logger.info("\t\t nb activity : " + str(len(root.findall('.//ID'))))
        for ride in root.iter('ACTIVITY'):
            activity = UploadedActivity()
            activity.TZ = pytz.timezone("UTC")
            startdate = ride.find('.//STARTDATE').text + ride.find('.//TIMEZONE').text
            datebase = parse(startdate)
            activity.StartTime = datebase  #pytz.utc.localize(datebase)
            activity.ServiceData = {
                "ActivityID": ride.find('ID').text,
                "Manual": ride.find('MANUAL').text
            }
            logger.info("\t\t DecathlonCoach Activity ID : " + ride.find('ID').text)
            if ride.find('SPORTID').text not in self._reverseActivityTypeMappings:
                exclusions.append(
                    APIExcludeActivity("Unsupported activity type %s" % ride.find('SPORTID').text,
                                       activity_id=ride.find('ID').text,
                                       user_exception=UserException(UserExceptionType.Other)))
                logger.info("\t\tDecathlonCoach Unknown activity, sport id " + ride.find('SPORTID').text + " is not mapped")
                continue
            activity.Type = self._reverseActivityTypeMappings[ride.find('SPORTID').text]
            # Stats are delivered as a flat list of (id, value) pairs keyed by the unit map.
            for val in ride.iter('VALUE'):
                if val.get('id') == self._unitMap["duration"]:
                    activity.EndTime = activity.StartTime + timedelta(0, int(val.text))
                if val.get('id') == self._unitMap["distance"]:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=int(val.text))
                if val.get('id') == self._unitMap["kcal"]:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(val.text))
                if val.get('id') == self._unitMap["speedaverage"]:
                    meterperhour = int(val.text)
                    meterpersecond = meterperhour / 3600
                    activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=meterpersecond, max=None)
            # Fall back to a generated name when the label is empty or absent.
            if not ride.find('LIBELLE').text:
                txtdate = startdate.split(' ')
                activity.Name = "Sport DecathlonCoach " + txtdate[0]
            else:
                activity.Name = ride.find('LIBELLE').text
            activity.Private = False
            # NOTE(review): these assign the raw XML text (strings), not booleans - downstream
            # truthiness works since any non-empty string is truthy, but confirm intent.
            activity.Stationary = ride.find('MANUAL').text
            activity.GPS = ride.find('ABOUT').find('TRACK').text
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Endomondo workouts feed via OAuth.

    Fix: a stray ``print()`` debug statement is replaced with
    ``logger.debug`` to match the logging convention used everywhere else.

    Returns (activities, exclusions).
    """
    oauthSession = self._oauthSession(serviceRecord)
    activities = []
    exclusions = []
    page_url = "https://api.endomondo.com/api/1/workouts"
    while True:
        resp = oauthSession.get(page_url)
        try:
            respList = resp.json()["data"]
        except ValueError:
            raise APIException("Error decoding activity list resp %s %s" % (resp.status_code, resp.text))
        for actInfo in respList:
            activity = UploadedActivity()
            activity.StartTime = self._parseDate(actInfo["start_time"])
            logger.debug("Activity s/t %s" % activity.StartTime)
            if "is_tracking" in actInfo and actInfo["is_tracking"]:
                # Still being recorded - revisit on a later sync.
                exclusions.append(APIExcludeActivity("Not complete", activityId=actInfo["id"], permanent=False, userException=UserException(UserExceptionType.LiveTracking)))
                continue
            if "end_time" in actInfo:
                activity.EndTime = self._parseDate(actInfo["end_time"])
            if actInfo["sport"] in self._activityMappings:
                activity.Type = self._activityMappings[actInfo["sport"]]
            # "duration" is timer time
            if "duration_total" in actInfo:
                activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(actInfo["duration_total"]))
            if "distance_total" in actInfo:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance_total"]))
            if "calories_total" in actInfo:
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(actInfo["calories_total"]))
            activity.Stats.Elevation = ActivityStatistic(ActivityStatisticUnit.Meters)
            if "altitude_max" in actInfo:
                activity.Stats.Elevation.Max = float(actInfo["altitude_max"])
            if "altitude_min" in actInfo:
                activity.Stats.Elevation.Min = float(actInfo["altitude_min"])
            if "total_ascent" in actInfo:
                activity.Stats.Elevation.Gain = float(actInfo["total_ascent"])
            if "total_descent" in actInfo:
                activity.Stats.Elevation.Loss = float(actInfo["total_descent"])
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour)
            if "speed_max" in actInfo:
                activity.Stats.Speed.Max = float(actInfo["speed_max"])
            if "heart_rate_avg" in actInfo:
                activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(actInfo["heart_rate_avg"]))
            if "heart_rate_max" in actInfo:
                activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=float(actInfo["heart_rate_max"])))
            if "cadence_avg" in actInfo:
                activity.Stats.Cadence = ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=int(actInfo["cadence_avg"]))
            if "cadence_max" in actInfo:
                activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, max=int(actInfo["cadence_max"])))
            if "title" in actInfo:
                activity.Name = actInfo["title"]
            activity.ServiceData = {"WorkoutID": int(actInfo["id"])}
            activity.CalculateUID()
            activities.append(activity)
        paging = resp.json()["paging"]
        if "next" not in paging or not paging["next"] or not exhaustive:
            break
        else:
            page_url = paging["next"]
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through Garmin Connect's activity-search service.

    Pulls summary stats for each activity, working around several GC quirks:
    pace reported as speed, HR reported as %-of-max/zones, "-Infinity" minima,
    half-counted run cadence, and Power.Min > Power.Max. Returns
    (activities, exclusions).
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()
        res = requests.get(
            "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
            params={"start": (page - 1) * pageSz, "limit": pageSz},
            cookies=cookies,
        )
        res = res.json()["results"]
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            if "sumDistance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                continue
            activity = UploadedActivity()
            if (
                "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            ):  # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = True
            else:
                activity.Stationary = False
            # Prefer the named zone; fall back to a fixed offset when GC sends a non-IANA key.
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if (
                len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled"
            ):  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000)
            )
            # End time: prefer elapsed duration, then mm:ss duration, then the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"]))
                )
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                )
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000)
                )
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(
                self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"])
            )

            def mapStat(gcKey, statKey, type, useSourceUnits=False):
                # Copy one GC summary field into activity.Stats.<statKey> as min/max/avg/value.
                nonlocal activity, act
                if gcKey in act:
                    value = float(act[gcKey]["value"])
                    if math.isinf(value):
                        return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                    activity.Stats.__dict__[statKey].update(
                        ActivityStatistic(self._unitMap[act[gcKey]["uom"]], **({type: value}))
                    )
                    if useSourceUnits:
                        activity.Stats.__dict__[statKey] = activity.Stats.__dict__[statKey].asUnits(
                            self._unitMap[act[gcKey]["uom"]]
                        )

            if "sumMovingDuration" in act:
                activity.Stats.MovingTime = ActivityStatistic(
                    ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["sumMovingDuration"]["value"]))
                )
            if "sumDuration" in act:
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Time,
                    value=timedelta(
                        minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                    ),
                )
            mapStat(
                "minSpeed", "Speed", "min", useSourceUnits=True
            )  # We need to suppress conversion here, so we can fix the pace-speed issue below
            mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
            mapStat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
            mapStat("minAirTemperature", "Temperature", "min")
            mapStat("maxAirTemperature", "Temperature", "max")
            mapStat("weightedMeanAirTemperature", "Temperature", "avg")
            mapStat("sumEnergy", "Energy", "value")
            mapStat("maxHeartRate", "HR", "max")
            mapStat("weightedMeanHeartRate", "HR", "avg")
            mapStat("maxRunCadence", "RunCadence", "max")
            mapStat("weightedMeanRunCadence", "RunCadence", "avg")
            mapStat("maxBikeCadence", "Cadence", "max")
            mapStat("weightedMeanBikeCadence", "Cadence", "avg")
            mapStat("minPower", "Power", "min")
            mapStat("maxPower", "Power", "max")
            mapStat("weightedMeanPower", "Power", "avg")
            mapStat("minElevation", "Elevation", "min")
            mapStat("maxElevation", "Elevation", "max")
            mapStat("gainElevation", "Elevation", "gain")
            mapStat("lossElevation", "Elevation", "loss")
            # In Garmin Land, max can be smaller than min for this field :S
            if (
                activity.Stats.Power.Max is not None
                and activity.Stats.Power.Min is not None
                and activity.Stats.Power.Min > activity.Stats.Power.Max
            ):
                activity.Stats.Power.Min = None
            # To get it to match what the user sees in GC.
            if activity.Stats.RunCadence.Max is not None:
                activity.Stats.RunCadence.Max *= 2
            if activity.Stats.RunCadence.Average is not None:
                activity.Stats.RunCadence.Average *= 2
            # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
            # (a ":" in the display string means it's really a pace - invert to get the true speed).
            if "minSpeed" in act:
                if ":" in act["minSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Min:
                    activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
            if "maxSpeed" in act:
                if ":" in act["maxSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Max:
                    activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
            if "weightedMeanSpeed" in act:
                if ":" in act["weightedMeanSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Average:
                    activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average
            # Similarly, they do weird stuff with HR at times - %-of-max and zones
            # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
            recalcHR = False
            if "maxHeartRate" in act:
                if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act["maxHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Max = None
                    recalcHR = True
            if "weightedMeanHeartRate" in act:
                if (
                    "%" in act["weightedMeanHeartRate"]["withUnitAbbr"]
                    or "z" in act["weightedMeanHeartRate"]["withUnitAbbr"]
                ):
                    activity.Stats.HR.Average = None
                    recalcHR = True
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": act["activityId"], "RecalcHR": recalcHR}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the user's Ride With GPS trips.

    Fix: the exclusion branches referenced ``act["activityId"]``, a key RWGPS
    trip records do not carry (the rest of this method uses ``act["id"]``) -
    hitting an exclusion would have raised KeyError instead of excluding.

    Returns (activities, exclusions).
    """
    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    page = 1
    pageSz = 50
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({
            "start": (page - 1) * pageSz,
            "limit": pageSz
        }))
        # TODO: take advantage of their nice ETag support
        params = {"offset": (page - 1) * pageSz, "limit": pageSz}
        params = self._add_auth_params(params, record=serviceRecord)
        res = requests.get(
            "http://ridewithgps.com/users/{}/trips.json".format(
                serviceRecord.ExternalID),
            params=params)
        res = res.json()
        total_pages = math.ceil(int(res["results_count"]) / pageSz)
        for act in res["results"]:
            if "first_lat" not in act or "last_lat" not in act:
                exclusions.append(
                    APIExcludeActivity("No points",
                                       activityId=act["id"],
                                       userException=UserException(
                                           UserExceptionType.Corrupt)))
                continue
            if "distance" not in act:
                exclusions.append(
                    APIExcludeActivity("No distance",
                                       activityId=act["id"],
                                       userException=UserException(
                                           UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            activity.TZ = pytz.timezone(act["time_zone"])
            logger.debug("Name " + act["name"] + ":")
            if len(act["name"].strip()):
                activity.Name = act["name"]
            # departed_at is UTC; shifted to local below via AdjustTZ.
            activity.StartTime = pytz.utc.localize(
                datetime.strptime(act["departed_at"], "%Y-%m-%dT%H:%M:%SZ"))
            activity.EndTime = activity.StartTime + timedelta(
                seconds=self._duration_to_seconds(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            activity.Distance = float(act["distance"])  # This value is already in meters...
            # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
            activity.Type = ActivityType.Cycling
            activity.CalculateUID()
            activity.UploadedTo = [{
                "Connection": serviceRecord,
                "ActivityID": act["id"]
            }]
            activities.append(activity)
        logger.debug("Finished page {} of {}".format(page, total_pages))
        if not exhaustive or total_pages == page or total_pages == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List a user's VeloHero workouts as (activities, exclusions).

    GET List of Activities as JSON File
    URL: http://app.velohero.com/export/workouts/json
    Parameters:
    user = username
    pass = password
    date_from = YYYY-MM-DD
    date_to = YYYY-MM-DD

    Raises APIException on auth failure (403), non-200 responses, undecodable
    JSON, or a response missing the "workouts" key.
    """
    activities = []
    exclusions = []
    discoveredWorkoutIds = []

    params = self._add_auth_params({}, record=serviceRecord)

    limitDateFormat = "%Y-%m-%d"

    if exhaustive:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = datetime(day=1, month=1, year=1980)  # The beginning of time
    else:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = listEnd - timedelta(days=20)  # Doesn't really matter

    params.update({"date_from": listStart.strftime(limitDateFormat), "date_to": listEnd.strftime(limitDateFormat)})
    logger.debug("Requesting %s to %s" % (listStart, listEnd))
    res = requests.get(self._urlRoot + "/export/workouts/json", params=params)

    if res.status_code != 200:
        if res.status_code == 403:
            raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        raise APIException("Unable to retrieve activity list")

    res.raise_for_status()
    try:
        res = res.json()
    except ValueError:
        raise APIException("Could not decode activity list")
    if "workouts" not in res:
        raise APIException("No activities")
    for workout in res["workouts"]:
        workoutId = int(workout["id"])
        if workoutId in discoveredWorkoutIds:
            continue  # There's the possibility of query overlap
        discoveredWorkoutIds.append(workoutId)
        # FIX: was `workout["file"] is not "1"` — identity comparison against a
        # string literal, which only worked by CPython interning accident and
        # emits SyntaxWarning on modern interpreters. Equality is the intent.
        if workout["file"] != "1":
            logger.debug("Skip workout with ID: " + str(workoutId) + " (no file)")
            continue  # Skip activity without samples (no PWX export)

        activity = UploadedActivity()

        logger.debug("Workout ID: " + str(workoutId))

        # Duration (dur_time)
        duration = self._durationToSeconds(workout["dur_time"])
        activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)

        # Start time (date_ymd, start_time)
        startTimeStr = workout["date_ymd"] + " " + workout["start_time"]
        activity.StartTime = self._parseDateTime(startTimeStr)

        # End time (date_ymd, start_time) + dur_time
        activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)

        # Sport (sport_id); unknown sports fall back to "Other".
        if workout["sport_id"] in self._reverseActivityMappings:
            activity.Type = self._reverseActivityMappings[workout["sport_id"]]
        else:
            activity.Type = ActivityType.Other

        # Distance (dist_km)
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(workout["dist_km"]))

        # Workout is hidden
        activity.Private = workout["hide"] == "1"

        activity.ServiceData = {"workoutId": workoutId}
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List a user's Garmin Connect activities as (activities, exclusions).

    Pages through the GC activity-search service, re-authenticating via
    self._request_with_reauth as needed. Stops after the first page unless
    ``exhaustive``.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))

        res = self._request_with_reauth(serviceRecord, lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}))
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res.text))
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Fall back to the numeric offset (hours) GC supplies alongside the key.
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000))
            # Prefer elapsed duration, then mm:ss duration, else the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))
            if "device" in act and act["device"]["key"] != "unknown":
                devId = DeviceIdentifier.FindMatchingIdentifierOfType(DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                ver_split = act["device"]["key"].split(".")
                ver_maj = None
                ver_min = None
                if len(ver_split) == 4:
                    # 2.90.0.0
                    ver_maj = int(ver_split[0])
                    ver_min = int(ver_split[1])
                activity.Device = Device(devId, verMaj=ver_maj, verMin=ver_min)
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List a user's TrainingPeaks workouts as (activities, exclusions).

    TrainingPeaks only supports date-range queries, so for exhaustive listing
    the range is walked forward from 1980 until no further activities appear.
    Planned-but-unexecuted workouts (no StartTime) and "Day Off" entries are
    skipped; corrupt or unknown-type workouts are recorded as exclusions.
    """
    ns = self._tp_ns
    activities = []
    exclusions = []

    reqData = self._authData(svcRecord)

    limitDateFormat = "%d %B %Y"

    if exhaustive:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = datetime(day=1, month=1, year=1980)  # The beginning of time
    else:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = listEnd - timedelta(days=20)  # Doesn't really matter

    lastActivityDay = None
    discoveredWorkoutIds = []
    while True:
        reqData.update({"startDate": listStart.strftime(limitDateFormat), "endDate": listEnd.strftime(limitDateFormat)})
        # FIX: was a bare print() — every other connector in this file logs
        # diagnostics through logger.debug, so this stray stdout write is
        # brought in line.
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        resp = requests.post("https://www.trainingpeaks.com/tpwebservices/service.asmx/GetWorkoutsForAthlete", data=reqData)
        xresp = etree.XML(resp.content)
        for xworkout in xresp:
            activity = UploadedActivity()
            workoutId = xworkout.find("tpw:WorkoutId", namespaces=ns).text
            workoutDayEl = xworkout.find("tpw:WorkoutDay", namespaces=ns)
            startTimeEl = xworkout.find("tpw:StartTime", namespaces=ns)

            workoutDay = dateutil.parser.parse(workoutDayEl.text)
            startTime = dateutil.parser.parse(startTimeEl.text) if startTimeEl is not None and startTimeEl.text else None

            # Track the latest day seen so the next query window can advance.
            if lastActivityDay is None or workoutDay.replace(tzinfo=None) > lastActivityDay:
                lastActivityDay = workoutDay.replace(tzinfo=None)

            if startTime is None:
                continue  # Planned but not executed yet.

            activity.StartTime = startTime
            endTimeEl = xworkout.find("tpw:TimeTotalInSeconds", namespaces=ns)
            if not endTimeEl.text:
                exclusions.append(APIExcludeActivity("Activity has no duration", activity_id=workoutId, user_exception=UserException(UserExceptionType.Corrupt)))
                continue

            activity.EndTime = activity.StartTime + timedelta(seconds=float(endTimeEl.text))

            distEl = xworkout.find("tpw:DistanceInMeters", namespaces=ns)
            if distEl.text:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(distEl.text))
            # PWX is damn near comprehensive, no need to fill in any of the other statisitcs here, really

            if workoutId in discoveredWorkoutIds:
                continue  # There's the possibility of query overlap, if there are multiple activities on a single day that fall across the query return limit
            discoveredWorkoutIds.append(workoutId)

            workoutTypeEl = xworkout.find("tpw:WorkoutTypeDescription", namespaces=ns)
            if workoutTypeEl.text:
                if workoutTypeEl.text == "Day Off":
                    continue  # TrainingPeaks has some weird activity types...
                if workoutTypeEl.text not in self._workoutTypeMappings:
                    exclusions.append(APIExcludeActivity("Activity type %s unknown" % workoutTypeEl.text, activity_id=workoutId, user_exception=UserException(UserExceptionType.Corrupt)))
                    continue
                activity.Type = self._workoutTypeMappings[workoutTypeEl.text]

            activity.ServiceData = {"WorkoutID": workoutId}
            activity.CalculateUID()
            activities.append(activity)

        if not exhaustive:
            break

        # Since TP only lets us query by date range, to get full activity history we need to query successively smaller ranges
        if len(xresp):
            if listStart == lastActivityDay:
                break  # This wouldn't work if you had more than #MaxQueryReturn activities on that day - but that number is probably 50+
            listStart = lastActivityDay
        else:
            break  # We're done

    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List a user's Motivato workouts as (activities, exclusions).

    Posts to the workouts-sync endpoint; on a 403 the session is refreshed
    once and the request retried. Entries without a duration are skipped.
    Raises APIException if the response body is not valid JSON.
    """
    logger.debug("Checking motivato premium state")
    self._applyPaymentState(serviceRecord)

    logger.debug("Motivato DownloadActivityList")
    session = self._get_session(record=serviceRecord)
    activities = []
    exclusions = []
    self._rate_limit()

    retried_auth = False
    #headers = {'X-App-With-Tracks': "true"}
    headers = {}
    res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)
    if res.status_code == 403 and not retried_auth:
        retried_auth = True
        session = self._get_session(serviceRecord, skip_cache=True)
        # FIX: previously the fresh session was created but the request was
        # never re-issued, so the 403 response body was parsed below. Retry
        # the POST with the re-authenticated session.
        res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)

    try:
        respList = res.json()
    except ValueError:
        res_txt = res.text  # So it can capture in the log message
        raise APIException("Parse failure in Motivato list resp: %s" % res.status_code)

    for actInfo in respList:
        if "duration" in actInfo:
            duration = self._durationToSeconds(actInfo["duration"])
        else:
            continue  # No duration -> nothing to sync.

        activity = UploadedActivity()
        # Start time: training date plus optional time-of-day meta (defaults to midnight).
        if "time_start" in actInfo["metas"]:
            startTimeStr = actInfo["training_at"] + " " + actInfo["metas"]["time_start"]
        else:
            startTimeStr = actInfo["training_at"] + " 00:00:00"

        activity.StartTime = self._parseDateTime(startTimeStr)
        activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
        activity.Type = self._reverseActivityMappings[actInfo["discipline_id"]]
        activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)

        if "distance" in actInfo:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance"]))

        #activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerSecond, value=1.0/float(actInfo["metas"]["pace"]))

        activity.ServiceData = {"WorkoutID": int(actInfo["id"])}
        activity.CalculateUID()
        logger.debug("Generated UID %s" % activity.UID)
        activities.append(activity)

    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List a user's RideWithGPS trips as (activities, exclusions).

    Single unpaged request (RWGPS paging is noted as unsupported here).
    Min/avg/max stat triples are folded in via the local mapStatTriple helper.
    """
    def mapStatTriple(act, stats_obj, key, units):
        # Merge <key>_max/_min/_avg fields from the trip dict into stats_obj,
        # skipping absent or falsy (0/None) values.
        if "%s_max" % key in act and act["%s_max" % key]:
            stats_obj.update(ActivityStatistic(units, max=float(act["%s_max" % key])))
        if "%s_min" % key in act and act["%s_min" % key]:
            stats_obj.update(ActivityStatistic(units, min=float(act["%s_min" % key])))
        if "%s_avg" % key in act and act["%s_avg" % key]:
            stats_obj.update(ActivityStatistic(units, avg=float(act["%s_avg" % key])))

    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    activities = []
    exclusions = []
    # They don't actually support paging right now, for whatever reason
    params = self._add_auth_params({}, record=serviceRecord)
    res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
    res = res.json()
    # Apparently some API users are seeing this new result format - I'm not
    if type(res) is dict:
        res = res.get("results", [])
    if res == []:
        return [], []  # No activities
    for act in res:
        if "distance" not in act:
            exclusions.append(APIExcludeActivity("No distance", activity_id=act["id"], user_exception=UserException(UserExceptionType.Corrupt)))
            continue
        if "duration" not in act or not act["duration"]:
            exclusions.append(APIExcludeActivity("No duration", activity_id=act["id"], user_exception=UserException(UserExceptionType.Corrupt)))
            continue
        activity = UploadedActivity()
        logger.debug("Name " + act["name"] + ":")
        if len(act["name"].strip()):
            activity.Name = act["name"]
        if len(act["description"].strip()):
            activity.Notes = act["description"]
        activity.GPS = act["is_gps"]
        activity.Stationary = not activity.GPS  # I think
        # 0 = public, 1 = private, 2 = friends
        activity.Private = act["visibility"] == 1
        activity.StartTime = dateutil.parser.parse(act["departed_at"])
        try:
            activity.TZ = pytz.timezone(act["time_zone"])
        except pytz.exceptions.UnknownTimeZoneError:
            # Sometimes the time_zone returned isn't quite what we'd like it
            # So, just pull the offset from the datetime
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)
        activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)  # Overwrite dateutil's sillyness
        activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
        logger.debug("Activity s/t " + str(activity.StartTime))
        activity.AdjustTZ()
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, float(act["distance"]))
        mapStatTriple(act, activity.Stats.Power, "watts", ActivityStatisticUnit.Watts)
        mapStatTriple(act, activity.Stats.Speed, "speed", ActivityStatisticUnit.KilometersPerHour)
        mapStatTriple(act, activity.Stats.Cadence, "cad", ActivityStatisticUnit.RevolutionsPerMinute)
        mapStatTriple(act, activity.Stats.HR, "hr", ActivityStatisticUnit.BeatsPerMinute)
        if "elevation_gain" in act and act["elevation_gain"]:
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, gain=float(act["elevation_gain"])))
        if "elevation_loss" in act and act["elevation_loss"]:
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, loss=float(act["elevation_loss"])))
        # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
        activity.Type = ActivityType.Cycling
        activity.CalculateUID()
        activity.ServiceData = {"ActivityID": act["id"]}
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, svcRec, exhaustive=False):
    """List activity files in a user's Dropbox (v1 API) as (activities, exclusions).

    Walks a locally cached folder structure, reusing cached activity metadata
    (keyed by hashed relative path, with a legacy UID-keyed fallback) when the
    file revision is unchanged, and fully downloading/parsing otherwise. The
    metadata cache is written back to cachedb at the end.
    """
    dbcl = self._getClient(svcRec)
    if not svcRec.Authorization["Full"]:
        syncRoot = "/"
    else:
        syncRoot = svcRec.Config["SyncRoot"]
    cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
    if cache is None:
        cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
    if "Structure" not in cache:
        cache["Structure"] = []
    # Refresh the cached folder tree in-place before scanning it.
    self._folderRecurse(cache["Structure"], dbcl, syncRoot)

    activities = []
    exclusions = []

    for dir in cache["Structure"]:
        for file in dir["Files"]:
            path = file["Path"]
            if svcRec.Authorization["Full"]:
                relPath = path.replace(syncRoot, "", 1)
            else:
                relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

            hashedRelPath = self._hash_path(relPath)
            if hashedRelPath in cache["Activities"]:
                existing = cache["Activities"][hashedRelPath]
            else:
                existing = None

            if not existing:
                # Continue to use the old records keyed by UID where possible
                existing = [(k, x) for k, x in cache["Activities"].items() if "Path" in x and x["Path"] == relPath]  # path is relative to syncroot to reduce churn if they relocate it
                existing = existing[0] if existing else None
                if existing is not None:
                    existUID, existing = existing
                    existing["UID"] = existUID

            if existing and existing["Rev"] == file["Rev"]:
                # don't need entire activity loaded here, just UID
                act = UploadedActivity()
                act.UID = existing["UID"]
                try:
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                except:
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y")  # Exactly one user has managed to break %z :S
                if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                    act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
            else:
                logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                # get the full activity
                try:
                    act, rev = self._getActivity(svcRec, dbcl, path)
                except APIExcludeActivity as e:
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(strip_context(e))
                    continue
                try:
                    act.EnsureTZ()
                except:
                    pass  # We tried.
                if hasattr(act, "OriginatedFromTapiriik") and not act.CountTotalWaypoints():
                    # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                    # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                    # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                    if ".tcx.summary-data" in path:
                        logger.info("...summary file already moved")
                    else:
                        logger.info("...moving summary-only file")
                        dbcl.file_move(path, path.replace(".tcx", ".tcx.summary-data"))
                    continue  # DON'T include in listing - it'll be regenerated
                del act.Laps
                act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                cache["Activities"][hashedRelPath] = {"Rev": rev, "UID": act.UID, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
            tagRes = self._tagActivity(relPath)
            act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

            act.Type = tagRes if tagRes is not None else ActivityType.Other
            logger.debug("Activity s/t %s" % act.StartTime)
            activities.append(act)

    # Persist the updated cache (save if it already has a mongo _id, else insert).
    if "_id" in cache:
        cachedb.dropbox_cache.save(cache)
    else:
        cachedb.dropbox_cache.insert(cache)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List a user's Strava activities as (activities, exclusions).

    Walks backwards in time using the ``before`` query parameter; stops after
    one request unless ``exhaustive``. Stationary (no-path) and unmapped-type
    rides become exclusions.
    """
    activities = []
    exclusions = []
    before = earliestDate = None

    while True:
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
        if resp.status_code == 401:
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

        earliestDate = None

        reqdata = resp.json()

        if not len(reqdata):
            break  # No more activities to see

        for ride in reqdata:
            activity = UploadedActivity()
            activity.TZ = pytz.timezone(re.sub("^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want""
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            # Track the oldest activity seen; its timestamp becomes the next
            # request's `before` cursor.
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                logger.debug("\t\tNo pts")
                continue  # stationary activity - no syncing for now

            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]

            # Reverse-lookup our activity type from Strava's type string.
            actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
            if not len(actType):
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"]))
                logger.debug("\t\tUnknown activity")
                continue

            activity.Type = actType[0]
            activity.Distance = ride["distance"]
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)

        if not exhaustive or not earliestDate:
            break

    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List a user's Garmin Connect activities as (activities, exclusions).

    Pages through the GC activity-search service via self._request_with_reauth;
    stops after the first page unless ``exhaustive``.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(serviceRecord, lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}))
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res.text))
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Fall back to the numeric offset (hours) supplied alongside the key.
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
            # Prefer elapsed duration, then mm:ss duration, else the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))
            if "device" in act and act["device"]["key"] != "unknown":
                devId = DeviceIdentifier.FindMatchingIdentifierOfType(DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                ver_split = act["device"]["key"].split(".")
                ver_maj = None
                ver_min = None
                if len(ver_split) == 4:
                    # 2.90.0.0
                    ver_maj = int(ver_split[0])
                    ver_min = int(ver_split[1])
                activity.Device = Device(devId, verMaj=ver_maj, verMin=ver_min)
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRec, exhaustive=False):
    """List activity files in a user's Dropbox (v2 API) as (activities, exclusions).

    Recursively lists .gpx/.tcx files under the sync root, reusing cached
    metadata when a file's revision is unchanged and downloading/parsing it
    otherwise. The metadata cache is written back incrementally and stale
    entries for deleted files are pruned at the end.
    """
    dbcl = self._getClient(svcRec)
    if not svcRec.Authorization["Full"]:
        syncRoot = "/"
    else:
        syncRoot = svcRec.Config["SyncRoot"]
    # Dropbox API v2 doesn't like / as root.
    if syncRoot == "/":
        syncRoot = ""
    # New Dropbox API prefers path_lower, it would seem.
    syncRoot = syncRoot.lower()

    # There used to be a massive affair going on here to cache the folder structure locally.
    # Dropbox API 2.0 doesn't support the hashes I need for that.
    # Oh well. Throw that data out now. Well, don't load it at all.
    cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID}, {"ExternalID": True, "Activities": True})
    if cache is None:
        cache = {"ExternalID": svcRec.ExternalID, "Activities": {}}

    try:
        list_result = dbcl.files_list_folder(syncRoot, recursive=True)
    except dropbox.exceptions.DropboxException as e:
        self._raiseDbException(e)

    def cache_writeback():
        # Save in place when the cache doc already has a mongo _id; otherwise
        # insert it and remember the new _id for subsequent writebacks.
        if "_id" in cache:
            cachedb.dropbox_cache.save(cache)
        else:
            insert_result = cachedb.dropbox_cache.insert(cache)
            cache["_id"] = insert_result.inserted_id

    activities = []
    exclusions = []
    discovered_activity_cache_keys = set()

    while True:
        for entry in list_result.entries:
            if not hasattr(entry, "rev"):
                # Not a file -> we don't care.
                continue
            path = entry.path_lower
            if not path.endswith(".gpx") and not path.endswith(".tcx"):
                # Not an activity file -> we don't care.
                continue
            if svcRec.Authorization["Full"]:
                relPath = path.replace(syncRoot, "", 1)
            else:
                relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

            hashedRelPath = self._hash_path(relPath)
            discovered_activity_cache_keys.add(hashedRelPath)
            if hashedRelPath in cache["Activities"]:
                existing = cache["Activities"][hashedRelPath]
            else:
                existing = None

            if existing and existing["Rev"] == entry.rev:
                # don't need entire activity loaded here, just UID
                act = UploadedActivity()
                act.UID = existing["UID"]
                try:
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                except:
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y")  # Exactly one user has managed to break %z :S
                if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                    act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
            else:
                logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                # get the full activity
                try:
                    act, rev = self._getActivity(svcRec, dbcl, path)
                except APIExcludeActivity as e:
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(strip_context(e))
                    continue
                try:
                    act.EnsureTZ()
                except:
                    pass  # We tried.
                act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                cache["Activities"][hashedRelPath] = {"Rev": rev, "UID": act.UID, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
                # Incrementally update the cache db.
                # Otherwise, if we crash later on in listing
                # (due to OOM or similar), we'll never make progress on this account.
                cache_writeback()
            tagRes = self._tagActivity(relPath)
            act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

            act.Type = tagRes if tagRes is not None else ActivityType.Other
            logger.debug("Activity s/t %s" % act.StartTime)
            activities.append(act)
        # Perform pagination.
        if list_result.has_more:
            list_result = dbcl.files_list_folder_continue(list_result.cursor)
        else:
            break

    # Drop deleted activities' records from cache.
    all_activity_cache_keys = set(cache["Activities"].keys())
    for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys:
        del cache["Activities"][deleted_key]
    cache_writeback()

    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List a user's SportTracks activities as (activities, exclusions).

    Pages through the OpenFit fitnessActivities endpoint. Because ST.mobi
    sometimes reports UTC for activities that aren't, a per-activity TZ
    offset cache (backed by cachedb) is consulted/updated, falling back to
    deriving the TZ from the activity's first GPS location.
    """
    headers = self._getAuthHeaders(serviceRecord)
    activities = []
    exclusions = []
    pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"

    # Load cached ActivityURI -> TZ-offset-minutes map for this user.
    activity_tz_cache_raw = cachedb.sporttracks_meta_cache.find_one({"ExternalID": serviceRecord.ExternalID})
    activity_tz_cache_raw = activity_tz_cache_raw if activity_tz_cache_raw else {"Activities": []}
    activity_tz_cache = dict([(x["ActivityURI"], x["TZ"]) for x in activity_tz_cache_raw["Activities"]])

    while True:
        logger.debug("Req against " + pageUri)
        res = requests.get(pageUri, headers=headers)
        try:
            res = res.json()
        except ValueError:
            raise APIException("Could not decode activity list response %s %s" % (res.status_code, res.text))
        for act in res["items"]:
            activity = UploadedActivity()
            activity.ServiceData = {"ActivityURI": act["uri"]}

            if len(act["name"].strip()):
                activity.Name = act["name"]

            # Longstanding ST.mobi bug causes it to return negative partial-hour timezones as "-2:-30" instead of "-2:30"
            fixed_start_time = re.sub(r":-(\d\d)", r":\1", act["start_time"])
            activity.StartTime = dateutil.parser.parse(fixed_start_time)
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.

            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
            activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(act["duration"]))  # OpenFit says this excludes paused times.

            # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
            if activity.TZ == pytz.utc:
                if act["uri"] in activity_tz_cache:
                    # Cached offset (minutes) from a previous listing.
                    activity.TZ = pytz.FixedOffset(activity_tz_cache[act["uri"]])
                else:
                    # So, we get the first location in the activity and calculate the TZ from that.
                    try:
                        firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                    except APIExcludeActivity:
                        pass
                    else:
                        try:
                            activity.CalculateTZ(firstLocation, recalculate=True)
                        except:
                            # We tried!
                            pass
                        else:
                            activity.AdjustTZ()
                        finally:
                            # Cache whatever offset we ended up with (minutes).
                            activity_tz_cache[act["uri"]] = activity.StartTime.utcoffset().total_seconds() / 60

            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["total_distance"]))

            types = [x.strip().lower() for x in act["type"].split(":")]
            types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
            activity.Type = None
            for type_key in types:
                if type_key in self._activityMappings:
                    activity.Type = self._activityMappings[type_key]
                    break
            if not activity.Type:
                exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activity_id=act["uri"], user_exception=UserException(UserExceptionType.Other)))
                continue

            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or "next" not in res or not len(res["next"]):
            break
        else:
            pageUri = res["next"]
    logger.debug("Writing back meta cache")
    cachedb.sporttracks_meta_cache.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Activities": [{"ActivityURI": k, "TZ": v} for k, v in activity_tz_cache.items()]}, upsert=True)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List Strava activities for this connection.

    Pages backwards through /athletes/<id>/activities using the `before`
    epoch timestamp; stops at the first empty page, or after one page when
    not exhaustive.

    Fix: the timezone-stripping regex was written as a plain string literal
    containing "\\(" / "\\)" - invalid escape sequences (DeprecationWarning
    today, SyntaxError in a future Python). Now a raw string; behavior is
    unchanged.

    Returns a (activities, exclusions) tuple.
    """
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        if before is not None and before < 0:
            break  # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        self._globalRateLimit()
        resp = requests.get(
            "https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities",
            headers=self._apiHeaders(svcRecord),
            params={"before": before})
        if resp.status_code == 401:
            raise APIException(
                "No authorization to retrieve activity list",
                block=True,
                user_exception=UserException(UserExceptionType.Authorization,
                                             intervention_required=True))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            # Comes back as "(GMT -13:37) The Stuff/We Want" - strip the parenthesized prefix.
            activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))
            activity.StartTime = pytz.utc.localize(
                datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ride["name"]))
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.ServiceData = {"ActivityID": ride["id"], "Manual": ride["manual"]}
            if ride["type"] not in self._reverseActivityTypeMappings:
                exclusions.append(
                    APIExcludeActivity("Unsupported activity type %s" % ride["type"],
                                       activity_id=ride["id"],
                                       user_exception=UserException(UserExceptionType.Other)))
                logger.debug("\t\tUnknown activity")
                continue
            activity.Type = self._reverseActivityTypeMappings[ride["type"]]
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                        value=ride["distance"])
            if "max_speed" in ride or "average_speed" in ride:
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=ride["average_speed"] if "average_speed" in ride else None,
                    max=ride["max_speed"] if "max_speed" in ride else None)
            # They don't let you manually enter this, and I think it returns 0 for those activities.
            # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?
            activity.Stats.MovingTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds,
                value=ride["moving_time"] if "moving_time" in ride and ride["moving_time"] > 0 else None)
            if "average_watts" in ride:
                activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts,
                                                         avg=ride["average_watts"])
            if "average_heartrate" in ride:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      avg=ride["average_heartrate"]))
            if "max_heartrate" in ride:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      max=ride["max_heartrate"]))
            if "average_cadence" in ride:
                activity.Stats.Cadence.update(
                    ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                                      avg=ride["average_cadence"]))
            if "average_temp" in ride:
                activity.Stats.Temperature.update(
                    ActivityStatistic(ActivityStatisticUnit.DegreesCelcius,
                                      avg=ride["average_temp"]))
            if "calories" in ride:
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories,
                                                          value=ride["calories"])
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.Stationary = ride["manual"]
            activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """
    GET List of Activities as JSON File

    URL: https://app.velohero.com/export/workouts/json
    Parameters:
        user = username
        pass = password
        date_from = YYYY-MM-DD
        date_to = YYYY-MM-DD

    Fix: the "has file" check used `workout["file"] is not "1"` - an identity
    comparison against a string literal (SyntaxWarning; only works by accident
    of CPython interning). Replaced with a value comparison.

    Returns a (activities, exclusions) tuple.
    """
    activities = []
    exclusions = []
    discoveredWorkoutIds = []
    params = self._add_auth_params({}, record=serviceRecord)
    limitDateFormat = "%Y-%m-%d"
    if exhaustive:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = datetime(day=1, month=1, year=1980)  # The beginning of time
    else:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = listEnd - timedelta(days=20)  # Doesn't really matter
    params.update({"date_from": listStart.strftime(limitDateFormat),
                   "date_to": listEnd.strftime(limitDateFormat)})
    logger.debug("Requesting %s to %s" % (listStart, listEnd))
    res = requests.get(self._urlRoot + "/export/workouts/json",
                       headers=self._obligatory_headers,
                       params=params)
    if res.status_code != 200:
        if res.status_code == 403:
            raise APIException("Invalid login", block=True,
                               user_exception=UserException(UserExceptionType.Authorization,
                                                            intervention_required=True))
        raise APIException("Unable to retrieve activity list")
    res.raise_for_status()
    try:
        res = res.json()
    except ValueError:
        raise APIException("Could not decode activity list")
    if "workouts" not in res:
        raise APIException("No activities")
    for workout in res["workouts"]:
        workoutId = int(workout["id"])
        if workoutId in discoveredWorkoutIds:
            continue  # There's the possibility of query overlap
        discoveredWorkoutIds.append(workoutId)
        if workout["file"] != "1":  # was `is not "1"` - identity compare bug
            logger.debug("Skip workout with ID: " + str(workoutId) + " (no file)")
            continue  # Skip activity without samples (no PWX export)
        activity = UploadedActivity()
        logger.debug("Workout ID: " + str(workoutId))
        # Duration (dur_time)
        duration = self._durationToSeconds(workout["dur_time"])
        activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                     value=duration)
        # Start time (date_ymd, start_time)
        startTimeStr = workout["date_ymd"] + " " + workout["start_time"]
        activity.StartTime = self._parseDateTime(startTimeStr)
        # End time (date_ymd, start_time) + dur_time
        activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
        # Sport (sport_id)
        if workout["sport_id"] in self._reverseActivityMappings:
            activity.Type = self._reverseActivityMappings[workout["sport_id"]]
        else:
            activity.Type = ActivityType.Other
        # Distance (dist_km)
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers,
                                                    value=float(workout["dist_km"]))
        # Workout is hidden
        activity.Private = workout["hide"] == "1"
        activity.ServiceData = {"workoutId": workoutId}
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List TrainerRoad career workouts for this connection.

    The list endpoint lacks start/end times, so per-workout metadata is
    fetched individually and cached in Mongo (trainerroad_meta) keyed by
    workout ID to avoid re-fetching on subsequent syncs.

    Returns a (activities, exclusions) tuple; exclusions is always empty here.
    """
    activities = []
    session = self._get_session(record=serviceRecord)
    session.headers.update({"Accept": "application/json"})
    workouts_resp = session.get("https://api.trainerroad.com/api/careerworkouts")
    if workouts_resp.status_code != 200:
        if workouts_resp.status_code == 401:
            raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        raise APIException("Workout listing error")
    cached_record = cachedb.trainerroad_meta.find_one({"ExternalID": serviceRecord.ExternalID})
    if not cached_record:
        cached_workout_meta = {}
    else:
        cached_workout_meta = cached_record["Workouts"]
    workouts = workouts_resp.json()
    for workout in workouts:
        # Un/f their API doesn't provide the start/end times in the list response
        # So we need to pull the extra data, if it's not already cached
        workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
        if workout_id not in cached_workout_meta:
            meta_resp = session.get("https://api.trainerroad.com/api/careerworkouts?guid=%s" % workout["Guid"])
            # We don't need everything
            full_meta = meta_resp.json()
            meta = {key: full_meta[key] for key in ["WorkoutDate", "WorkoutName", "WorkoutNotes", "TotalMinutes", "TotalKM", "AvgWatts", "Kj"]}
            cached_workout_meta[workout_id] = meta
        else:
            meta = cached_workout_meta[workout_id]
        activity = UploadedActivity()
        activity.ServiceData = {"ID": int(workout_id)}
        activity.Name = meta["WorkoutName"]
        activity.Notes = meta["WorkoutNotes"]
        activity.Type = ActivityType.Cycling
        # Everything's in UTC
        activity.StartTime = dateutil.parser.parse(meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
        activity.EndTime = activity.StartTime + timedelta(minutes=meta["TotalMinutes"])
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
        activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=meta["Kj"])
        # Trainer workouts: never stationary=None, never GPS.
        activity.Stationary = False
        activity.GPS = False
        activity.CalculateUID()
        activities.append(activity)
    # Persist any newly-fetched metadata for the next sync.
    cachedb.trainerroad_meta.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Workouts": cached_workout_meta}, upsert=True)
    return activities, []
def DownloadActivityList(self, svcRec, exhaustive=False):
    """List activity files stored in the user's Dropbox.

    Walks the cached folder structure (refreshed via _folderRecurse), and for
    each file either reuses the cached UID/StartTime/EndTime (when the Dropbox
    revision matches) or downloads and parses the full activity. The cache
    document also carries legacy records keyed by UID, which are migrated to
    hashed-relative-path keys as they are encountered.

    Returns a (activities, exclusions) tuple.
    """
    dbcl = self._getClient(svcRec)
    if not svcRec.Authorization["Full"]:
        syncRoot = "/"
    else:
        syncRoot = svcRec.Config["SyncRoot"]
    cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
    if cache is None:
        cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
    if "Structure" not in cache:
        cache["Structure"] = []
    self._folderRecurse(cache["Structure"], dbcl, syncRoot)
    activities = []
    exclusions = []
    for dir in cache["Structure"]:
        for file in dir["Files"]:
            path = file["Path"]
            if svcRec.Authorization["Full"]:
                relPath = path.replace(syncRoot, "", 1)
            else:
                relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api
            hashedRelPath = self._hash_path(relPath)
            if hashedRelPath in cache["Activities"]:
                existing = cache["Activities"][hashedRelPath]
            else:
                existing = None
            if not existing:
                # Continue to use the old records keyed by UID where possible
                existing = [(k, x) for k, x in cache["Activities"].items() if "Path" in x and x["Path"] == relPath]  # path is relative to syncroot to reduce churn if they relocate it
                existing = existing[0] if existing else None
                if existing is not None:
                    existUID, existing = existing
                    existing["UID"] = existUID
            if existing and existing["Rev"] == file["Rev"]:
                # don't need entire activity loaded here, just UID
                act = UploadedActivity()
                act.UID = existing["UID"]
                try:
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                except:
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y")  # Exactly one user has managed to break %z :S
                if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                    act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
            else:
                logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                # get the full activity
                try:
                    act, rev = self._getActivity(svcRec, dbcl, path)
                except APIExcludeActivity as e:
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(strip_context(e))
                    continue
                try:
                    act.EnsureTZ()
                except:
                    pass  # We tried.
                if hasattr(act, "OriginatedFromTapiriik") and not act.CountTotalWaypoints():
                    # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                    # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                    # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                    if ".tcx.summary-data" in path:
                        logger.info("...summary file already moved")
                    else:
                        logger.info("...moving summary-only file")
                        dbcl.file_move(path, path.replace(".tcx", ".tcx.summary-data"))
                    continue  # DON'T include in listing - it'll be regenerated
                del act.Laps
                act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                cache["Activities"][hashedRelPath] = {"Rev": rev, "UID": act.UID, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
            tagRes = self._tagActivity(relPath)
            act.ServiceData = {"Path": path, "Tagged": tagRes is not None}
            act.Type = tagRes if tagRes is not None else ActivityType.Other
            logger.debug("Activity s/t %s" % act.StartTime)
            activities.append(act)
    # save() when the doc came from Mongo (has _id), insert() for a fresh one.
    if "_id" in cache:
        cachedb.dropbox_cache.save(cache)
    else:
        cachedb.dropbox_cache.insert(cache)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List Endomondo workouts via the legacy mobile API.

    Pages backwards using the `before` timestamp of the earliest activity
    seen so far; track records are fetched and cached per activity.

    Fix: the inner track-record download reused the name `data`, shadowing
    the page-listing JSON; after any cache miss the pagination check
    `("more" in data and data["more"] is False)` then inspected the wrong
    object. The track record now uses its own name (`trackData`).

    Returns a (activities, exclusions) tuple.
    """
    activities = []
    exclusions = []
    earliestDate = None
    earliestFirstPageDate = None
    paged = False
    while True:
        before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
        logger.debug("Req with " + str(params))
        response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)
        if response.status_code != 200:
            if response.status_code == 401 or response.status_code == 403:
                raise APIAuthorizationException("No authorization to retrieve activity list")
            raise APIException("Unable to retrieve activity list " + str(response))
        data = response.json()
        for act in data["data"]:
            startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
            if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                earliestDate = startTime
            logger.debug("activity pre")
            if not act["has_points"]:
                logger.warning("\t no pts")
                exclusions.append(APIExcludeActivity("No points", activityId=act["id"]))
                continue  # it'll break strava, which needs waypoints to find TZ. Meh
            if "tracking" in act and act["tracking"]:
                logger.warning("\t tracking")
                exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                continue  # come back once they've completed the activity
            activity = UploadedActivity()
            activity.StartTime = startTime
            activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
            cachedTrackData = cachedb.endomondo_activity_cache.find_one({"TrackID": act["id"]})
            if cachedTrackData is None:
                # Renamed from `data` - must not clobber the page listing above.
                trackData = self._downloadRawTrackRecord(serviceRecord, act["id"])
                self._populateActivityFromTrackRecord(activity, trackData, minimumWaypoints=True)
                cachedTrackData = {"Owner": serviceRecord.ExternalID, "TrackID": act["id"], "Data": trackData, "StartTime": activity.StartTime}
                if not paged or AGGRESSIVE_CACHE:
                    # Don't cache stuff that we won't need in the immediate future.
                    cachedb.endomondo_activity_cache.insert(cachedTrackData)
            else:
                self._populateActivityFromTrackRecord(activity, cachedTrackData["Data"], minimumWaypoints=True)
            activity.Waypoints = []
            if int(act["sport"]) in self._activityMappings:
                activity.Type = self._activityMappings[int(act["sport"])]
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"], "ActivityData": cachedTrackData["Data"]}]
            activity.CalculateUID()
            activities.append(activity)
        if not paged:
            earliestFirstPageDate = earliestDate
        if not exhaustive or ("more" in data and data["more"] is False):
            break
        else:
            paged = True
    if not AGGRESSIVE_CACHE:
        # Drop cached tracks older than the first page - they won't be needed soon.
        cachedb.endomondo_activity_cache.remove({"Owner": serviceRecord.ExternalID, "$or": [{"StartTime": {"$lt": earliestFirstPageDate}}, {"StartTime": {"$exists": False}}]})
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List TrainingPeaks workouts via the GetWorkoutsForAthlete SOAP-ish endpoint.

    TP only supports date-range queries, so exhaustive listing repeatedly
    narrows the window start to the latest workout day seen until no progress
    is made.

    Fix: the request trace used a bare print(); now logger.debug, consistent
    with the rest of the file's logging.

    Returns a (activities, exclusions) tuple.
    """
    ns = self._tp_ns
    activities = []
    exclusions = []
    reqData = self._authData(svcRecord)
    limitDateFormat = "%d %B %Y"
    if exhaustive:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = datetime(day=1, month=1, year=1980)  # The beginning of time
    else:
        listEnd = datetime.now() + timedelta(days=1.5)  # Who knows which TZ it's in
        listStart = listEnd - timedelta(days=20)  # Doesn't really matter
    lastActivityDay = None
    discoveredWorkoutIds = []
    while True:
        reqData.update({
            "startDate": listStart.strftime(limitDateFormat),
            "endDate": listEnd.strftime(limitDateFormat)
        })
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        resp = requests.post(
            "https://www.trainingpeaks.com/tpwebservices/service.asmx/GetWorkoutsForAthlete",
            data=reqData)
        xresp = etree.XML(resp.content)
        for xworkout in xresp:
            activity = UploadedActivity()
            workoutId = xworkout.find("tpw:WorkoutId", namespaces=ns).text
            workoutDayEl = xworkout.find("tpw:WorkoutDay", namespaces=ns)
            startTimeEl = xworkout.find("tpw:StartTime", namespaces=ns)
            workoutDay = dateutil.parser.parse(workoutDayEl.text)
            startTime = dateutil.parser.parse(startTimeEl.text) if startTimeEl is not None and startTimeEl.text else None
            if lastActivityDay is None or workoutDay.replace(tzinfo=None) > lastActivityDay:
                lastActivityDay = workoutDay.replace(tzinfo=None)
            if startTime is None:
                continue  # Planned but not executed yet.
            activity.StartTime = startTime
            endTimeEl = xworkout.find("tpw:TimeTotalInSeconds", namespaces=ns)
            if not endTimeEl.text:
                exclusions.append(
                    APIExcludeActivity("Activity has no duration",
                                       activity_id=workoutId,
                                       user_exception=UserException(UserExceptionType.Corrupt)))
                continue
            activity.EndTime = activity.StartTime + timedelta(seconds=float(endTimeEl.text))
            distEl = xworkout.find("tpw:DistanceInMeters", namespaces=ns)
            if distEl.text:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                            value=float(distEl.text))
            # PWX is damn near comprehensive, no need to fill in any of the other statisitcs here, really
            if workoutId in discoveredWorkoutIds:
                continue  # There's the possibility of query overlap, if there are multiple activities on a single day that fall across the query return limit
            discoveredWorkoutIds.append(workoutId)
            workoutTypeEl = xworkout.find("tpw:WorkoutTypeDescription", namespaces=ns)
            if workoutTypeEl.text:
                if workoutTypeEl.text == "Day Off":
                    continue  # TrainingPeaks has some weird activity types...
                if workoutTypeEl.text not in self._workoutTypeMappings:
                    exclusions.append(
                        APIExcludeActivity("Activity type %s unknown" % workoutTypeEl.text,
                                           activity_id=workoutId,
                                           user_exception=UserException(UserExceptionType.Corrupt)))
                    continue
                activity.Type = self._workoutTypeMappings[workoutTypeEl.text]
            activity.ServiceData = {"WorkoutID": workoutId}
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive:
            break
        # Since TP only lets us query by date range, to get full activity history we need to query successively smaller ranges
        if len(xresp):
            if listStart == lastActivityDay:
                break  # This wouldn't work if you had more than #MaxQueryReturn activities on that day - but that number is probably 50+
            listStart = lastActivityDay
        else:
            break  # We're done
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List Garmin Health activities uploaded in the last day.

    WARNING - BE CAREFUL ABOUT THE DATE FILTER:
    the API matches on *upload* time (upload_start_time/upload_end_time),
    not activity start time. An activity recorded 01-01-2019 but uploaded
    20-05-2019 is returned by a 20/21-05-2019 window. We therefore page the
    upload window in one-day slices from yesterday to now.

    Fix: `item['deviceName'] is not 'unknown'` was an identity comparison
    against a string literal (SyntaxWarning; interning-dependent) - now a
    value comparison. Trivially dead locals were also removed.

    Returns a (activities, exclusions) tuple (exclusions currently unused -
    the append is still commented out pending tests).
    """
    activities = []
    exclusions = []
    # NOTE(review): this lookup's result is never used - kept for now in case
    # the query itself matters; confirm and remove.
    service_id = svcRecord._id
    user = db.users.find_one({
        'ConnectedServices': {
            '$elemMatch': {
                'ID': service_id,
                'Service': self.ID
            }
        }
    })
    afterDateObj = datetime.now() - timedelta(days=1)
    afterDate = afterDateObj.strftime("%Y-%m-%d")
    date_now = datetime.now()
    oauth_token = svcRecord.Authorization.get('OAuthToken')
    user_access_token = svcRecord.Authorization.get('AccessToken')
    user_access_token_secret = svcRecord.Authorization.get('AccessTokenSecret')
    logging.info("\t Download Garmin Health activities since : " + afterDate)
    logging.info("\t Building signin for activities summary")
    user_tokens = {
        'access_token': user_access_token,
        'access_token_secret': user_access_token_secret,
        'oauth_token': oauth_token
    }
    payload = ""
    start_date = afterDateObj
    index_total = 0
    while start_date < date_now:
        # One-day upload windows, clamped to "now".
        end_date = start_date + timedelta(seconds=86400)
        if end_date > date_now:
            end_date = date_now
        start_date_tmstmp = str(int(start_date.timestamp()))
        start_date_str = start_date.strftime("%Y-%m-%d")
        end_date_tmstmp = str(int(end_date.timestamp()))
        end_date_str = end_date.strftime("%Y-%m-%d")
        logging.info("\t Download Garmin Health activities from %s to %s " % (start_date_str, end_date_str))
        signin_parameters = {
            'upload_start_time': start_date_tmstmp,
            'upload_end_time': end_date_tmstmp,
        }
        signin_info = self._request_signin('GET',
                                           self.URI_ACTIVITIES_SUMMARY,
                                           user_tokens,
                                           parameters=signin_parameters)
        resp = requests.request("GET",
                                signin_info['path'],
                                data=payload,
                                headers=signin_info['header'])
        if resp.status_code != 204 and resp.status_code != 200:
            logging.info("\t An error occured while downloading Garmin Health activities from %s to %s " % (start_date_str, end_date_str))
        json_data = resp.json()
        if json_data:
            for item in json_data:
                index_total = index_total + 1
                activity = UploadedActivity()
                activity_name = item['activityType']
                if item['deviceName'] != 'unknown':  # was `is not` - identity compare bug
                    activity_name = activity_name + " - " + item['deviceName']
                # Start times come back as UTC epoch seconds.
                activity.StartTime = datetime.utcfromtimestamp(item['startTimeInSeconds'])
                activity.TZ = pytz.utc
                logging.debug("\tActivity start s/t %s: %s" % (activity.StartTime, activity_name))
                activity.EndTime = activity.StartTime + timedelta(seconds=item["durationInSeconds"])
                activity.ServiceData = {"ActivityID": item["summaryId"]}
                if "manual" in item:
                    activity.ServiceData['Manual'] = item["manual"]
                else:
                    activity.ServiceData['Manual'] = False
                # check if activity type ID exists
                if item["activityType"] not in self._reverseActivityTypeMappings:
                    # TODO : append an APIExcludeActivity once tests are done
                    logger.info("\t\tUnknown activity")
                    continue
                activity.Type = self._reverseActivityTypeMappings[item["activityType"]]
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                            value=item["distanceInMeters"])
                if "avgSpeedInMetersPerSecond" in item and "maxSpeedInMetersPerSecond" in item:
                    activity.Stats.Speed = ActivityStatistic(
                        ActivityStatisticUnit.MetersPerSecond,
                        avg=item["avgSpeedInMetersPerSecond"],
                        max=item["maxSpeedInMetersPerSecond"])
                else:
                    if "avgSpeedInMetersPerSecond" in item:
                        activity.Stats.Speed = ActivityStatistic(
                            ActivityStatisticUnit.MetersPerSecond,
                            avg=item["avgSpeedInMetersPerSecond"])
                    if "maxSpeedInMetersPerSecond" in item:
                        activity.Stats.Speed = ActivityStatistic(
                            ActivityStatisticUnit.MetersPerSecond,
                            max=item["maxSpeedInMetersPerSecond"])
                # Todo: map energy / moving time / power / GPS once the Garmin field names are confirmed
                if "averageHeartRateInBeatsPerMinute" in item and "maxHeartRateInBeatsPerMinute" in item:
                    activity.Stats.HR.update(
                        ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                          avg=item["averageHeartRateInBeatsPerMinute"],
                                          max=item["maxHeartRateInBeatsPerMinute"]))
                else:
                    if "averageHeartRateInBeatsPerMinute" in item:
                        activity.Stats.HR.update(
                            ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                              avg=item["averageHeartRateInBeatsPerMinute"]))
                    if "maxHeartRateInBeatsPerMinute" in item:
                        activity.Stats.HR.update(
                            ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                              max=item["maxHeartRateInBeatsPerMinute"]))
                # Todo: map cadence / temperature once the Garmin field names are confirmed
                if "calories" in item:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories,
                                                              value=item["calories"])
                activity.Name = activity_name
                activity.Private = False
                activity.Stationary = False
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)
                logging.info("\t\t Garmin Activity ID : " + str(item["summaryId"]))
        start_date = end_date
    logging.info("\t\t total Garmin activities downloaded : " + str(index_total))
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List rides from the Singletracker API for this user.

    POSTs the user's external ID to getRidesByUserId and converts each ride
    into an UploadedActivity (mountain biking; waypoints come later).

    Fix: the ValueError handler referenced an undefined name `resp` (the
    response variable is `response`), so a JSON decode failure raised a
    NameError instead of the intended APIException.

    Returns a (activities, exclusions) tuple.
    """
    activities = []
    exclusions = []
    url = self.SingletrackerDomain + "getRidesByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        raise APIException("Failed parsing Singletracker list response %s - %s" %
                           (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Epoch seconds -> naive UTC datetime (string round-trip strips sub-second precision).
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'),
            "%Y-%m-%d %H:%M:%S")
        if "stopTime" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["rideId"], "Manual": "False"}
        activity.Name = ride["trackName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.MountainBiking
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                        value=ride["totalDistance"])
        if "avgSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                     avg=ride["avgSpeed"])
        activity.Notes = None
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List runs from the Setio API for this user.

    POSTs the user's external ID to getRunsByUserId, then fetches the
    per-run comment via getRunComment for each run.

    Fix: the ValueError handler referenced an undefined name `resp` (the
    response variable is `response`), so a JSON decode failure raised a
    NameError instead of the intended APIException.

    Returns a (activities, exclusions) tuple.
    """
    activities = []
    exclusions = []
    url = self.SetioDomain + "getRunsByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        raise APIException("Failed parsing Setio list response %s - %s" %
                           (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Epoch seconds -> naive UTC datetime (string round-trip strips sub-second precision).
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
            "%Y-%m-%d %H:%M:%S")
        if "stopTimeStamp" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["runId"], "Manual": "False"}
        activity.Name = ride["programName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.Running
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                        value=ride["totalDistance"])
        if "averageCadence" in ride:
            activity.Stats.Cadence.update(
                ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                                  avg=ride["averageCadence"]))
        if "averageSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                     avg=ride["averageSpeed"])
        # get comment
        url = self.SetioDomain + "getRunComment"
        payload = {"userId": extID, "runId": activity.ServiceData["ActivityID"]}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        streamdata = requests.post(url, data=json.dumps(payload), headers=headers)
        if streamdata.status_code == 500:
            raise APIException("Internal server error")
        if streamdata.status_code == 403:
            raise APIException("No authorization to download activity", block=True,
                               user_exception=UserException(UserExceptionType.Authorization,
                                                            intervention_required=True))
        activity.Notes = None
        if streamdata.status_code == 200:  # Ok
            try:
                commentdata = streamdata.json()
            except:
                raise APIException("Stream data returned is not JSON")
            if "comment" in commentdata:
                activity.Notes = commentdata["comment"]
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List Strava activities for this connection (legacy keyword variant).

    Pages backwards through /athletes/<id>/activities using the `before`
    epoch timestamp; stops at the first empty page, or after one page when
    not exhaustive.

    Fix: the timezone-stripping regex was written as a plain string literal
    containing "\\(" / "\\)" - invalid escape sequences (DeprecationWarning
    today, SyntaxError in a future Python). Now a raw string; behavior is
    unchanged.

    Returns a (activities, exclusions) tuple.
    """
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        if before is not None and before < 0:
            break  # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        self._globalRateLimit()
        resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities",
                            headers=self._apiHeaders(svcRecord),
                            params={"before": before})
        if resp.status_code == 401:
            raise APIException("No authorization to retrieve activity list", block=True,
                               user_exception=UserException(UserExceptionType.Authorization,
                                                            intervention_required=True))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            # Comes back as "(GMT -13:37) The Stuff/We Want" - strip the parenthesized prefix.
            activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ride["name"]))
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.ServiceData = {"ActivityID": ride["id"], "Manual": ride["manual"]}
            if ride["type"] not in self._reverseActivityTypeMappings:
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"],
                                                     activityId=ride["id"],
                                                     userException=UserException(UserExceptionType.Other)))
                logger.debug("\t\tUnknown activity")
                continue
            activity.Type = self._reverseActivityTypeMappings[ride["type"]]
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["distance"])
            if "max_speed" in ride or "average_speed" in ride:
                activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                         avg=ride["average_speed"] if "average_speed" in ride else None,
                                                         max=ride["max_speed"] if "max_speed" in ride else None)
            # They don't let you manually enter this, and I think it returns 0 for those activities.
            # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?
            activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                          value=ride["moving_time"] if "moving_time" in ride and ride["moving_time"] > 0 else None)
            if "average_watts" in ride:
                activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts,
                                                         avg=ride["average_watts"])
            if "average_heartrate" in ride:
                activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                                           avg=ride["average_heartrate"]))
            if "max_heartrate" in ride:
                activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                                           max=ride["max_heartrate"]))
            if "average_cadence" in ride:
                activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                                                                avg=ride["average_cadence"]))
            if "average_temp" in ride:
                activity.Stats.Temperature.update(ActivityStatistic(ActivityStatisticUnit.DegreesCelcius,
                                                                    avg=ride["average_temp"]))
            if "calories" in ride:
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories,
                                                          value=ride["calories"])
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.Stationary = ride["manual"]
            activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List the user's workouts via the Endomondo API.

    Follows the server-supplied ``paging.next`` URL when ``exhaustive`` is
    True. Returns ``(activities, exclusions)``; in-progress live-tracking
    workouts are excluded (non-permanently) since they're not complete yet.

    :param serviceRecord: service record used to build the OAuth session.
    :param exhaustive: when True, follow pagination to the end.
    :raises APIException: when a page body cannot be decoded as JSON.
    """
    oauthSession = self._oauthSession(serviceRecord)
    activities = []
    exclusions = []
    page_url = "https://api.endomondo.com/api/1/workouts"
    while True:
        resp = oauthSession.get(page_url)
        # FIX: parse the response body exactly once. The original called
        # resp.json() again for "paging" below - re-parsing the body and,
        # worse, outside this ValueError guard.
        try:
            respData = resp.json()
        except ValueError:
            self._rateLimitBailout(resp)
            raise APIException("Error decoding activity list resp %s %s" % (resp.status_code, resp.text))
        respList = respData["data"]
        for actInfo in respList:
            activity = UploadedActivity()
            activity.StartTime = self._parseDate(actInfo["start_time"])
            logger.debug("Activity s/t %s" % activity.StartTime)
            if "is_tracking" in actInfo and actInfo["is_tracking"]:
                # Still being recorded live - skip for now, retry on a later sync.
                exclusions.append(
                    APIExcludeActivity(
                        "Not complete",
                        activity_id=actInfo["id"],
                        permanent=False,
                        user_exception=UserException(
                            UserExceptionType.LiveTracking)))
                continue

            if "end_time" in actInfo:
                activity.EndTime = self._parseDate(actInfo["end_time"])

            if actInfo["sport"] in self._activityMappings:
                activity.Type = self._activityMappings[actInfo["sport"]]

            # "duration" is timer time
            if "duration_total" in actInfo:
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Seconds,
                    value=float(actInfo["duration_total"]))

            if "distance_total" in actInfo:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Kilometers,
                    value=float(actInfo["distance_total"]))

            if "calories_total" in actInfo:
                activity.Stats.Energy = ActivityStatistic(
                    ActivityStatisticUnit.Kilocalories,
                    value=float(actInfo["calories_total"]))

            # Elevation fields arrive piecemeal, so start with an empty stat
            # and fill in whichever components are present.
            activity.Stats.Elevation = ActivityStatistic(
                ActivityStatisticUnit.Meters)

            if "altitude_max" in actInfo:
                activity.Stats.Elevation.Max = float(
                    actInfo["altitude_max"])

            if "altitude_min" in actInfo:
                activity.Stats.Elevation.Min = float(
                    actInfo["altitude_min"])

            if "total_ascent" in actInfo:
                activity.Stats.Elevation.Gain = float(
                    actInfo["total_ascent"])

            if "total_descent" in actInfo:
                activity.Stats.Elevation.Loss = float(
                    actInfo["total_descent"])

            activity.Stats.Speed = ActivityStatistic(
                ActivityStatisticUnit.KilometersPerHour)
            if "speed_max" in actInfo:
                activity.Stats.Speed.Max = float(actInfo["speed_max"])

            if "heart_rate_avg" in actInfo:
                activity.Stats.HR = ActivityStatistic(
                    ActivityStatisticUnit.BeatsPerMinute,
                    avg=float(actInfo["heart_rate_avg"]))

            if "heart_rate_max" in actInfo:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      max=float(
                                          actInfo["heart_rate_max"])))

            if "cadence_avg" in actInfo:
                activity.Stats.Cadence = ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    avg=int(actInfo["cadence_avg"]))

            if "cadence_max" in actInfo:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        max=int(actInfo["cadence_max"])))

            if "power_avg" in actInfo:
                activity.Stats.Power = ActivityStatistic(
                    ActivityStatisticUnit.Watts,
                    avg=int(actInfo["power_avg"]))

            if "power_max" in actInfo:
                activity.Stats.Power.update(
                    ActivityStatistic(ActivityStatisticUnit.Watts,
                                      max=int(actInfo["power_max"])))

            if "title" in actInfo:
                activity.Name = actInfo["title"]

            activity.ServiceData = {
                "WorkoutID": int(actInfo["id"]),
                "Sport": actInfo["sport"]
            }

            activity.CalculateUID()
            activities.append(activity)

        paging = respData["paging"]
        if "next" not in paging or not paging["next"] or not exhaustive:
            break
        else:
            page_url = paging["next"]

    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List activities via the SportTracks OpenFit API.

    Returns ``(activities, exclusions)``. Because SportTracks sometimes
    reports activities as UTC when they clearly aren't, UTC-stamped
    activities get their real timezone computed from the first trackpoint
    (expensive - one extra activity download), with the result persisted in
    a per-user mongo cache (``cachedb.sporttracks_meta_cache``) so it's only
    done once per activity.

    :param serviceRecord: service record carrying ExternalID and auth.
    :param exhaustive: when True, follow the "next" page URI to the end.
    :raises APIException: when a page body cannot be decoded as JSON.
    """
    headers = self._getAuthHeaders(serviceRecord)
    activities = []
    exclusions = []
    pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
    # Load the ActivityURI -> TZ-offset-in-minutes cache for this user.
    activity_tz_cache_raw = cachedb.sporttracks_meta_cache.find_one({"ExternalID": serviceRecord.ExternalID})
    activity_tz_cache_raw = activity_tz_cache_raw if activity_tz_cache_raw else {"Activities":[]}
    activity_tz_cache = dict([(x["ActivityURI"], x["TZ"]) for x in activity_tz_cache_raw["Activities"]])
    while True:
        logger.debug("Req against " + pageUri)
        res = requests.get(pageUri, headers=headers)
        try:
            res = res.json()
        except ValueError:
            raise APIException("Could not decode activity list response %s %s" % (res.status_code, res.text))
        for act in res["items"]:
            activity = UploadedActivity()
            activity.ServiceData = {"ActivityURI": act["uri"]}

            if len(act["name"].strip()):
                activity.Name = act["name"]

            # Longstanding ST.mobi bug causes it to return negative partial-hour timezones as "-2:-30" instead of "-2:30"
            fixed_start_time = re.sub(r":-(\d\d)", r":\1", act["start_time"])
            activity.StartTime = dateutil.parser.parse(fixed_start_time)
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.

            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
            activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(act["duration"])) # OpenFit says this excludes paused times.

            # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
            if activity.TZ == pytz.utc:
                if act["uri"] in activity_tz_cache:
                    # Already resolved on a previous sync - reuse the cached offset (minutes).
                    activity.TZ = pytz.FixedOffset(activity_tz_cache[act["uri"]])
                else:
                    # So, we get the first location in the activity and calculate the TZ from that.
                    try:
                        firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                    except APIExcludeActivity:
                        # Couldn't fetch the activity detail - leave TZ as UTC.
                        pass
                    else:
                        try:
                            activity.CalculateTZ(firstLocation, recalculate=True)
                        except:
                            # We tried!
                            pass
                        else:
                            activity.AdjustTZ()
                        finally:
                            # Cache whatever offset we ended up with (UTC if lookup
                            # failed) so we don't re-download this activity next sync.
                            activity_tz_cache[act["uri"]] = activity.StartTime.utcoffset().total_seconds() / 60
            logger.debug("Activity s/t " + str(activity.StartTime))

            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["total_distance"]))

            types = [x.strip().lower() for x in act["type"].split(":")]
            types.reverse() # The incoming format is like "walking: hiking" and we want the most specific first
            activity.Type = None
            for type_key in types:
                if type_key in self._activityMappings:
                    activity.Type = self._activityMappings[type_key]
                    break

            if not activity.Type:
                exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"], userException=UserException(UserExceptionType.Other)))
                continue

            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or "next" not in res or not len(res["next"]):
            break
        else:
            pageUri = res["next"]
    logger.debug("Writing back meta cache")
    # Upsert the (possibly grown) TZ cache for this user.
    cachedb.sporttracks_meta_cache.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Activities": [{"ActivityURI": k, "TZ": v} for k, v in activity_tz_cache.items()]}, upsert=True)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List activities via the Garmin Connect activity-search proxy.

    Pages through results ``pageSz`` at a time; on a 403 the session is
    re-established once (cache skipped) before giving up on the request.
    Returns ``(activities, exclusions)``.

    :param serviceRecord: service record used to build the logged-in session.
    :param exhaustive: when True, walk every page reported by the search.
    :raises APIException: when a page body cannot be parsed as JSON.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    session = self._get_session(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()

        retried_auth = False
        while True:
            res = session.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz})
            # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
            if res.status_code == 403 and not retried_auth:
                # One fresh login attempt, bypassing the cached session.
                retried_auth = True
                session = self._get_session(serviceRecord, skip_cache=True)
            else:
                break
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s" % res.status_code)
        if "activities" not in res:
            break # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()

            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act

            activity.Private = act["privacy"]["key"] == "private"

            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Fall back to the numeric offset (hours -> minutes) when the
                # zone name isn't in the pytz database.
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled": # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]

            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]

            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
            # End time: prefer elapsed duration, then mm:ss duration, then the raw end timestamp.
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))

            activity.AdjustTZ()

            if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))

            activity.Type = self._resolveActivityType(act["activityType"]["key"])

            activity.CalculateUID()

            activity.ServiceData = {"ActivityID": int(act["activityId"])}

            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions