def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the OpenFit fitnessActivities feed and build the activity list.

    Returns a ``(activities, exclusions)`` tuple; follows the feed's "next"
    link only when ``exhaustive`` is set.
    """
    cookies = self._get_cookies(serviceRecord)
    activities = []
    exclusions = []
    pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
    while True:
        logger.debug("Req against " + pageUri)
        res = requests.get(pageUri, cookies=cookies)
        res = res.json()
        for act in res["items"]:
            activity = UploadedActivity()
            if len(act["name"].strip()):
                activity.Name = act["name"]
            activity.StartTime = dateutil.parser.parse(act["start_time"])
            # BUGFIX: dateutil's tzutc has no _offset attribute, so the
            # FixedOffset conversion below crashed on UTC activities.
            # Map UTC explicitly (same guard as the sibling implementation).
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.
            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Distance = float(act["total_distance"])
            activity.Type = self._activityMappings[act["type"].lower()]
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityURI": act["uri"]}]
            activities.append(activity)
        if not exhaustive or "next" not in res or not len(res["next"]):
            break
        else:
            pageUri = res["next"]
    return activities, exclusions
def DownloadActivityList(self, svcRec, exhaustive=False):
    """List activities stored in the user's Dropbox.

    Uses a per-user cache keyed on file revision so unchanged files are not
    re-downloaded. Returns a ``(activities, exclusions)`` tuple.
    """
    dbcl = self._getClient(svcRec)
    # Full-access tokens sync from a user-configured root; sandboxed tokens
    # always see "/" (their app folder).
    syncRoot = svcRec.Config["SyncRoot"] if svcRec.Authorization["Full"] else "/"
    cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
    if cache is None:
        cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
    if "Structure" not in cache:
        cache["Structure"] = []
    self._folderRecurse(cache["Structure"], dbcl, syncRoot)

    activities = []
    exclusions = []
    for folder in cache["Structure"]:
        for file in folder["Files"]:
            path = file["Path"]
            if svcRec.Authorization["Full"]:
                relPath = path.replace(syncRoot, "", 1)
            else:
                relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api
            # Cache entries are keyed by UID but located by path; paths are
            # stored relative to the sync root to reduce churn if the user
            # relocates it.
            matches = [(uid, entry) for uid, entry in cache["Activities"].items() if entry["Path"] == relPath]
            existing = matches[0] if matches else None
            if existing is not None:
                existUID, existing = existing
            if existing and existing["Rev"] == file["Rev"]:
                # Revision unchanged - the cached UID/times are enough here.
                act = UploadedActivity()
                act.UID = existUID
                act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                    act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
            else:
                # New or modified file - pull and parse the full activity.
                try:
                    act, rev = self._getActivity(svcRec, dbcl, path)
                except APIExcludeActivity as e:
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(e)
                    continue
                # Drop the waypoints now; re-parsing later is cheaper than
                # holding them all in RAM.
                del act.Waypoints
                act.Waypoints = []
                cache["Activities"][act.UID] = {"Rev": rev, "Path": relPath, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
            tagRes = self._tagActivity(relPath)
            act.UploadedTo = [{"Connection": svcRec, "Path": path, "Tagged": tagRes is not None}]
            act.Type = tagRes if tagRes is not None else ActivityType.Other
            logger.debug("Activity s/t %s" % act.StartTime)
            activities.append(act)

    cachedb.dropbox_cache.update({"ExternalID": svcRec.ExternalID}, cache, upsert=True)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the OpenFit fitnessActivities feed.

    Returns ``(activities, exclusions)``; activities with an unmapped type
    are recorded as exclusions.
    """
    cookies = self._get_cookies(record=serviceRecord)
    activities = []
    exclusions = []
    page_uri = self.OpenFitEndpoint + "/fitnessActivities.json"
    while True:
        logger.debug("Req against " + page_uri)
        page_data = requests.get(page_uri, cookies=cookies).json()
        for item in page_data["items"]:
            activity = UploadedActivity()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityURI": item["uri"]}]
            if len(item["name"].strip()):
                activity.Name = item["name"]
            activity.StartTime = dateutil.parser.parse(item["start_time"])
            if isinstance(activity.StartTime.tzinfo, tzutc):
                # dateutil's tzutc carries no _offset attribute; map it straight to pytz UTC.
                activity.TZ = pytz.utc
            else:
                # Convert the dateutil fixed-offset zone into a pytz one.
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)
            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(item["duration"]))
            if activity.TZ == pytz.utc:
                # The feed sometimes reports UTC for activities that clearly
                # aren't; derive the real zone from the first recorded location.
                try:
                    first_location = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                except APIExcludeActivity:
                    pass
                else:
                    activity.CalculateTZ(first_location)
                    activity.AdjustTZ()
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Distance = float(item["total_distance"])
            # Type arrives as "general: specific" - prefer the most specific mapped entry.
            type_keys = [part.strip().lower() for part in item["type"].split(":")][::-1]
            activity.Type = next((self._activityMappings[key] for key in type_keys if key in self._activityMappings), None)
            if not activity.Type:
                exclusions.append(APIExcludeActivity("Unknown activity type %s" % item["type"], activityId=item["uri"]))
                continue
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or "next" not in page_data or not len(page_data["next"]):
            break
        page_uri = page_data["next"]
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Download the activity list from the OpenFit endpoint, following the
    feed's pagination link only while ``exhaustive`` is set.
    """
    cookies = self._get_cookies(record=serviceRecord)
    activities, exclusions = [], []
    uri = self.OpenFitEndpoint + "/fitnessActivities.json"
    more_pages = True
    while more_pages:
        logger.debug("Req against " + uri)
        body = requests.get(uri, cookies=cookies).json()
        for record in body["items"]:
            activity = UploadedActivity()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityURI": record["uri"]}]
            if len(record["name"].strip()):
                activity.Name = record["name"]
            activity.StartTime = dateutil.parser.parse(record["start_time"])
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                # Convert the dateutil lame timezones into pytz awesome timezones.
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)
            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
            activity.EndTime = activity.StartTime + timedelta(seconds=float(record["duration"]))
            # Sometimes activities get returned with a UTC timezone even when
            # they are clearly not in UTC - resolve it from the first location.
            if activity.TZ == pytz.utc:
                try:
                    loc = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                except APIExcludeActivity:
                    pass
                else:
                    activity.CalculateTZ(loc)
                    activity.AdjustTZ()
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.Distance = float(record["total_distance"])
            # The incoming format is like "walking: hiking"; the most specific
            # (last) component wins.
            for candidate in reversed([t.strip().lower() for t in record["type"].split(":")]):
                if candidate in self._activityMappings:
                    activity.Type = self._activityMappings[candidate]
                    break
            else:
                activity.Type = None
            if not activity.Type:
                exclusions.append(APIExcludeActivity("Unknown activity type %s" % record["type"], activityId=record["uri"]))
                continue
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or "next" not in body or not len(body["next"]):
            more_pages = False
        else:
            uri = body["next"]
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the athlete's Strava activities, paging backwards in time via the
    ``before`` parameter while ``exhaustive`` is set.

    Raises APIException (blocking, intervention required) on a 401 response.
    Returns ``(activities, exclusions)``.
    """
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
        self._logAPICall("list", (svcRecord.ExternalID, str(earliestDate)), resp.status_code == 401)
        if resp.status_code == 401:
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            # FIX: raw string for the regex - "\(" and "\s" are invalid string
            # escapes and raise warnings/errors on modern Pythons.
            activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                logger.debug("\t\tNo pts")
                continue  # stationary activity - no syncing for now
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]
            actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
            if not len(actType):
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"]))
                logger.debug("\t\tUnknown activity")
                continue
            activity.Type = actType[0]
            activity.Distance = ride["distance"]
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through Garmin Connect's activity-search service (50 per page).

    Returns ``(activities, exclusions)``; point-less or distance-less
    activities are recorded as exclusions.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 50
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
        res = res.json()["results"]
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            # BUGFIX: the coordinate comparison used `is` (object identity);
            # JSON-decoded floats are distinct objects even when equal, so the
            # stationary-activity check never matched. Compare values with ==.
            if "beginLatitude" not in act or "endLatitude" not in act or (act["beginLatitude"] == act["endLatitude"] and act["beginLongitude"] == act["endLongitude"]):
                exclusions.append(APIExcludeActivity("No points", activityId=act["activityId"]))
                continue
            if "sumDistance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                continue
            activity = UploadedActivity()
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone key - fall back to the raw hour offset.
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                activity.Name = act["activityName"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Distance = float(act["sumDistance"]["value"]) * (1.60934 if act["sumDistance"]["uom"] == "mile" else 1) * 1000  # In meters...
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["activityId"]}]
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the athlete's Strava activities, paging backwards via ``before``.

    Returns ``(activities, exclusions)``; stationary and single-point rides
    are excluded.
    """
    # grumble grumble strava api sucks grumble grumble
    # http://app.strava.com/api/v1/rides?athleteId=id
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            # FIX: raw string for the regex - "\(" and "\s" are invalid string
            # escapes and raise warnings/errors on modern Pythons.
            activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                continue  # stationary activity - no syncing for now
            if ride["start_latlng"] == ride["end_latlng"]:
                exclusions.append(APIExcludeActivity("Only one waypoint", activityId=ride["id"]))
                continue  # Only one waypoint, one would assume.
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]
            actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
            if not len(actType):
                exclusions.append(APIExcludeActivity("Unsupported activity type", activityId=ride["id"]))
                continue
            activity.Type = actType[0]
            activity.Distance = ride["distance"]
            activity.Name = ride["name"]
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through Garmin Connect's activity-search service (50 per page).

    Returns ``(activities, exclusions)``; point-less or distance-less
    activities are recorded as exclusions.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 50
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
        res = res.json()["results"]
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            # BUGFIX: the coordinate comparison used `is` (object identity);
            # JSON-decoded floats are distinct objects even when equal, so the
            # stationary-activity check never matched. Compare values with ==.
            if "beginLatitude" not in act or "endLatitude" not in act or (act["beginLatitude"] == act["endLatitude"] and act["beginLongitude"] == act["endLongitude"]):
                exclusions.append(APIExcludeActivity("No points", activityId=act["activityId"]))
                continue
            if "sumDistance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                continue
            activity = UploadedActivity()
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone key - fall back to the raw hour offset.
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                activity.Name = act["activityName"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Distance = float(act["sumDistance"]["value"]) * (1.60934 if act["sumDistance"]["uom"] == "mile" else 1) * 1000  # In meters...
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["activityId"]}]
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Ride With GPS trips list (50 per page).

    Returns ``(activities, exclusions)``.
    """
    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    page = 1
    pageSz = 50
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        # TODO: take advantage of their nice ETag support
        params = {"offset": (page - 1) * pageSz, "limit": pageSz}
        params = self._add_auth_params(params, record=serviceRecord)
        res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
        res = res.json()
        total_pages = math.ceil(int(res["results_count"]) / pageSz)
        for act in res["results"]:
            # BUGFIX: RWGPS trip records carry "id", not "activityId" (see the
            # UploadedTo assignment below) - the old key raised KeyError the
            # moment an exclusion had to be recorded.
            if "first_lat" not in act or "last_lat" not in act:
                exclusions.append(APIExcludeActivity("No points", activityId=act["id"]))
                continue
            if "distance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["id"]))
                continue
            activity = UploadedActivity()
            activity.TZ = pytz.timezone(act["time_zone"])
            logger.debug("Name " + act["name"] + ":")
            if len(act["name"].strip()):
                activity.Name = act["name"]
            activity.StartTime = pytz.utc.localize(datetime.strptime(act["departed_at"], "%Y-%m-%dT%H:%M:%SZ"))
            activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            activity.Distance = float(act["distance"])  # This value is already in meters...
            # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
            activity.Type = ActivityType.Cycling
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"]}]
            activities.append(activity)
        logger.debug("Finished page {} of {}".format(page, total_pages))
        if not exhaustive or total_pages == page or total_pages == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Ride With GPS trips list (50 per page).

    Returns ``(activities, exclusions)``; trips without points or distance
    are excluded as Corrupt.
    """
    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    page = 1
    pageSz = 50
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        # TODO: take advantage of their nice ETag support
        params = {"offset": (page - 1) * pageSz, "limit": pageSz}
        params = self._add_auth_params(params, record=serviceRecord)
        res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
        res = res.json()
        total_pages = math.ceil(int(res["results_count"]) / pageSz)
        for act in res["results"]:
            # BUGFIX: RWGPS trip records carry "id", not "activityId" (see the
            # UploadedTo assignment below) - the old key raised KeyError the
            # moment an exclusion had to be recorded.
            if "first_lat" not in act or "last_lat" not in act:
                exclusions.append(APIExcludeActivity("No points", activityId=act["id"], userException=UserException(UserExceptionType.Corrupt)))
                continue
            if "distance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["id"], userException=UserException(UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            activity.TZ = pytz.timezone(act["time_zone"])
            logger.debug("Name " + act["name"] + ":")
            if len(act["name"].strip()):
                activity.Name = act["name"]
            activity.StartTime = pytz.utc.localize(datetime.strptime(act["departed_at"], "%Y-%m-%dT%H:%M:%SZ"))
            activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            activity.Distance = float(act["distance"])  # This value is already in meters...
            # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
            activity.Type = ActivityType.Cycling
            activity.CalculateUID()
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"]}]
            activities.append(activity)
        logger.debug("Finished page {} of {}".format(page, total_pages))
        if not exhaustive or total_pages == page or total_pages == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Endomondo workout list, caching raw track records.

    Returns ``(activities, exclusions)``; point-less and in-progress
    workouts become exclusions.
    """
    activities = []
    exclusions = []
    earliestDate = None
    earliestFirstPageDate = None
    paged = False
    while True:
        before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
        logger.debug("Req with " + str(params))
        response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)
        if response.status_code != 200:
            if response.status_code == 401 or response.status_code == 403:
                raise APIAuthorizationException("No authorization to retrieve activity list")
            raise APIException("Unable to retrieve activity list " + str(response))
        data = response.json()
        for act in data["data"]:
            startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
            if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                earliestDate = startTime
            logger.debug("activity pre")
            if not act["has_points"]:
                logger.warning("\t no pts")
                exclusions.append(APIExcludeActivity("No points", activityId=act["id"]))
                continue  # it'll break strava, which needs waypoints to find TZ. Meh
            if "tracking" in act and act["tracking"]:
                logger.warning("\t tracking")
                exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                continue  # come back once they've completed the activity
            activity = UploadedActivity()
            activity.StartTime = startTime
            activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
            cachedTrackData = cachedb.endomondo_activity_cache.find_one({"TrackID": act["id"]})
            if cachedTrackData is None:
                # BUGFIX: the downloaded track was assigned to `data`, shadowing
                # the page JSON and corrupting the `"more" in data` pagination
                # check below. Use a distinct name.
                trackData = self._downloadRawTrackRecord(serviceRecord, act["id"])
                self._populateActivityFromTrackRecord(activity, trackData, minimumWaypoints=True)
                cachedTrackData = {"Owner": serviceRecord.ExternalID, "TrackID": act["id"], "Data": trackData, "StartTime": activity.StartTime}
                if not paged or AGGRESSIVE_CACHE:  # Don't cache stuff that we won't need in the immediate future.
                    cachedb.endomondo_activity_cache.insert(cachedTrackData)
            else:
                self._populateActivityFromTrackRecord(activity, cachedTrackData["Data"], minimumWaypoints=True)
            activity.Waypoints = []
            if int(act["sport"]) in self._activityMappings:
                activity.Type = self._activityMappings[int(act["sport"])]
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"], "ActivityData": cachedTrackData["Data"]}]
            activity.CalculateUID()
            activities.append(activity)
        if not paged:
            earliestFirstPageDate = earliestDate
        if not exhaustive or ("more" in data and data["more"] is False):
            break
        else:
            paged = True
    if not AGGRESSIVE_CACHE:
        cachedb.endomondo_activity_cache.remove({"Owner": serviceRecord.ExternalID, "$or": [{"StartTime": {"$lt": earliestFirstPageDate}}, {"StartTime": {"$exists": False}}]})
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the athlete's Strava activities, paging backwards via ``before``.

    Raises APIException (blocking, intervention required) on a 401 response.
    Returns ``(activities, exclusions)``.
    """
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
        if resp.status_code == 401:
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            # FIX: raw string for the regex - "\(" and "\s" are invalid string
            # escapes and raise warnings/errors on modern Pythons.
            activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                logger.debug("\t\tNo pts")
                continue  # stationary activity - no syncing for now
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]
            actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
            if not len(actType):
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"]))
                logger.debug("\t\tUnknown activity")
                continue
            activity.Type = actType[0]
            activity.Distance = ride["distance"]
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the Endomondo workout list.

    First pass builds skeleton activities per page; second pass resolves each
    activity's timezone, using a pickled-TZ cache keyed on track ID.
    Returns ``(activities, exclusions)``.
    """
    activities = []
    exclusions = []
    earliestDate = None
    earliestFirstPageDate = None
    paged = False
    while True:
        before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
        logger.debug("Req with " + str(params))
        response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)
        if response.status_code != 200:
            if response.status_code in (401, 403):
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Unable to retrieve activity list " + str(response))
        data = response.json()
        # Endomondo can also report auth failure inside a 200 response.
        if "error" in data and data["error"]["type"] == "AUTH_FAILED":
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

        # Pass 1: build skeleton activities and collect track IDs.
        track_ids = []
        this_page_activities = []
        for act in data["data"]:
            startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
            if earliestDate is None or startTime < earliestDate:
                earliestDate = startTime
            logger.debug("activity pre")
            if not act["has_points"]:
                # Downstream services need waypoints to resolve the TZ.
                logger.warning("\t no pts")
                exclusions.append(APIExcludeActivity("No points", activityId=act["id"]))
                continue
            if "tracking" in act and act["tracking"]:
                # Still being recorded - retry once it has completed.
                logger.warning("\t tracking")
                exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                continue
            track_ids.append(act["id"])
            activity = UploadedActivity()
            activity.StartTime = startTime
            activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            if int(act["sport"]) in self._activityMappings:
                activity.Type = self._activityMappings[int(act["sport"])]
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"]}]
            this_page_activities.append(activity)

        # Pass 2: attach timezones, hitting the cache in bulk first.
        cached_track_tzs = cachedb.endomondo_activity_cache.find({"TrackID": {"$in": track_ids}})
        cached_track_tzs = dict([(x["TrackID"], x) for x in cached_track_tzs])
        logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" + str(len(track_ids)) + " cached TZ records")
        for activity in this_page_activities:
            cachedTrackData = None
            track_id = activity.UploadedTo[0]["ActivityID"]
            if track_id not in cached_track_tzs:
                logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                cachedTrackData = self._downloadRawTrackRecord(serviceRecord, track_id)
                try:
                    self._populateActivityFromTrackData(activity, cachedTrackData, minimumWaypoints=True)
                except APIExcludeActivity as e:
                    e.ExternalActivityID = track_id
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(e)
                    continue
                if not activity.TZ:
                    logger.info("Couldn't determine TZ")
                    exclusions.append(APIExcludeActivity("Couldn't determine TZ", activityId=track_id))
                    continue
                cachedTrackRecord = {"Owner": serviceRecord.ExternalID, "TrackID": track_id, "TZ": pickle.dumps(activity.TZ), "StartTime": activity.StartTime}
                cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
            else:
                activity.TZ = pickle.loads(cached_track_tzs[track_id]["TZ"])
            activity.AdjustTZ()  # Everything returned is in UTC
            activity.UploadedTo[0]["ActivityData"] = cachedTrackData
            activity.Waypoints = []
            activity.CalculateUID()
            activities.append(activity)

        if not paged:
            earliestFirstPageDate = earliestDate
        if not exhaustive or ("more" in data and data["more"] is False):
            break
        else:
            paged = True
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Fetch the user's trip list from Ride With GPS and map it to activities.

    The trips endpoint is not paged (the comment below notes RWGPS doesn't
    actually support paging), so ``exhaustive`` has no effect here.

    :param serviceRecord: connection record providing ExternalID and auth params.
    :param exhaustive: accepted for interface parity with other services; unused.
    :return: tuple ``(activities, exclusions)`` of UploadedActivity objects and
             APIExcludeActivity records for trips that could not be synced.
    """
    def mapStatTriple(act, stats_obj, key, units):
        # Fold the optional <key>_max / <key>_min / <key>_avg fields from the
        # trip record into the given ActivityStatistic container, skipping
        # absent or falsy (zero/null) values.
        if "%s_max" % key in act and act["%s_max" % key]:
            stats_obj.update(ActivityStatistic(units, max=float(act["%s_max" % key])))
        if "%s_min" % key in act and act["%s_min" % key]:
            stats_obj.update(ActivityStatistic(units, min=float(act["%s_min" % key])))
        if "%s_avg" % key in act and act["%s_avg" % key]:
            stats_obj.update(ActivityStatistic(units, avg=float(act["%s_avg" % key])))

    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    activities = []
    exclusions = []
    # They don't actually support paging right now, for whatever reason
    params = self._add_auth_params({}, record=serviceRecord)
    res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
    res = res.json()
    if res == []:
        return [], []  # No activities
    for act in res:
        # Trips without distance/duration can't form a valid activity record.
        if "distance" not in act:
            exclusions.append(APIExcludeActivity("No distance", activityId=act["id"], userException=UserException(UserExceptionType.Corrupt)))
            continue
        if "duration" not in act or not act["duration"]:
            exclusions.append(APIExcludeActivity("No duration", activityId=act["id"], userException=UserException(UserExceptionType.Corrupt)))
            continue
        activity = UploadedActivity()
        # FIX: the API may omit or null "name"/"description"; coerce to "" so
        # the string concatenation and .strip() calls below cannot raise.
        name = act.get("name") or ""
        description = act.get("description") or ""
        logger.debug("Name " + name + ":")
        if len(name.strip()):
            activity.Name = name
        if len(description.strip()):
            activity.Notes = description
        activity.GPS = act["is_gps"]
        activity.Stationary = not activity.GPS  # I think
        # 0 = public, 1 = private, 2 = friends
        activity.Private = act["visibility"] == 1
        activity.StartTime = dateutil.parser.parse(act["departed_at"])
        try:
            activity.TZ = pytz.timezone(act["time_zone"])
        except pytz.exceptions.UnknownTimeZoneError:
            # Sometimes the time_zone returned isn't quite what we'd like it
            # So, just pull the offset from the datetime
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)
        activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)  # Overwrite dateutil's sillyness
        activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
        logger.debug("Activity s/t " + str(activity.StartTime))
        activity.AdjustTZ()
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, float(act["distance"]))
        mapStatTriple(act, activity.Stats.Power, "watts", ActivityStatisticUnit.Watts)
        mapStatTriple(act, activity.Stats.Speed, "speed", ActivityStatisticUnit.KilometersPerHour)
        mapStatTriple(act, activity.Stats.Cadence, "cad", ActivityStatisticUnit.RevolutionsPerMinute)
        mapStatTriple(act, activity.Stats.HR, "hr", ActivityStatisticUnit.BeatsPerMinute)
        if "elevation_gain" in act and act["elevation_gain"]:
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, gain=float(act["elevation_gain"])))
        if "elevation_loss" in act and act["elevation_loss"]:
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, loss=float(act["elevation_loss"])))
        # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
        activity.Type = ActivityType.Cycling
        activity.CalculateUID()
        activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"]}]
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page backwards through the user's Endomondo workout list.

    Each page (up to 45 workouts) is fetched with a ``before`` cursor derived
    from the earliest start time seen so far.  Per-workout timezones are
    resolved from track data and cached in Mongo (pickled tzinfo objects) to
    avoid re-downloading tracks on subsequent syncs.

    :param serviceRecord: connection record providing AuthToken and ExternalID.
    :param exhaustive: when False, only the first page is fetched.
    :return: tuple ``(activities, exclusions)``.
    :raises APIException: on HTTP 401/403 or an AUTH_FAILED payload (blocking),
        or on any other non-200 response.
    """
    activities = []
    exclusions = []
    earliestDate = None
    earliestFirstPageDate = None  # NOTE(review): recorded but never read in this block — presumably consumed elsewhere; confirm.
    paged = False
    while True:
        # Cursor for backwards pagination: empty on the first request.
        before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
        logger.debug("Req with " + str(params))
        response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)
        if response.status_code != 200:
            if response.status_code == 401 or response.status_code == 403:
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Unable to retrieve activity list " + str(response))
        data = response.json()
        # The API can return 200 with an error payload; treat auth failure the same as 401/403.
        if "error" in data and data["error"]["type"] == "AUTH_FAILED":
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        track_ids = []
        this_page_activities = []
        for act in data["data"]:
            # Workout times arrive as naive UTC strings; localize explicitly.
            startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
            if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                earliestDate = startTime
            logger.debug("activity pre")
            if not act["has_points"]:
                logger.warning("\t no pts")
                exclusions.append(APIExcludeActivity("No points", activityId=act["id"]))
                continue  # it'll break strava, which needs waypoints to find TZ. Meh
            if "tracking" in act and act["tracking"]:
                logger.warning("\t tracking")
                exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                continue  # come back once they've completed the activity
            track_ids.append(act["id"])
            activity = UploadedActivity()
            activity.StartTime = startTime
            activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            # Unmapped sport codes simply leave activity.Type at its default.
            if int(act["sport"]) in self._activityMappings:
                activity.Type = self._activityMappings[int(act["sport"])]
            activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"]}]
            this_page_activities.append(activity)
        # Batch-fetch any cached TZ records for this page's tracks.
        cached_track_tzs = cachedb.endomondo_activity_cache.find({"TrackID": {"$in": track_ids}})
        cached_track_tzs = dict([(x["TrackID"], x) for x in cached_track_tzs])
        logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" + str(len(track_ids)) + " cached TZ records")
        for activity in this_page_activities:
            # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
            cachedTrackData = None
            track_id = activity.UploadedTo[0]["ActivityID"]
            if track_id not in cached_track_tzs:
                # Cache miss: download the raw track and derive the TZ from it.
                logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                cachedTrackData = self._downloadRawTrackRecord(serviceRecord, track_id)
                try:
                    self._populateActivityFromTrackData(activity, cachedTrackData, minimumWaypoints=True)
                except APIExcludeActivity as e:
                    e.ExternalActivityID = track_id
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(e)
                    continue
                if not activity.TZ:
                    logger.info("Couldn't determine TZ")
                    exclusions.append(APIExcludeActivity("Couldn't determine TZ", activityId=track_id))
                    continue
                # Persist the resolved TZ (pickled tzinfo) for future syncs.
                cachedTrackRecord = {"Owner": serviceRecord.ExternalID, "TrackID": track_id, "TZ": pickle.dumps(activity.TZ), "StartTime": activity.StartTime}
                cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
            else:
                # Cache hit — NOTE(review): pickle.loads on DB contents; safe only while the cache is exclusively self-written.
                activity.TZ = pickle.loads(cached_track_tzs[track_id]["TZ"])
            activity.AdjustTZ()  # Everything returned is in UTC
            # Raw track data is stashed (None on cache hit) so the download step can reuse it.
            activity.UploadedTo[0]["ActivityData"] = cachedTrackData
            activity.Waypoints = []
            activity.CalculateUID()
            activities.append(activity)
        if not paged:
            earliestFirstPageDate = earliestDate
        # Stop after one page unless exhaustive, or when the API reports no more data.
        if not exhaustive or ("more" in data and data["more"] is False):
            break
        else:
            paged = True
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Pull the user's complete trip list from Ride With GPS.

    Ride With GPS does not page this endpoint, so ``exhaustive`` is
    effectively ignored.  Every trip record is translated into an
    UploadedActivity; trips missing distance or duration are reported
    as exclusions instead.

    :return: tuple ``(activities, exclusions)``.
    """
    def fold_stat(src, stat, key, units):
        # Merge the optional <key>_max / <key>_min / <key>_avg fields,
        # in that order, into the given ActivityStatistic container.
        for bound in ("max", "min", "avg"):
            field = "%s_%s" % (key, bound)
            if field in src and src[field]:
                stat.update(ActivityStatistic(units, **{bound: float(src[field])}))

    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported
    activities = []
    exclusions = []
    # They don't actually support paging right now, for whatever reason
    params = self._add_auth_params({}, record=serviceRecord)
    response = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
    trips = response.json()
    if trips == []:
        return [], []  # No activities
    for trip in trips:
        # Guard clauses: unusable trips become exclusions, not activities.
        if "distance" not in trip:
            exclusions.append(APIExcludeActivity("No distance", activityId=trip["id"], userException=UserException(UserExceptionType.Corrupt)))
            continue
        if "duration" not in trip or not trip["duration"]:
            exclusions.append(APIExcludeActivity("No duration", activityId=trip["id"], userException=UserException(UserExceptionType.Corrupt)))
            continue
        activity = UploadedActivity()
        logger.debug("Name " + trip["name"] + ":")
        if trip["name"].strip():
            activity.Name = trip["name"]
        if trip["description"].strip():
            activity.Notes = trip["description"]
        activity.GPS = trip["is_gps"]
        activity.Stationary = not activity.GPS  # I think
        # 0 = public, 1 = private, 2 = friends
        activity.Private = (trip["visibility"] == 1)
        activity.StartTime = dateutil.parser.parse(trip["departed_at"])
        try:
            activity.TZ = pytz.timezone(trip["time_zone"])
        except pytz.exceptions.UnknownTimeZoneError:
            # Sometimes the time_zone returned isn't quite what we'd like it
            # So, just pull the offset from the datetime
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                offset_minutes = activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60
                activity.TZ = pytz.FixedOffset(offset_minutes)
        activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)  # Overwrite dateutil's sillyness
        activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(trip["duration"]))
        logger.debug("Activity s/t " + str(activity.StartTime))
        activity.AdjustTZ()
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, float(trip["distance"]))
        # Optional min/max/avg statistic triples supplied by the API.
        for key, stat, units in (
                ("watts", activity.Stats.Power, ActivityStatisticUnit.Watts),
                ("speed", activity.Stats.Speed, ActivityStatisticUnit.KilometersPerHour),
                ("cad", activity.Stats.Cadence, ActivityStatisticUnit.RevolutionsPerMinute),
                ("hr", activity.Stats.HR, ActivityStatisticUnit.BeatsPerMinute)):
            fold_stat(trip, stat, key, units)
        for field, direction in (("elevation_gain", "gain"), ("elevation_loss", "loss")):
            if field in trip and trip[field]:
                activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, **{direction: float(trip[field])}))
        # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" wouuld be correct
        activity.Type = ActivityType.Cycling
        activity.CalculateUID()
        activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": trip["id"]}]
        activities.append(activity)
    return activities, exclusions