def _populateActivity(self, rawRecord):
    ''' Build the first level of the activity object (everything the UID needs) from RK API data '''
    act = UploadedActivity()
    duration_secs = float(rawRecord["duration"])
    # Stays local + naive; recipient services calculate the TZ as required.
    act.StartTime = datetime.strptime(rawRecord["start_time"], "%a, %d %b %Y %H:%M:%S")
    # Presumably this duration is moving time.
    act.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration_secs)
    # Inaccurate when the activity had pauses - excluded from the hash.
    act.EndTime = act.StartTime + timedelta(seconds=duration_secs)
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["total_distance"])
    elapsed = (act.EndTime - act.StartTime).total_seconds()
    if elapsed > 0:
        # Average speed over the full elapsed time - believed to match RK's own calculation.
        distance_km = act.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        act.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=distance_km / (elapsed / 60 / 60))
    act.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("total_calories"))
    rk_type = rawRecord["type"]
    if rk_type in self._activityMappings:
        act.Type = self._activityMappings[rk_type]
    act.GPS = rawRecord["has_path"] and rawRecord["tracking_mode"] == "outdoor"
    act.Stationary = not rawRecord["has_path"]
    act.CalculateUID()
    return act
def _populateActivity(self, rawRecord):
    ''' Build the first level of the activity object (everything the UID needs) from pulsstory API data '''
    act = UploadedActivity()
    # Stays local + naive; recipient services calculate the TZ as required.
    act.Name = rawRecord.get("Name")
    act.StartTime = datetime.strptime(rawRecord["StartTime"], "%Y-%m-%d %H:%M:%S")
    duration_secs = float(rawRecord["Duration"])
    act.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration_secs)
    act.EndTime = act.StartTime + timedelta(seconds=duration_secs)
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
    elapsed = (act.EndTime - act.StartTime).total_seconds()
    if elapsed > 0:
        # Average speed over the full elapsed time.
        distance_km = act.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        act.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=distance_km / (elapsed / 60 / 60))
    act.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("Energy"))
    if rawRecord["Type"] in self._activityMappings:
        act.Type = self._activityMappings[rawRecord["Type"]]
    act.GPS = rawRecord.get("HasPath", False)
    # NOTE(review): Stationary mirrors HasPoints verbatim; this looks inverted
    # (having trackpoints usually implies NOT stationary) - confirm against the
    # pulsstory schema before changing.
    act.Stationary = rawRecord.get("HasPoints", True)
    act.Notes = rawRecord.get("Notes")
    act.Private = rawRecord.get("Private", True)
    act.CalculateUID()
    return act
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the GC modern activity-search endpoint and return (activities, exclusions).

    Stops after the first page unless `exhaustive`; an empty page ends pagination.
    Raises APIException when the listing response is not valid JSON.
    """
    # https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)
        try:
            res = res.json()
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        for act in res:
            activity = UploadedActivity()
            # stationary activities have movingDuration = None while non-gps static activities have 0.0
            activity.Stationary = act["movingDuration"] is None
            activity.GPS = act["hasPolyline"]
            activity.Private = act["privacy"]["typeKey"] == "private"
            activity_name = act["activityName"]
            # BUG FIX: the conditional expression previously bound looser than '+',
            # so this logged '"Name " + name' (no colon) or just '"Untitled:"'.
            logger.debug("Name " + (activity_name if activity_name is not None else "Untitled") + ":")
            if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = activity_name
            activity_description = act["description"]
            if activity_description is not None and len(activity_description.strip()):
                activity.Notes = activity_description
            # startTimeGMT is in UTC.
            activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
            if act["elapsedDuration"] is not None:
                # elapsedDuration is in milliseconds.
                activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"]) / 1000)
            elif act["duration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))
            else:
                # Somehow duration is not defined. Set 1 second then.
                activity.EndTime = activity.StartTime + timedelta(0, 1)
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))
            activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page))
        if not exhaustive or len(res) == 0:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the GC modern activity-search endpoint and return (activities, exclusions).

    Stops after the first page unless `exhaustive`; an empty page ends pagination.
    Raises APIException when the listing response is not valid JSON.
    """
    # https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)
        try:
            res = res.json()
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        for act in res:
            activity = UploadedActivity()
            # stationary activities have movingDuration = None while non-gps static activities have 0.0
            activity.Stationary = act["movingDuration"] is None
            activity.GPS = act["hasPolyline"]
            activity.Private = act["privacy"]["typeKey"] == "private"
            activity_name = act["activityName"]
            # BUG FIX: the conditional expression previously bound looser than '+',
            # so this logged '"Name " + name' (no colon) or just '"Untitled:"'.
            logger.debug("Name " + (activity_name if activity_name is not None else "Untitled") + ":")
            if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = activity_name
            activity_description = act["description"]
            if activity_description is not None and len(activity_description.strip()):
                activity.Notes = activity_description
            # startTimeGMT is in UTC.
            activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
            if act["elapsedDuration"] is not None:
                # elapsedDuration is in milliseconds.
                activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"]) / 1000)
            elif act["duration"] is not None:
                activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))
            else:
                # ROBUSTNESS FIX (matches sibling implementation): previously a None
                # "duration" would raise TypeError; fall back to a 1-second activity.
                activity.EndTime = activity.StartTime + timedelta(0, 1)
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))
            activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page))
        if not exhaustive or len(res) == 0:
            break
        else:
            page += 1
    return activities, exclusions
def _populateActivity(self, rawRecord):
    ''' Build the first level of the activity object (everything the UID needs) from API data '''
    act = UploadedActivity()
    act.StartTime = dateutil.parser.parse(rawRecord["start"])
    act.EndTime = act.StartTime + timedelta(seconds=rawRecord["duration"])
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["distance"])
    has_gps = rawRecord["hasGps"]
    act.GPS = has_gps
    act.Stationary = not has_gps
    act.CalculateUID()
    return act
def _populateActivity(self, rawRecord):
    ''' Build the first level of the activity object (everything the UID needs) from RK API data '''
    act = UploadedActivity()
    whole_seconds = round(rawRecord["duration"])
    # Stays local + naive; recipient services calculate the TZ as required.
    act.StartTime = datetime.strptime(rawRecord["start_time"], "%a, %d %b %Y %H:%M:%S")
    # Inaccurate when the activity had pauses - excluded from the hash.
    act.EndTime = act.StartTime + timedelta(0, whole_seconds)
    act.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["total_distance"])
    elapsed = (act.EndTime - act.StartTime).total_seconds()
    if elapsed > 0:
        # Average speed over the full elapsed time - believed to match RK's own calculation.
        distance_km = act.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        act.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=distance_km / (elapsed / 60 / 60))
    act.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("total_calories"))
    if rawRecord["type"] in self._activityMappings:
        act.Type = self._activityMappings[rawRecord["type"]]
    if rawRecord.get("has_path") is False:
        act.Stationary = True
        # With no path, treat the whole duration as moving time. Seems reasonable.
        act.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(0, whole_seconds))
    else:
        act.Stationary = False
    act.CalculateUID()
    return act
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List TrainerRoad career workouts, backfilling per-workout metadata from a Mongo cache."""
    activities = []
    session = self._get_session(record=serviceRecord)
    session.headers.update({"Accept": "application/json"})
    workouts_resp = session.get("https://api.trainerroad.com/api/careerworkouts")
    if workouts_resp.status_code != 200:
        if workouts_resp.status_code == 401:
            raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        raise APIException("Workout listing error")
    cached_record = cachedb.trainerroad_meta.find_one({"ExternalID": serviceRecord.ExternalID})
    cached_workout_meta = cached_record["Workouts"] if cached_record else {}
    for workout in workouts_resp.json():
        # Their API doesn't give start/end times in the list response, so fetch
        # (and cache) the detail record for any workout we haven't seen yet.
        workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
        if workout_id in cached_workout_meta:
            meta = cached_workout_meta[workout_id]
        else:
            meta_resp = session.get("https://api.trainerroad.com/api/careerworkouts?guid=%s" % workout["Guid"])
            full_meta = meta_resp.json()
            # Keep only the fields we actually use.
            meta = {key: full_meta[key] for key in ["WorkoutDate", "WorkoutName", "WorkoutNotes", "TotalMinutes", "TotalKM", "AvgWatts", "Kj"]}
            cached_workout_meta[workout_id] = meta
        activity = UploadedActivity()
        activity.ServiceData = {"ID": int(workout_id)}
        activity.Name = meta["WorkoutName"]
        activity.Notes = meta["WorkoutNotes"]
        activity.Type = ActivityType.Cycling
        # Everything the API returns is in UTC.
        activity.StartTime = dateutil.parser.parse(meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
        activity.EndTime = activity.StartTime + timedelta(minutes=meta["TotalMinutes"])
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
        activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=meta["Kj"])
        activity.Stationary = False
        activity.GPS = False
        activity.CalculateUID()
        activities.append(activity)
    # Persist any newly fetched metadata for next time.
    cachedb.trainerroad_meta.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Workouts": cached_workout_meta}, upsert=True)
    return activities, []
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Scrape the Polar Personal Trainer calendar list view for exercise activities."""
    # TODO find out polar session timeout
    session = self._get_session(serviceRecord)
    activities = []
    exclusions = []
    date_format = "{d.day}.{d.month}.{d.year}"
    end_date = datetime.now() + timedelta(days=1.5)
    # Exhaustive sync reaches back to a date safely before any possible data.
    start_date = date(1961, 4, 12) if exhaustive else end_date - timedelta(days=60)
    params = {
        "startDate": date_format.format(d=start_date),
        "endDate": date_format.format(d=end_date)
    }
    res = session.get("https://polarpersonaltrainer.com/user/calendar/inc/listview.ftl", params=params)
    bs = BeautifulSoup(res.text, "html.parser")
    # Column positions within each calendar listing row.
    info_cell, time_cell, date_cell, result_type_cell, sport_type_cell = 0, 3, 4, 5, 6
    for row in bs.select("tr[class^=listRow]"):
        cells = row.findAll("td")
        type_data = cells[info_cell].find("input", {"name": "calendarItemTypes"})
        # Skip fitness data whatever
        if type_data["value"] != "OptimizedExercise":
            continue
        activity = UploadedActivity()
        activity.ExternalID = cells[info_cell].find("input", {"name": "calendarItem"})["value"]
        activity.Name = cells[info_cell].find("input", {"name": "calendarItemName"})["value"]
        time_text = "{} {}".format(cells[date_cell].contents[0], cells[time_cell].contents[0])
        activity.StartTime = pytz.utc.localize(datetime.strptime(time_text, "%d.%m.%Y %H:%M"))
        if "Strength Training Result" in cells[result_type_cell].contents[0]:
            activity.Type = ActivityType.StrengthTraining
            # This type of activity always stationary
            activity.Stationary = True
        else:
            sport_text = cells[sport_type_cell].contents[0]
            activity.Type = self._reverseActivityMappings.get(sport_text.lower(), ActivityType.Other)
        logger.debug("\tActivity s/t {}: {}".format(activity.StartTime, activity.Type))
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Fetch the user's rides from the Singletracker API and return (activities, exclusions).

    Raises APIException when the listing response is not valid JSON.
    """
    activities = []
    exclusions = []
    url = self.SingletrackerDomain + "getRidesByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # BUG FIX: this branch previously referenced an undefined name `resp`,
        # raising NameError instead of the intended APIException.
        raise APIException("Failed parsing Singletracker list response %s - %s" % (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Round-tripping through strftime/strptime truncates sub-second precision
        # from the epoch timestamps; keeps times naive UTC.
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
        if "stopTime" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["rideId"], "Manual": "False"}
        activity.Name = ride["trackName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.MountainBiking
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["totalDistance"])
        if "avgSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["avgSpeed"])
        activity.Notes = None
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def _populateActivity(self, rawRecord):
    ''' Fill in the top-level activity fields (those required for the UID) from pulsstory API data '''
    activity = UploadedActivity()
    # Local + naive datetime is fine here; recipient services compute TZ as needed.
    activity.Name = rawRecord.get("Name")
    activity.StartTime = datetime.strptime(rawRecord["StartTime"], "%Y-%m-%d %H:%M:%S")
    duration = float(rawRecord["Duration"])
    activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)
    activity.EndTime = activity.StartTime + timedelta(seconds=duration)
    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
    total_secs = (activity.EndTime - activity.StartTime).total_seconds()
    if total_secs > 0:
        hours = total_secs / 60 / 60
        distance_km = activity.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value
        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=distance_km / hours)
    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord.get("Energy"))
    record_type = rawRecord["Type"]
    if record_type in self._activityMappings:
        activity.Type = self._activityMappings[record_type]
    activity.GPS = rawRecord.get("HasPath", False)
    # NOTE(review): Stationary takes HasPoints verbatim; this looks inverted
    # (points present usually means NOT stationary) - confirm against the API schema.
    activity.Stationary = rawRecord.get("HasPoints", True)
    activity.Notes = rawRecord.get("Notes")
    activity.Private = rawRecord.get("Private", True)
    activity.CalculateUID()
    return activity
def _create_post(self, data):
    """Build a stationary Report activity from a blog-post XML element."""
    post = UploadedActivity()
    post.Type = ActivityType.Report
    post.Stationary = True
    post_xml = data.find("post")
    post.Name = post_xml.get("title")
    post.NotesExt = post_xml.get("formatted_body")
    post.StartTime = pytz.utc.localize(datetime.strptime(post_xml.get("created_at"), "%Y-%m-%dT%H:%M:%SZ"))
    # Posts have no duration; EndTime mirrors StartTime for consistency.
    post.EndTime = post.StartTime
    post.ServiceData = {"ActivityID": post_xml.get("id")}
    if int(post_xml.get("photos_count")) > 0:
        for photo in data.findall("photos/photo"):
            post.PhotoUrls.append({"id": photo.get("id"), "url": photo.get("image_original")})
    logger.debug("\tPost s/t {}: {}".format(post.StartTime, post.Type))
    post.CalculateUID()
    return post
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the legacy GC activity-search endpoint and return (activities, exclusions).

    Retries once with a fresh session on HTTP 403. Stops after the first page
    unless `exhaustive`. Raises APIException when the listing response is not
    valid JSON.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    session = self._get_session(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()
        retried_auth = False
        while True:
            res = session.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz})
            # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
            if res.status_code == 403 and not retried_auth:
                retried_auth = True
                session = self._get_session(serviceRecord, skip_cache=True)
            else:
                break
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            # BUG FIX: res_txt was captured but never included in the message.
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Fall back to the numeric offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Download Garmin Health activity summaries uploaded over the last day.

    WARNING - BE CAREFUL ABOUT THE DATE FILTER: upload_start_time /
    upload_end_time match the *upload* date, not the activity start date.
    E.g. an activity started 01-01-2019 but uploaded 20-05-2019 IS returned
    for an upload window of 20..21-05-2019. We therefore walk one-day upload
    windows from yesterday to now.
    """
    activities = []
    exclusions = []
    before = earliestDate = None  # define low parameter
    service_id = svcRecord._id
    # NOTE(review): `user` is looked up but never used afterwards - confirm
    # whether this query is still needed.
    user = db.users.find_one({
        'ConnectedServices': {
            '$elemMatch': {
                'ID': service_id,
                'Service': self.ID
            }
        }
    })
    afterDateObj = datetime.now() - timedelta(days=1)
    afterDate = afterDateObj.strftime("%Y-%m-%d")
    afterDate_tstmp = str(int(afterDateObj.timestamp()))
    date_now = datetime.now()
    now_tstmp = str(int(date_now.timestamp()))
    userID = svcRecord.ExternalID
    oauth_token = svcRecord.Authorization.get('OAuthToken')
    user_access_token = svcRecord.Authorization.get('AccessToken')
    user_access_token_secret = svcRecord.Authorization.get('AccessTokenSecret')
    logging.info("\t Download Garmin Health activities since : " + afterDate)
    logging.info("\t Building signin for activities summary")
    user_tokens = {
        'access_token': user_access_token,
        'access_token_secret': user_access_token_secret,
        'oauth_token': oauth_token
    }
    payload = ""
    start_date = afterDateObj
    index_total = 0
    while start_date < date_now:
        # One-day window, clamped to "now".
        end_date = start_date + timedelta(seconds=86400)
        if end_date > date_now:
            end_date = date_now
        start_date_tmstmp = str(int(start_date.timestamp()))
        start_date_str = start_date.strftime("%Y-%m-%d")
        end_date_tmstmp = str(int(end_date.timestamp()))
        end_date_str = end_date.strftime("%Y-%m-%d")
        logging.info("\t Download Garmin Health activities from %s to %s " % (start_date_str, end_date_str))
        signin_parameters = {
            'upload_start_time': start_date_tmstmp,
            'upload_end_time': end_date_tmstmp,
        }
        signin_info = self._request_signin('GET', self.URI_ACTIVITIES_SUMMARY, user_tokens, parameters=signin_parameters)
        resp = requests.request("GET", signin_info['path'], data=payload, headers=signin_info['header'])
        if resp.status_code != 204 and resp.status_code != 200:
            logging.info("\t An error occured while downloading Garmin Health activities from %s to %s " % (start_date_str, end_date_str))
        json_data = resp.json()
        if json_data:
            for item in json_data:
                index_total = index_total + 1
                activity = UploadedActivity()
                activity_name = item['activityType']
                # BUG FIX: was `item['deviceName'] is not 'unknown'` - identity
                # comparison against a string literal is unreliable (and a
                # SyntaxWarning on modern Python); compare by value.
                if item['deviceName'] != 'unknown':
                    activity_name = activity_name + " - " + item['deviceName']
                # parse date start to get timezone and date
                activity.StartTime = datetime.utcfromtimestamp(item['startTimeInSeconds'])
                activity.TZ = pytz.utc
                logging.debug("\tActivity start s/t %s: %s" % (activity.StartTime, activity_name))
                activity.EndTime = activity.StartTime + timedelta(seconds=item["durationInSeconds"])
                activity.ServiceData = {"ActivityID": item["summaryId"]}
                if "manual" in item:
                    activity.ServiceData['Manual'] = item["manual"]
                else:
                    activity.ServiceData['Manual'] = False
                # check if activity type ID exists
                if item["activityType"] not in self._reverseActivityTypeMappings:
                    # TODO: re-enable APIExcludeActivity exclusions once testing is done.
                    logger.info("\t\tUnknown activity")
                    continue
                activity.Type = self._reverseActivityTypeMappings[item["activityType"]]
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=item["distanceInMeters"])
                if "avgSpeedInMetersPerSecond" in item and "maxSpeedInMetersPerSecond" in item:
                    activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=item["avgSpeedInMetersPerSecond"], max=item["maxSpeedInMetersPerSecond"])
                else:
                    if "avgSpeedInMetersPerSecond" in item:
                        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=item["avgSpeedInMetersPerSecond"])
                    if "maxSpeedInMetersPerSecond" in item:
                        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, max=item["maxSpeedInMetersPerSecond"])
                # TODO: map Garmin fields for MovingTime, Power, Cadence,
                # Temperature and GPS flag once their names are confirmed.
                if "averageHeartRateInBeatsPerMinute" in item and "maxHeartRateInBeatsPerMinute" in item:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=item["averageHeartRateInBeatsPerMinute"], max=item["maxHeartRateInBeatsPerMinute"]))
                else:
                    if "averageHeartRateInBeatsPerMinute" in item:
                        activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=item["averageHeartRateInBeatsPerMinute"]))
                    if "maxHeartRateInBeatsPerMinute" in item:
                        activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=item["maxHeartRateInBeatsPerMinute"]))
                if "calories" in item:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=item["calories"])
                activity.Name = activity_name
                activity.Private = False
                activity.Stationary = False
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)
                logging.info("\t\t Garmin Activity ID : " + str(item["summaryId"]))
        start_date = end_date
    logging.info("\t\t total Garmin activities downloaded : " + str(index_total))
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Page through the GC legacy activity-search endpoint (modern proxy) and
    return (activities, exclusions), including device identification.

    Stops after the first page unless `exhaustive`. Raises APIException when
    the listing response is not valid JSON.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        res = self._request_with_reauth(serviceRecord, lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}))
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            # FIX: use the captured text instead of re-reading res.text (the
            # variable was previously assigned but unused).
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Fall back to the numeric offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))
            if "device" in act and act["device"]["key"] != "unknown":
                devId = DeviceIdentifier.FindMatchingIdentifierOfType(DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                ver_split = act["device"]["key"].split(".")
                ver_maj = None
                ver_min = None
                if len(ver_split) == 4:
                    # Device keys look like 2.90.0.0 - major.minor come first.
                    ver_maj = int(ver_split[0])
                    ver_min = int(ver_split[1])
                activity.Device = Device(devId, verMaj=ver_maj, verMin=ver_min)
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List the user's activities from Garmin Connect (cookie-authenticated API).

    Pages through the activity-search endpoint (100 records per request) and
    returns (activities, exclusions). With exhaustive=False only the first
    page is fetched. Activities without a distance are excluded as corrupt.

    Raises APIException when a listing response is not parseable JSON.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()
        res = requests.get(
            "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
            params={"start": (page - 1) * pageSz, "limit": pageSz},
            cookies=cookies)
        # FIX: guard the JSON parse like the other GC listing variant does -
        # a non-JSON error page previously surfaced as a bare ValueError.
        try:
            res = res.json()["results"]
        except ValueError:
            raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res.text))
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            if "sumDistance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"], userException=UserException(UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act:
                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = True
            else:
                activity.Stationary = False
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric hour offset.
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # Duration arrives as "MM:SS"
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))

            def mapStat(gcKey, statKey, type, useSourceUnits=False):
                # Copy a single GC summary field onto activity.Stats.<statKey>.
                nonlocal activity, act
                if gcKey in act:
                    value = float(act[gcKey]["value"])
                    if math.isinf(value):
                        return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                    activity.Stats.__dict__[statKey].update(ActivityStatistic(self._unitMap[act[gcKey]["uom"]], **({type: value})))
                    if useSourceUnits:
                        activity.Stats.__dict__[statKey] = activity.Stats.__dict__[statKey].asUnits(self._unitMap[act[gcKey]["uom"]])

            if "sumMovingDuration" in act:
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["sumMovingDuration"]["value"])))
            if "sumDuration" in act:
                activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(
                    minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1])))
            mapStat("minSpeed", "Speed", "min", useSourceUnits=True)  # We need to suppress conversion here, so we can fix the pace-speed issue below
            mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
            mapStat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
            mapStat("minAirTemperature", "Temperature", "min")
            mapStat("maxAirTemperature", "Temperature", "max")
            mapStat("weightedMeanAirTemperature", "Temperature", "avg")
            mapStat("sumEnergy", "Energy", "value")
            mapStat("maxHeartRate", "HR", "max")
            mapStat("weightedMeanHeartRate", "HR", "avg")
            mapStat("maxRunCadence", "RunCadence", "max")
            mapStat("weightedMeanRunCadence", "RunCadence", "avg")
            mapStat("maxBikeCadence", "Cadence", "max")
            mapStat("weightedMeanBikeCadence", "Cadence", "avg")
            mapStat("minPower", "Power", "min")
            mapStat("maxPower", "Power", "max")
            mapStat("weightedMeanPower", "Power", "avg")
            mapStat("minElevation", "Elevation", "min")
            mapStat("maxElevation", "Elevation", "max")
            mapStat("gainElevation", "Elevation", "gain")
            mapStat("lossElevation", "Elevation", "loss")
            # In Garmin Land, max can be smaller than min for this field :S
            if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
                activity.Stats.Power.Min = None
            # To get it to match what the user sees in GC.
            if activity.Stats.RunCadence.Max is not None:
                activity.Stats.RunCadence.Max *= 2
            if activity.Stats.RunCadence.Average is not None:
                activity.Stats.RunCadence.Average *= 2
            # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
            if "minSpeed" in act:
                if ":" in act["minSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Min:
                    activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
            if "maxSpeed" in act:
                if ":" in act["maxSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Max:
                    activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
            if "weightedMeanSpeed" in act:
                if ":" in act["weightedMeanSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Average:
                    activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average
            # Similarly, they do weird stuff with HR at times - %-of-max and zones
            # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
            recalcHR = False
            if "maxHeartRate" in act:
                if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act["maxHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Max = None
                    recalcHR = True
            if "weightedMeanHeartRate" in act:
                if "%" in act["weightedMeanHeartRate"]["withUnitAbbr"] or "z" in act["weightedMeanHeartRate"]["withUnitAbbr"]:
                    activity.Stats.HR.Average = None
                    recalcHR = True
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": act["activityId"], "RecalcHR": recalcHR}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List the user's Endomondo workouts via the mobile API.

    Pages backwards through workout history (45 per request, using the
    earliest-seen start time as the `before` cursor). Timezones are resolved
    from track data and cached in mongo to avoid re-downloading tracks.
    Returns (activities, exclusions).
    """
    activities = []
    exclusions = []
    earliestDate = None
    earliestFirstPageDate = None
    paged = False
    while True:
        before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
        logger.debug("Req with " + str(params))
        response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)
        if response.status_code != 200:
            if response.status_code == 401 or response.status_code == 403:
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Unable to retrieve activity list " + str(response))
        data = response.json()
        if "error" in data and data["error"]["type"] == "AUTH_FAILED":
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        track_ids = []
        this_page_activities = []
        for act in data["data"]:
            startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
            if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                earliestDate = startTime
            logger.debug("activity pre")
            if "tracking" in act and act["tracking"]:
                logger.warning("\t tracking")
                exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                continue  # come back once they've completed the activity
            track_ids.append(act["id"])
            activity = UploadedActivity()
            activity.StartTime = startTime
            activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            activity.Stationary = not act["has_points"]
            if int(act["sport"]) in self._activityMappings:
                activity.Type = self._activityMappings[int(act["sport"])]
            activity.ServiceData = {"ActivityID": act["id"]}
            this_page_activities.append(activity)
        cached_track_tzs = cachedb.endomondo_activity_cache.find({"TrackID": {"$in": track_ids}})
        cached_track_tzs = dict([(x["TrackID"], x) for x in cached_track_tzs])
        logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" + str(len(track_ids)) + " cached TZ records")
        for activity in this_page_activities:
            # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
            cachedTrackData = None
            track_id = activity.ServiceData["ActivityID"]
            if track_id not in cached_track_tzs:
                logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                cachedTrackData = self._downloadRawTrackRecord(serviceRecord, track_id)
                try:
                    self._populateActivityFromTrackData(activity, cachedTrackData, minimumWaypoints=True)
                except APIExcludeActivity as e:
                    e.ExternalActivityID = track_id
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(e)
                    continue
                if not activity.TZ and not activity.Stationary:
                    logger.info("Couldn't determine TZ")
                    exclusions.append(APIExcludeActivity("Couldn't determine TZ", activityId=track_id))
                    continue
                cachedTrackRecord = {"Owner": serviceRecord.ExternalID, "TrackID": track_id, "TZ": pickle.dumps(activity.TZ), "StartTime": activity.StartTime}
                cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
            elif not activity.Stationary:
                activity.TZ = pickle.loads(cached_track_tzs[track_id]["TZ"])
                # NOTE(review): AdjustTZ placement relative to the cached/uncached
                # branches was ambiguous in the original formatting - confirm it only
                # runs when a TZ is available. Everything returned is in UTC.
                activity.AdjustTZ()
            activity.Laps = []
            # FIX: this section previously read the stale first-loop variable `act`
            # (the page's *last* workout), stamping every activity with that
            # workout's id. Type was already set correctly in the first loop;
            # use this activity's own track_id for the ServiceData rewrite.
            activity.ServiceData = {"ActivityID": track_id, "ActivityData": cachedTrackData}
            activity.CalculateUID()
            activities.append(activity)
        if not paged:
            earliestFirstPageDate = earliestDate
        if not exhaustive or ("more" in data and data["more"] is False):
            break
        else:
            paged = True
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the user's DecathlonCoach activities.

    Queries the XML activities endpoint over six-month windows - one window
    normally, twenty extra windows (~10 years back) when exhaustive=True.
    Returns (activities, exclusions).

    Raises APIException on a 400/401/403 response.
    """
    activities = []
    exclusions = []
    now = datetime.now()
    prev = now - timedelta(6 * 365 / 12)  # roughly six months
    period = []
    aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month)
    period.append(aperiod)
    if exhaustive:
        # Walk back twenty more six-month windows.
        for _ in range(20):
            now = prev
            prev = now - timedelta(6 * 365 / 12)
            aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month)
            period.append(aperiod)
    for dateInterval in period:
        headers = self._getAuthHeaders(svcRecord)
        resp = requests.get(self.ApiEndpoint + "/users/" + str(svcRecord.ExternalID) + "/activities.xml?date=" + dateInterval, headers=headers)
        # FIX: the 400, 401 and 403 branches were three byte-identical copies;
        # collapsed into a single membership test (behavior unchanged).
        if resp.status_code in (400, 401, 403):
            logger.info(resp.content)
            raise APIException(
                "No authorization to retrieve activity list",
                block=True,
                user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        root = xml.fromstring(resp.content)
        logger.info("\t\t nb activity : " + str(len(root.findall('.//ID'))))
        for ride in root.iter('ACTIVITY'):
            activity = UploadedActivity()
            activity.TZ = pytz.timezone("UTC")
            startdate = ride.find('.//STARTDATE').text + ride.find('.//TIMEZONE').text
            datebase = parse(startdate)
            activity.StartTime = datebase  # pytz.utc.localize(datebase)
            activity.ServiceData = {"ActivityID": ride.find('ID').text, "Manual": ride.find('MANUAL').text}
            logger.info("\t\t DecathlonCoach Activity ID : " + ride.find('ID').text)
            if ride.find('SPORTID').text not in self._reverseActivityTypeMappings:
                exclusions.append(APIExcludeActivity(
                    "Unsupported activity type %s" % ride.find('SPORTID').text,
                    activity_id=ride.find('ID').text,
                    user_exception=UserException(UserExceptionType.Other)))
                logger.info("\t\tDecathlonCoach Unknown activity, sport id " + ride.find('SPORTID').text + " is not mapped")
                continue
            activity.Type = self._reverseActivityTypeMappings[ride.find('SPORTID').text]
            for val in ride.iter('VALUE'):
                if val.get('id') == self._unitMap["duration"]:
                    activity.EndTime = activity.StartTime + timedelta(0, int(val.text))
                if val.get('id') == self._unitMap["distance"]:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=int(val.text))
                if val.get('id') == self._unitMap["kcal"]:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(val.text))
                if val.get('id') == self._unitMap["speedaverage"]:
                    meterperhour = int(val.text)
                    meterpersecond = meterperhour / 3600
                    activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=meterpersecond, max=None)
            if ride.find('LIBELLE').text == "" or ride.find('LIBELLE').text is None:
                # No title supplied - synthesize one from the start date.
                txtdate = startdate.split(' ')
                activity.Name = "Sport DecathlonCoach " + txtdate[0]
            else:
                activity.Name = ride.find('LIBELLE').text
            activity.Private = False
            # NOTE(review): both fields below receive raw XML text (strings like
            # "true"/"false"?), not booleans - any non-empty string is truthy.
            # Verify intended semantics against the API before changing.
            activity.Stationary = ride.find('MANUAL').text
            activity.GPS = ride.find('ABOUT').find('TRACK').text
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """List the user's Strava activities via the v3 API.

    Pages backwards in time using the earliest-seen start time as the
    `before` cursor. Returns (activities, exclusions); unsupported activity
    types are excluded rather than dropped silently.

    Raises APIException on a 401 response.
    """
    activities = []
    exclusions = []
    before = earliestDate = None
    while True:
        if before is not None and before < 0:
            break  # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
        logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
        self._globalRateLimit()
        resp = requests.get(
            "https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities",
            headers=self._apiHeaders(svcRecord),
            params={"before": before})
        if resp.status_code == 401:
            raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        earliestDate = None
        reqdata = resp.json()
        if not len(reqdata):
            break  # No more activities to see
        for ride in reqdata:
            activity = UploadedActivity()
            # FIX: regex made a raw string - "\s"/"\(" are invalid escape
            # sequences in a plain string literal (W605). Pattern unchanged.
            activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
            activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ride["name"]))
            if not earliestDate or activity.StartTime < earliestDate:
                earliestDate = activity.StartTime
                before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())
            activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
            activity.ServiceData = {"ActivityID": ride["id"], "Manual": ride["manual"]}
            if ride["type"] not in self._reverseActivityTypeMappings:
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"], userException=UserException(UserExceptionType.Other)))
                logger.debug("\t\tUnknown activity")
                continue
            activity.Type = self._reverseActivityTypeMappings[ride["type"]]
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["distance"])
            if "max_speed" in ride or "average_speed" in ride:
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=ride["average_speed"] if "average_speed" in ride else None,
                    max=ride["max_speed"] if "max_speed" in ride else None)
            # They don't let you manually enter this, and I think it returns 0 for those activities.
            activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=ride["moving_time"] if "moving_time" in ride and ride["moving_time"] > 0 else None)
            # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?
            if "average_watts" in ride:
                activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=ride["average_watts"])
            if "average_heartrate" in ride:
                activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ride["average_heartrate"]))
            if "max_heartrate" in ride:
                activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"]))
            if "average_cadence" in ride:
                activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["average_cadence"]))
            if "average_temp" in ride:
                activity.Stats.Temperature.update(ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))
            if "calories" in ride:
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"])
            activity.Name = ride["name"]
            activity.Private = ride["private"]
            activity.Stationary = ride["manual"]
            activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
            activity.AdjustTZ()
            activity.CalculateUID()
            activities.append(activity)
        if not exhaustive or not earliestDate:
            break
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List the user's activities from Garmin Connect (cookie-authenticated API).

    Pages through the activity-search endpoint, 100 records per request,
    returning (activities, exclusions). Only the first page is fetched unless
    exhaustive=True. Activities without a distance are excluded.
    """
    # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page_num = 1
    page_size = 100
    activities = []
    exclusions = []
    while True:
        query = {"start": (page_num - 1) * page_size, "limit": page_size}
        logger.debug("Req with " + str(query))
        self._rate_limit()
        data = requests.get(
            "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
            params=query,
            cookies=cookies,
        ).json()["results"]
        if "activities" not in data:
            break  # No activities on this page - empty account.
        for wrapper in data["activities"]:
            raw = wrapper["activity"]
            if "sumDistance" not in raw:
                exclusions.append(APIExcludeActivity("No distance", activityId=raw["activityId"]))
                continue
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = ("sumSampleCountSpeed" not in raw and "sumSampleCountTimestamp" not in raw)
            try:
                activity.TZ = pytz.timezone(raw["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                activity.TZ = pytz.FixedOffset(float(raw["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + raw["activityName"]["value"] + ":")
            title = raw["activityName"]["value"]
            # Skip the "Untitled" placeholder (doesn't work for internationalized accounts, oh well).
            if title.strip() and title != "Untitled":
                activity.Name = title
            if raw["activityDescription"]["value"].strip():
                activity.Notes = raw["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC (epoch millis)
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(raw["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in raw:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(raw["sumElapsedDuration"]["value"])))
            elif "sumDuration" in raw:
                # Arrives as "MM:SS"
                dur_min, dur_sec = raw["sumDuration"]["minutesSeconds"].split(":")
                activity.EndTime = activity.StartTime + timedelta(minutes=float(dur_min), seconds=float(dur_sec))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(raw["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page_num))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(self._unitMap[raw["sumDistance"]["uom"]], value=float(raw["sumDistance"]["value"]))

            def apply_stat(gc_key, stat_key, stat_field, useSourceUnits=False):
                # Copy one GC summary value onto activity.Stats.<stat_key>.
                if gc_key not in raw:
                    return
                value = float(raw[gc_key]["value"])
                if math.isinf(value):
                    return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                unit = self._unitMap[raw[gc_key]["uom"]]
                activity.Stats.__dict__[stat_key].update(ActivityStatistic(unit, **{stat_field: value}))
                if useSourceUnits:
                    activity.Stats.__dict__[stat_key] = activity.Stats.__dict__[stat_key].asUnits(unit)

            if "sumMovingDuration" in raw:
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(seconds=float(raw["sumMovingDuration"]["value"])))
            if "sumDuration" in raw:
                timer_min, timer_sec = raw["sumDuration"]["minutesSeconds"].split(":")
                activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(minutes=float(timer_min), seconds=float(timer_sec)))
            # Suppress unit conversion on speeds so the pace-vs-speed mixup can be fixed below.
            apply_stat("minSpeed", "Speed", "min", useSourceUnits=True)
            apply_stat("maxSpeed", "Speed", "max", useSourceUnits=True)
            apply_stat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
            apply_stat("minAirTemperature", "Temperature", "min")
            apply_stat("maxAirTemperature", "Temperature", "max")
            apply_stat("weightedMeanAirTemperature", "Temperature", "avg")
            apply_stat("sumEnergy", "Energy", "value")
            apply_stat("maxHeartRate", "HR", "max")
            apply_stat("weightedMeanHeartRate", "HR", "avg")
            apply_stat("maxRunCadence", "RunCadence", "max")
            apply_stat("weightedMeanRunCadence", "RunCadence", "avg")
            apply_stat("maxBikeCadence", "Cadence", "max")
            apply_stat("weightedMeanBikeCadence", "Cadence", "avg")
            apply_stat("minPower", "Power", "min")
            apply_stat("maxPower", "Power", "max")
            apply_stat("weightedMeanPower", "Power", "avg")
            apply_stat("minElevation", "Elevation", "min")
            apply_stat("maxElevation", "Elevation", "max")
            apply_stat("gainElevation", "Elevation", "gain")
            apply_stat("lossElevation", "Elevation", "loss")
            # In Garmin Land, max can be smaller than min for this field :S
            power = activity.Stats.Power
            if power.Max is not None and power.Min is not None and power.Min > power.Max:
                power.Min = None
            # Double run cadence to match what the user sees in GC.
            if activity.Stats.RunCadence.Max is not None:
                activity.Stats.RunCadence.Max *= 2
            if activity.Stats.RunCadence.Average is not None:
                activity.Stats.RunCadence.Average *= 2
            # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
            if "minSpeed" in raw and ":" in raw["minSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Min:
                activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
            if "maxSpeed" in raw and ":" in raw["maxSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Max:
                activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
            if "weightedMeanSpeed" in raw and ":" in raw["weightedMeanSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Average:
                activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average
            # Similarly, they do weird stuff with HR at times - %-of-max and zones
            # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
            recalcHR = False
            if "maxHeartRate" in raw:
                abbr = raw["maxHeartRate"]["withUnitAbbr"]
                if "%" in abbr or "z" in abbr:
                    activity.Stats.HR.Max = None
                    recalcHR = True
            if "weightedMeanHeartRate" in raw:
                abbr = raw["weightedMeanHeartRate"]["withUnitAbbr"]
                if "%" in abbr or "z" in abbr:
                    activity.Stats.HR.Average = None
                    recalcHR = True
            activity.Type = self._resolveActivityType(raw["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": raw["activityId"], "RecalcHR": recalcHR}
            activities.append(activity)
        logger.debug("Finished page " + str(page_num) + " of " + str(data["search"]["totalPages"]))
        if not exhaustive or int(data["search"]["totalPages"]) == page_num:
            break
        page_num += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List the user's RideWithGPS trips.

    One request only - the endpoint doesn't actually support paging right
    now, for whatever reason. Returns (activities, exclusions); trips missing
    a distance or duration are excluded as corrupt.
    """
    # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
    # offset also supported

    def apply_stat_triple(trip, stat_obj, key, units):
        # Fold <key>_max / <key>_min / <key>_avg into stat_obj when present and non-falsy.
        for suffix in ("max", "min", "avg"):
            field = "%s_%s" % (key, suffix)
            if field in trip and trip[field]:
                stat_obj.update(ActivityStatistic(units, **{suffix: float(trip[field])}))

    activities = []
    exclusions = []
    params = self._add_auth_params({}, record=serviceRecord)
    resp = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
    payload = resp.json()
    # Apparently some API users are seeing this new dict-wrapped result format - I'm not
    if type(payload) is dict:
        payload = payload.get("results", [])
    if payload == []:
        return [], []  # No activities
    for trip in payload:
        if "distance" not in trip:
            exclusions.append(APIExcludeActivity("No distance", activity_id=trip["id"], user_exception=UserException(UserExceptionType.Corrupt)))
            continue
        if "duration" not in trip or not trip["duration"]:
            exclusions.append(APIExcludeActivity("No duration", activity_id=trip["id"], user_exception=UserException(UserExceptionType.Corrupt)))
            continue
        activity = UploadedActivity()
        logger.debug("Name " + trip["name"] + ":")
        if trip["name"].strip():
            activity.Name = trip["name"]
        if trip["description"].strip():
            activity.Notes = trip["description"]
        activity.GPS = trip["is_gps"]
        activity.Stationary = not activity.GPS  # I think
        # 0 = public, 1 = private, 2 = friends
        activity.Private = trip["visibility"] == 1
        activity.StartTime = dateutil.parser.parse(trip["departed_at"])
        try:
            activity.TZ = pytz.timezone(trip["time_zone"])
        except pytz.exceptions.UnknownTimeZoneError:
            # Sometimes the time_zone returned isn't quite what we'd like -
            # so, just pull the offset from the datetime itself.
            if isinstance(activity.StartTime.tzinfo, tzutc):
                activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
            else:
                # NOTE(review): this passes float minutes to pytz.FixedOffset - confirm
                # pytz tolerates non-integer values for odd offsets.
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)
        activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)  # Overwrite dateutil's sillyness
        activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(trip["duration"]))
        logger.debug("Activity s/t " + str(activity.StartTime))
        activity.AdjustTZ()
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, float(trip["distance"]))
        apply_stat_triple(trip, activity.Stats.Power, "watts", ActivityStatisticUnit.Watts)
        apply_stat_triple(trip, activity.Stats.Speed, "speed", ActivityStatisticUnit.KilometersPerHour)
        apply_stat_triple(trip, activity.Stats.Cadence, "cad", ActivityStatisticUnit.RevolutionsPerMinute)
        apply_stat_triple(trip, activity.Stats.HR, "hr", ActivityStatisticUnit.BeatsPerMinute)
        if trip.get("elevation_gain"):
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, gain=float(trip["elevation_gain"])))
        if trip.get("elevation_loss"):
            activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, loss=float(trip["elevation_loss"])))
        # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" would be correct
        activity.Type = ActivityType.Cycling
        activity.CalculateUID()
        activity.ServiceData = {"ActivityID": trip["id"]}
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """List the user's TrainerRoad workouts.

    The list endpoint omits start/end times, so per-workout metadata is
    fetched individually and cached in mongo keyed by the service's external
    ID. Returns (activities, []) - no exclusions are produced here.

    Raises APIException on login failure or any other listing error.
    """
    activities = []
    session = self._get_session(record=serviceRecord)
    session.headers.update({"Accept": "application/json"})
    workouts_resp = session.get("https://api.trainerroad.com/api/careerworkouts")
    if workouts_resp.status_code != 200:
        if workouts_resp.status_code == 401:
            raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
        raise APIException("Workout listing error")
    cached_record = cachedb.trainerroad_meta.find_one({"ExternalID": serviceRecord.ExternalID})
    cached_workout_meta = cached_record["Workouts"] if cached_record else {}
    for workout in workouts_resp.json():
        # Un/f their API doesn't provide the start/end times in the list response,
        # so we pull the extra data when it's not already cached.
        workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
        meta = cached_workout_meta.get(workout_id)
        if meta is None:
            full_meta = session.get("https://api.trainerroad.com/api/careerworkouts?guid=%s" % workout["Guid"]).json()
            # We don't need everything - keep just the fields used below.
            wanted = ("WorkoutDate", "WorkoutName", "WorkoutNotes", "TotalMinutes", "TotalKM", "AvgWatts", "Kj")
            meta = {key: full_meta[key] for key in wanted}
            cached_workout_meta[workout_id] = meta
        activity = UploadedActivity()
        activity.ServiceData = {"ID": int(workout_id)}
        activity.Name = meta["WorkoutName"]
        activity.Notes = meta["WorkoutNotes"]
        activity.Type = ActivityType.Cycling
        # Everything's in UTC
        activity.StartTime = dateutil.parser.parse(meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
        activity.EndTime = activity.StartTime + timedelta(minutes=meta["TotalMinutes"])
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
        activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=meta["Kj"])
        activity.Stationary = False
        activity.GPS = False
        activity.CalculateUID()
        activities.append(activity)
    # Persist any newly-fetched metadata for next sync.
    cachedb.trainerroad_meta.update(
        {"ExternalID": serviceRecord.ExternalID},
        {"ExternalID": serviceRecord.ExternalID, "Workouts": cached_workout_meta},
        upsert=True)
    return activities, []
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Return (activities, exclusions) for this Fitbit account.

    Pages through the Fitbit activity-list endpoint (20 activities per
    request) starting from the user's "sync skip before" date, building
    an UploadedActivity per record and an APIExcludeActivity for any
    unsupported activity type.
    """
    activities = []
    exclusions = []
    # Paging parameters for the Fitbit list endpoint.
    limit = 20  # Fitbit returns at most 20 activities per request
    offset = 0
    sort = "desc"
    userID = svcRecord.ExternalID  # Fitbit user ID
    service_id = svcRecord._id     # tapiriik service-record ID
    # Determine the date to sync from: the user's "skip before" setting wins;
    # otherwise 1 day back, or ~10 years back for an exhaustive sync.
    # NOTE(review): assumes the user document and its Config key always exist -
    # a missing record would raise here; confirm upstream guarantees.
    user = db.users.find_one({'ConnectedServices': {'$elemMatch': {'ID': service_id, 'Service': 'fitbit'}}})
    afterDateObj = datetime.now() - timedelta(days=1)
    if user['Config']['sync_skip_before'] is not None:
        afterDateObj = user['Config']['sync_skip_before']
    elif exhaustive:
        afterDateObj = datetime.now() - timedelta(days=3650)  # throw back to 10 years
    afterDate = afterDateObj.strftime("%Y-%m-%d")
    logging.info("\t Download Fitbit activities since : " + afterDate)
    uri_parameters = {
        'limit': limit,
        'offset': offset,
        'sort': sort,
        'afterDate': afterDate,
        'token': svcRecord.Authorization.get('AccessToken')
    }
    activities_uri_origin = 'https://api.fitbit.com/1/user/' + userID + '/activities/list.json'
    # offset is set to -1 when the response carries no "next" page, ending the loop.
    index_total = 0
    while offset > -1:
        uri_parameters['offset'] = offset
        activities_uri = activities_uri_origin + "?" + urlencode(uri_parameters)
        logging.info("\t\t downloading offset : " + str(offset))
        # _requestWithAuth refreshes the OAuth token if needed.
        resp = self._requestWithAuth(lambda session: session.get(
            activities_uri,
            headers={'Authorization': 'Bearer ' + svcRecord.Authorization.get('AccessToken')}), svcRecord)
        if resp.status_code != 204 and resp.status_code != 200:
            raise APIException("Unable to find Fitbit activities")
        try:
            data = resp.json()
        except ValueError:
            raise APIException("Failed parsing fitbit list response %s - %s" % (resp.status_code, resp.text))
        if data['activities']:
            ftbt_activities = data['activities']
            logging.info("\t\t nb activity : " + str(len(ftbt_activities)))
            for ftbt_activity in ftbt_activities:
                index_total += 1
                activity = UploadedActivity()
                # Strip the fractional seconds (chars 19..22) so strptime's
                # %Y-%m-%dT%H:%M:%S%z format can parse the timestamp.
                parsedDate = ftbt_activity["startTime"][0:19] + ftbt_activity["startTime"][23:]
                activity.StartTime = datetime.strptime(parsedDate, "%Y-%m-%dT%H:%M:%S%z")
                activity.TZ = pytz.utc
                logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ftbt_activity["activityName"]))
                activity.EndTime = activity.StartTime + timedelta(0, (ftbt_activity["duration"] / 1000))  # duration is ms
                activity.ServiceData = {"ActivityID": ftbt_activity["logId"], "Manual": ftbt_activity["logType"]}
                # Skip activity types we can't map.
                if ftbt_activity["activityTypeId"] not in self._reverseActivityTypeMappings:
                    exclusions.append(APIExcludeActivity(
                        "Unsupported activity type %s" % ftbt_activity["activityTypeId"],
                        activity_id=ftbt_activity["logId"],
                        user_exception=UserException(UserExceptionType.Other)))
                    logger.info("\t\tUnknown activity")
                    continue
                activity.Type = self._reverseActivityTypeMappings[ftbt_activity["activityTypeId"]]
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers,
                                                            value=ftbt_activity["distance"])
                if "speed" in ftbt_activity:
                    activity.Stats.Speed = ActivityStatistic(
                        ActivityStatisticUnit.KilometersPerHour,
                        avg=ftbt_activity["speed"],
                        max=ftbt_activity["speed"]
                    )
                # Fix: only read "calories" when present (previously it was
                # indexed unconditionally, a KeyError on records without it).
                if "calories" in ftbt_activity:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories,
                                                              value=ftbt_activity["calories"])
                if "averageHeartRate" in ftbt_activity:
                    activity.Stats.HR.update(
                        ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                          avg=ftbt_activity["averageHeartRate"]))
                # TODO: map Fitbit field names for moving time, power, max HR,
                # cadence, temperature and GPS presence when identified.
                activity.Name = ftbt_activity["activityName"]
                activity.Private = False
                # Fix: use equality, not identity ("is"), to compare strings -
                # "is" on string literals is not guaranteed to work.
                activity.Stationary = ftbt_activity['logType'] == 'manual'
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)
                logging.info("\t\t Fitbit Activity ID : " + str(ftbt_activity["logId"]))
        if not exhaustive:
            break
        # Continue paging only while Fitbit reports a "next" page.
        if 'next' not in data['pagination'] or not data['pagination']['next']:
            offset = -1
        else:
            offset += 1
    logging.info("\t\t total Fitbit activities downloaded : " + str(index_total))
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return (activities, exclusions) for this Garmin Connect account.

    Pages through the activity-search service 100 activities at a time
    (one page only unless ``exhaustive``); activities without a distance
    field are excluded as corrupt.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    cookies = self._get_cookies(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
        self._rate_limit()
        res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                           params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s" % res.status_code)
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            if "sumDistance" not in act:
                # No distance -> record is unusable downstream; exclude it.
                exclusions.append(APIExcludeActivity("No distance",
                                                     activityId=act["activityId"],
                                                     userException=UserException(UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act:
                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = True
            else:
                activity.Stationary = False
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # Duration supplied as "MM:SS".
                activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                                                                  seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
            logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]],
                                                        value=float(act["sumDistance"]["value"]))
            activity.Type = self._resolveActivityType(act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": act["activityId"]}
            activities.append(activity)
    logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
    if not exhaustive or int(res["search"]["totalPages"]) == page:
        break
    else:
        page += 1
    return activities, exclusions
def _populate_sbr_activity(self, api_sbr_activity, usersettings):
    """Build an UploadedActivity from one BeginnerTriathlete SBR feed entry.

    ``api_sbr_activity`` is one element of the basic swim/bike/run JSON
    feed; ``usersettings`` supplies the user's privacy flag and time zone.
    The basic feed carries no GPS/sensor data - if the event has a device
    upload attached, DownloadActivity fills that in later.
    """
    # Example JSON feed (unimportant fields have been removed)
    # [{
    #    "EventId": 63128401,            # Internal ID
    #    "EventType": 3,                 # Swim (3), bike (1), or run (2)
    #    "EventDate": "4/22/2016",
    #    "EventTime": "7:44 AM",         # User's time, time zone not specified
    #    "Planned": false,               # Training plan or actual data
    #    "TotalMinutes": 34.97,
    #    "TotalKilometers": 1.55448,
    #    "AverageHeartRate": 125,
    #    "MinimumHeartRate": 100,
    #    "MaximumHeartRate": 150,
    #    "MemberId": 999999,
    #    "MemberUsername": "******",
    #    "HasDeviceUpload": true,
    #    "DeviceUploadFile": "http://beginnertriathlete.com/discussion/storage/workouts/555555/abcd-123.fit",
    #    "RouteName": "",                # Might contain a description of the event
    #    "Comments": "",                 # Same as above. Not overly often used.
    # }, ... ]
    activity = UploadedActivity()
    workout_id = api_sbr_activity["EventId"]
    eventType = api_sbr_activity["EventType"]
    eventDate = api_sbr_activity["EventDate"]
    eventTime = api_sbr_activity["EventTime"]
    totalMinutes = api_sbr_activity["TotalMinutes"]
    totalKms = api_sbr_activity["TotalKilometers"]
    averageHr = api_sbr_activity["AverageHeartRate"]
    minimumHr = api_sbr_activity["MinimumHeartRate"]
    maximumHr = api_sbr_activity["MaximumHeartRate"]
    deviceUploadFile = api_sbr_activity["DeviceUploadFile"]

    # Basic SBR data does not include GPS or sensor data. If this event originated from a device upload,
    # DownloadActivity will find it.
    activity.Stationary = True
    # Same as above- The data might be there, but it's not supplied in the basic activity feed.
    activity.GPS = False
    activity.Private = usersettings["Privacy"]
    activity.Type = self._workoutTypeMappings[str(eventType)]

    # Get the user's timezone from their profile; fall back to the server default.
    # If DownloadActivity later finds device data it may overwrite this, which is
    # fine - the device data will most likely be more accurate.
    try:
        activity.TZ = pytz.timezone(usersettings["TimeZone"])
    except pytz.exceptions.UnknownTimeZoneError:
        activity.TZ = pytz.timezone(self._serverDefaultTimezone)

    # Fix: use pytz's localize() instead of datetime.replace(tzinfo=...).
    # replace() attaches the zone's raw (often LMT) offset, producing
    # incorrect wall-clock times; localize() applies the correct offset.
    activity.StartTime = activity.TZ.localize(
        dateutil.parser.parse(eventDate + " " + eventTime, dayfirst=False))
    activity.EndTime = activity.StartTime + timedelta(minutes=totalMinutes)

    # Metrics derivable from the feed data.
    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=totalKms)
    activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                          avg=float(averageHr),
                                          min=float(minimumHr),
                                          max=float(maximumHr))
    activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(totalMinutes * 60))
    activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(totalMinutes * 60))

    # While BT does support laps, the current API doesn't report on them - a limitation that may need to be
    # corrected in a future update. For now, treat manual entries as a single lap covering the whole activity.
    lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)
    activity.Laps = [lap]

    # UID is used downstream for duplicate detection across services.
    activity.CalculateUID()

    # If a device file is attached, DownloadActivity uses it for full detail.
    activity.ServiceData = {
        "ID": int(workout_id),
        "DeviceUploadFile": deviceUploadFile
    }
    return activity
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Return (activities, exclusions) for this Setio account.

    Fetches the user's runs from the Setio API in one request, then
    fetches each run's comment to use as the activity notes.
    """
    activities = []
    exclusions = []
    url = self.SetioDomain + "getRunsByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # Fix: this path referenced an undefined name ``resp`` and would
        # have raised NameError instead of the intended APIException.
        raise APIException("Failed parsing Setio list response %s - %s" %
                           (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Timestamps are epoch seconds; the strftime/strptime round-trip
        # yields a naive UTC datetime truncated to whole seconds.
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
            "%Y-%m-%d %H:%M:%S")
        if "stopTimeStamp" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["runId"], "Manual": "False"}
        activity.Name = ride["programName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.Running
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                        value=ride["totalDistance"])
        if "averageCadence" in ride:
            activity.Stats.Cadence.update(
                ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                                  avg=ride["averageCadence"]))
        if "averageSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                     avg=ride["averageSpeed"])
        # Fetch the run's comment to use as the activity notes.
        url = self.SetioDomain + "getRunComment"
        payload = {"userId": extID, "runId": activity.ServiceData["ActivityID"]}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        streamdata = requests.post(url, data=json.dumps(payload), headers=headers)
        if streamdata.status_code == 500:
            raise APIException("Internal server error")
        if streamdata.status_code == 403:
            raise APIException("No authorization to download activity", block=True,
                               user_exception=UserException(UserExceptionType.Authorization,
                                                            intervention_required=True))
        activity.Notes = None
        if streamdata.status_code == 200:  # Ok
            try:
                commentdata = streamdata.json()
            except ValueError:
                # Fix: narrowed from a bare except; requests raises ValueError
                # when the body is not valid JSON.
                raise APIException("Stream data returned is not JSON")
            if "comment" in commentdata:
                activity.Notes = commentdata["comment"]
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Return (activities, exclusions) for this Setio account.

    Fetches the user's runs from the Setio API in one request, then
    fetches each run's comment to use as the activity notes.
    """
    activities = []
    exclusions = []
    url = self.SetioDomain + "getRunsByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # Fix: this path referenced an undefined name ``resp`` and would
        # have raised NameError instead of the intended APIException.
        raise APIException("Failed parsing Setio list response %s - %s" %
                           (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Timestamps are epoch seconds; the strftime/strptime round-trip
        # yields a naive UTC datetime truncated to whole seconds.
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
            "%Y-%m-%d %H:%M:%S")
        if "stopTimeStamp" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["runId"], "Manual": "False"}
        activity.Name = ride["programName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.Running
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                        value=ride["totalDistance"])
        if "averageCadence" in ride:
            activity.Stats.Cadence.update(
                ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                                  avg=ride["averageCadence"]))
        if "averageSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                     avg=ride["averageSpeed"])
        # Fetch the run's comment to use as the activity notes.
        url = self.SetioDomain + "getRunComment"
        payload = {"userId": extID, "runId": activity.ServiceData["ActivityID"]}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        streamdata = requests.post(url, data=json.dumps(payload), headers=headers)
        if streamdata.status_code == 500:
            raise APIException("Internal server error")
        if streamdata.status_code == 403:
            raise APIException("No authorization to download activity", block=True,
                               user_exception=UserException(UserExceptionType.Authorization,
                                                            intervention_required=True))
        activity.Notes = None
        if streamdata.status_code == 200:  # Ok
            try:
                commentdata = streamdata.json()
            except ValueError:
                # Fix: narrowed from a bare except; requests raises ValueError
                # when the body is not valid JSON.
                raise APIException("Stream data returned is not JSON")
            if "comment" in commentdata:
                activity.Notes = commentdata["comment"]
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return (activities, exclusions) for this Garmin Connect account.

    Pages through the modern-proxy activity-search service 100 activities
    at a time via the re-authenticating request helper (one page only
    unless ``exhaustive``), populating device info where reported.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({
            "start": (page - 1) * pageSz,
            "limit": pageSz
        }))
        # _request_with_reauth retries the request after re-login as needed.
        res = self._request_with_reauth(
            serviceRecord,
            lambda session: session.get(
                "https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities",
                params={
                    "start": (page - 1) * pageSz,
                    "limit": pageSz
                }))
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s - %s" %
                               (res.status_code, res.text))
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(
                    float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip(
            )) and act["activityName"][
                    "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(
                    float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # Duration supplied as "MM:SS".
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) +
                         " on page " + str(page))
            activity.AdjustTZ()
            # Zero distances are dropped rather than recorded as 0.
            if "sumDistance" in act and float(
                    act["sumDistance"]["value"]) != 0:
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]],
                    value=float(act["sumDistance"]["value"]))
            if "device" in act and act["device"]["key"] != "unknown":
                devId = DeviceIdentifier.FindMatchingIdentifierOfType(
                    DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                # Device key may carry a firmware version, e.g. "2.90.0.0".
                ver_split = act["device"]["key"].split(".")
                ver_maj = None
                ver_min = None
                if len(ver_split) == 4:
                    # 2.90.0.0
                    ver_maj = int(ver_split[0])
                    ver_min = int(ver_split[1])
                activity.Device = Device(devId,
                                         verMaj=ver_maj,
                                         verMin=ver_min)
            activity.Type = self._resolveActivityType(
                act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " +
                     str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return (activities, exclusions) for this Garmin Connect account.

    Pages through the activity-search service 100 activities at a time
    using a cached session, retrying once with a fresh login on a 403.
    Activities without a distance field are excluded as corrupt.
    """
    #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
    session = self._get_session(record=serviceRecord)
    page = 1
    pageSz = 100
    activities = []
    exclusions = []
    while True:
        logger.debug("Req with " + str({
            "start": (page - 1) * pageSz,
            "limit": pageSz
        }))
        self._rate_limit()
        retried_auth = False
        while True:
            res = session.get(
                "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                params={
                    "start": (page - 1) * pageSz,
                    "limit": pageSz
                })
            # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
            if res.status_code == 403 and not retried_auth:
                retried_auth = True
                # Force a fresh (non-cached) login session and retry once.
                session = self._get_session(serviceRecord, skip_cache=True)
            else:
                break
        try:
            res = res.json()["results"]
        except ValueError:
            res_txt = res.text  # So it can capture in the log message
            raise APIException("Parse failure in GC list resp: %s" %
                               res.status_code)
        if "activities" not in res:
            break  # No activities on this page - empty account.
        for act in res["activities"]:
            act = act["activity"]
            if "sumDistance" not in act:
                # No distance -> record is unusable downstream; exclude it.
                exclusions.append(
                    APIExcludeActivity("No distance",
                                       activityId=act["activityId"],
                                       userException=UserException(
                                           UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()
            # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
            activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
            activity.GPS = "endLatitude" in act
            activity.Private = act["privacy"]["key"] == "private"
            try:
                activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Unknown zone name - fall back to the numeric offset (hours -> minutes).
                activity.TZ = pytz.FixedOffset(
                    float(act["activityTimeZone"]["offset"]) * 60)
            logger.debug("Name " + act["activityName"]["value"] + ":")
            if len(act["activityName"]["value"].strip(
            )) and act["activityName"][
                    "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                activity.Name = act["activityName"]["value"]
            if len(act["activityDescription"]["value"].strip()):
                activity.Notes = act["activityDescription"]["value"]
            # beginTimestamp/endTimestamp is in UTC
            activity.StartTime = pytz.utc.localize(
                datetime.utcfromtimestamp(
                    float(act["beginTimestamp"]["millis"]) / 1000))
            if "sumElapsedDuration" in act:
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(float(act["sumElapsedDuration"]["value"])))
            elif "sumDuration" in act:
                # Duration supplied as "MM:SS".
                activity.EndTime = activity.StartTime + timedelta(
                    minutes=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[0]),
                    seconds=float(act["sumDuration"]
                                  ["minutesSeconds"].split(":")[1]))
            else:
                activity.EndTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["endTimestamp"]["millis"]) / 1000))
            logger.debug("Activity s/t " + str(activity.StartTime) +
                         " on page " + str(page))
            activity.AdjustTZ()
            # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
            activity.Stats.Distance = ActivityStatistic(
                self._unitMap[act["sumDistance"]["uom"]],
                value=float(act["sumDistance"]["value"]))
            activity.Type = self._resolveActivityType(
                act["activityType"]["key"])
            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": int(act["activityId"])}
            activities.append(activity)
        logger.debug("Finished page " + str(page) + " of " +
                     str(res["search"]["totalPages"]))
        if not exhaustive or int(res["search"]["totalPages"]) == page:
            break
        else:
            page += 1
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False):
    """Return (activities, exclusions) for this Singletracker account.

    Fetches all of the user's rides from the Singletracker API in a
    single request; there is no pagination, so ``exhaustive`` has no
    effect here.
    """
    activities = []
    exclusions = []
    url = self.SingletrackerDomain + "getRidesByUserId"
    extID = svcRecord.ExternalID
    payload = {"userId": extID}
    headers = {
        'content-type': "application/json",
        'cache-control': "no-cache",
    }
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    try:
        reqdata = response.json()
    except ValueError:
        # Fix: this path referenced an undefined name ``resp`` and would
        # have raised NameError instead of the intended APIException.
        raise APIException(
            "Failed parsing Singletracker list response %s - %s" %
            (response.status_code, response.text))
    for ride in reqdata:
        activity = UploadedActivity()
        # Timestamps are epoch seconds; the strftime/strptime round-trip
        # yields a naive UTC datetime truncated to whole seconds.
        activity.StartTime = datetime.strptime(
            datetime.utcfromtimestamp(ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'),
            "%Y-%m-%d %H:%M:%S")
        if "stopTime" in ride:
            activity.EndTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
        activity.ServiceData = {"ActivityID": ride["rideId"], "Manual": "False"}
        activity.Name = ride["trackName"]
        logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
        activity.Type = ActivityType.MountainBiking
        if "totalDistance" in ride:
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters,
                                                        value=ride["totalDistance"])
        if "avgSpeed" in ride:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                     avg=ride["avgSpeed"])
        activity.Notes = None
        activity.GPS = True
        activity.Private = False
        activity.Stationary = False  # True = no sensor data
        activity.CalculateUID()
        activities.append(activity)
    return activities, exclusions
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    """Return (activities, exclusions) for this Endomondo account.

    Pages through the mobile workout-list API (45 per page, paging
    backwards via ``before``). For each listed workout the activity's
    time zone is resolved either from the Mongo cache or by downloading
    the raw track record, which is then cached.
    """
    activities = []
    exclusions = []
    earliestDate = None
    earliestFirstPageDate = None  # NOTE(review): recorded but not read here; possibly vestigial.
    paged = False
    while True:
        before = "" if earliestDate is None else earliestDate.astimezone(
            pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
        params = {
            "authToken": serviceRecord.Authorization["AuthToken"],
            "maxResults": 45,
            "before": before
        }
        logger.debug("Req with " + str(params))
        response = requests.get(
            "http://api.mobile.endomondo.com/mobile/api/workout/list",
            params=params)
        if response.status_code != 200:
            if response.status_code == 401 or response.status_code == 403:
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))
            raise APIException("Unable to retrieve activity list " +
                               str(response))
        data = response.json()
        # Auth failures can also come back as a 200 with an error payload.
        if "error" in data and data["error"]["type"] == "AUTH_FAILED":
            raise APIException(
                "No authorization to retrieve activity list",
                block=True,
                user_exception=UserException(
                    UserExceptionType.Authorization,
                    intervention_required=True))
        track_ids = []
        this_page_activities = []
        # First pass: build skeleton activities from the listing.
        for act in data["data"]:
            startTime = pytz.utc.localize(
                datetime.strptime(act["start_time"],
                                  "%Y-%m-%d %H:%M:%S UTC"))
            if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                earliestDate = startTime
            logger.debug("activity pre")
            if "tracking" in act and act["tracking"]:
                logger.warning("\t tracking")
                exclusions.append(
                    APIExcludeActivity("In progress",
                                       activityId=act["id"],
                                       permanent=False))
                continue  # come back once they've completed the activity
            track_ids.append(act["id"])
            activity = UploadedActivity()
            activity.StartTime = startTime
            activity.EndTime = activity.StartTime + timedelta(
                0, round(act["duration_sec"]))
            logger.debug("\tActivity s/t " + str(activity.StartTime))
            activity.Stationary = not act["has_points"]
            if int(act["sport"]) in self._activityMappings:
                activity.Type = self._activityMappings[int(act["sport"])]
            activity.ServiceData = {"ActivityID": act["id"]}
            this_page_activities.append(activity)
        # Bulk-fetch whatever TZ records are already cached for this page.
        cached_track_tzs = cachedb.endomondo_activity_cache.find(
            {"TrackID": {
                "$in": track_ids
            }})
        cached_track_tzs = dict([(x["TrackID"], x)
                                 for x in cached_track_tzs])
        logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" +
                     str(len(track_ids)) + " cached TZ records")
        # Second pass: resolve each activity's TZ (cache or raw track download).
        # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
        for activity in this_page_activities:
            cachedTrackData = None
            track_id = activity.ServiceData["ActivityID"]
            if track_id not in cached_track_tzs:
                logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                cachedTrackData = self._downloadRawTrackRecord(
                    serviceRecord, track_id)
                try:
                    self._populateActivityFromTrackData(
                        activity, cachedTrackData, minimumWaypoints=True)
                except APIExcludeActivity as e:
                    e.ExternalActivityID = track_id
                    logger.info("Encountered APIExcludeActivity %s" % str(e))
                    exclusions.append(e)
                    continue
                if not activity.TZ and not activity.Stationary:
                    logger.info("Couldn't determine TZ")
                    exclusions.append(
                        APIExcludeActivity("Couldn't determine TZ",
                                           activityId=track_id))
                    continue
                cachedTrackRecord = {
                    "Owner": serviceRecord.ExternalID,
                    "TrackID": track_id,
                    "TZ": pickle.dumps(activity.TZ),
                    "StartTime": activity.StartTime
                }
                cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
            elif not activity.Stationary:
                activity.TZ = pickle.loads(
                    cached_track_tzs[track_id]["TZ"])
            activity.AdjustTZ()  # Everything returned is in UTC
            activity.Laps = []
            # Fix: this block previously read the leaked first-loop variable
            # ``act``, stamping every activity on the page with the *last*
            # listing entry's ID and re-setting Type from stale data. Use this
            # activity's own track_id (Type was already set in the first pass);
            # ActivityData stays None for cache hits, as before.
            activity.ServiceData = {
                "ActivityID": track_id,
                "ActivityData": cachedTrackData
            }
            activity.CalculateUID()
            activities.append(activity)
        if not paged:
            earliestFirstPageDate = earliestDate
        if not exhaustive or ("more" in data and data["more"] is False):
            break
        else:
            paged = True
    return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False): activities = [] exclusions = [] before = earliestDate = None while True: if before is not None and before < 0: break # Caused by activities that "happened" before the epoch. We generally don't care about those activities... logger.debug("Req with before=" + str(before) + "/" + str(earliestDate)) self._globalRateLimit() resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before}) if resp.status_code == 401: raise APIException( "No authorization to retrieve activity list", block=True, user_exception=UserException( UserExceptionType.Authorization, intervention_required=True)) earliestDate = None reqdata = resp.json() if not len(reqdata): break # No more activities to see for ride in reqdata: activity = UploadedActivity() activity.TZ = pytz.timezone( re.sub("^\([^\)]+\)\s*", "", ride["timezone"]) ) # Comes back as "(GMT -13:37) The Stuff/We Want"" activity.StartTime = pytz.utc.localize( datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ")) logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ride["name"])) if not earliestDate or activity.StartTime < earliestDate: earliestDate = activity.StartTime before = calendar.timegm( activity.StartTime.astimezone(pytz.utc).timetuple()) activity.EndTime = activity.StartTime + timedelta( 0, ride["elapsed_time"]) activity.ServiceData = { "ActivityID": ride["id"], "Manual": ride["manual"] } if ride["type"] not in self._reverseActivityTypeMappings: exclusions.append( APIExcludeActivity("Unsupported activity type %s" % ride["type"], activity_id=ride["id"], user_exception=UserException( UserExceptionType.Other))) logger.debug("\t\tUnknown activity") continue activity.Type = self._reverseActivityTypeMappings[ride["type"]] activity.Stats.Distance = ActivityStatistic( ActivityStatisticUnit.Meters, value=ride["distance"]) if "max_speed" in ride or 
"average_speed" in ride: activity.Stats.Speed = ActivityStatistic( ActivityStatisticUnit.MetersPerSecond, avg=ride["average_speed"] if "average_speed" in ride else None, max=ride["max_speed"] if "max_speed" in ride else None) activity.Stats.MovingTime = ActivityStatistic( ActivityStatisticUnit.Seconds, value=ride["moving_time"] if "moving_time" in ride and ride["moving_time"] > 0 else None ) # They don't let you manually enter this, and I think it returns 0 for those activities. # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...? if "average_watts" in ride: activity.Stats.Power = ActivityStatistic( ActivityStatisticUnit.Watts, avg=ride["average_watts"]) if "average_heartrate" in ride: activity.Stats.HR.update( ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ride["average_heartrate"])) if "max_heartrate" in ride: activity.Stats.HR.update( ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"])) if "average_cadence" in ride: activity.Stats.Cadence.update( ActivityStatistic( ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["average_cadence"])) if "average_temp" in ride: activity.Stats.Temperature.update( ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"])) if "calories" in ride: activity.Stats.Energy = ActivityStatistic( ActivityStatisticUnit.Kilocalories, value=ride["calories"]) activity.Name = ride["name"] activity.Private = ride["private"] activity.Stationary = ride["manual"] activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None) activity.AdjustTZ() activity.CalculateUID() activities.append(activity) if not exhaustive or not earliestDate: break return activities, exclusions
def DownloadActivityList(self, svcRecord, exhaustive=False): activities = [] exclusions = [] before = earliestDate = None while True: logger.debug("Req with before=" + str(before) + "/" + str(earliestDate)) resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before}) self._logAPICall("list", (svcRecord.ExternalID, str(earliestDate)), resp.status_code == 401) if resp.status_code == 401: raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True)) earliestDate = None reqdata = resp.json() if not len(reqdata): break # No more activities to see for ride in reqdata: activity = UploadedActivity() activity.TZ = pytz.timezone(re.sub("^\([^\)]+\)\s*", "", ride["timezone"])) # Comes back as "(GMT -13:37) The Stuff/We Want"" activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ")) logger.debug("\tActivity s/t " + str(activity.StartTime)) if not earliestDate or activity.StartTime < earliestDate: earliestDate = activity.StartTime before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple()) manual = False # Determines if we bother to "download" the activity afterwards if ride["start_latlng"] is None or ride["end_latlng"] is None: manual = True activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"]) activity.ServiceData = {"ActivityID": ride["id"], "Manual": manual} actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]] if not len(actType): exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"])) logger.debug("\t\tUnknown activity") continue activity.Type = actType[0] activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["distance"]) if "max_speed" in ride or "average_speed" 
in ride: activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=ride["average_speed"] if "average_speed" in ride else None, max=ride["max_speed"] if "max_speed" in ride else None) activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(seconds=ride["moving_time"]) if "moving_time" in ride and ride["moving_time"] > 0 else None) # They don't let you manually enter this, and I think it returns 0 for those activities. activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"] if "calories" in ride else None) if "average_watts" in ride: activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=ride["average_watts"]) if "average_heartrate" in ride: activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ride["average_heartrate"])) if "max_heartrate" in ride: activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"])) if "average_cadence" in ride: activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["average_cadence"])) if "average_temp" in ride: activity.Stats.Temperature.update(ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"])) if "calories" in ride: activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"]) activity.Name = ride["name"] activity.Private = ride["private"] activity.Stationary = manual activity.AdjustTZ() activity.CalculateUID() activities.append(activity) if not exhaustive or not earliestDate: break return activities, exclusions