Example #1
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))

            res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)

            try:
                res = res.json()
            except ValueError:
                res_txt = res.text # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
            for act in res:
                activity = UploadedActivity()
                # stationary activities have movingDuration = None while non-gps static activities have 0.0
                activity.Stationary = act["movingDuration"] is None
                activity.GPS = act["hasPolyline"]

                activity.Private = act["privacy"]["typeKey"] == "private"

                activity_name = act["activityName"]
                logger.debug("Name " + activity_name if activity_name is not None else "Untitled" + ":")
                if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = activity_name

                activity_description = act["description"]
                if activity_description is not None and len(activity_description.strip()):
                    activity.Notes = activity_description

                activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
                if act["elapsedDuration"] is not None:
                    activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"])/1000)
                elif act["duration"] is not None:
                    activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))
                else:
                    # Neither duration field is defined; fall back to one second.
                    activity.EndTime = activity.StartTime + timedelta(0, 1)

                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))

                if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))

                activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page))
            if not exhaustive or len(res) == 0:
                break
            else:
                page += 1
        return activities, exclusions
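
A note on the helper used above: _request_with_reauth is not shown in this example. A minimal sketch of the pattern it implies - run the request with a cached session and retry once with a fresh session after an authorization failure - might look like the following. The 401/403 check and the exact _get_session signature are assumptions, not the actual tapiriik implementation.

    def _request_with_reauth(self, req_lambda, serviceRecord):
        # Sketch: issue the request with the cached session; if the service
        # rejects the credentials, refresh the session once and retry.
        session = self._get_session(serviceRecord)
        res = req_lambda(session)
        if res.status_code in (401, 403):
            session = self._get_session(serviceRecord, skip_cache=True)
            res = req_lambda(session)
        return res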
Example #2
    def _create_activity(self, data):
        activity = UploadedActivity()
        activity.Name = data.get("name")
        activity.StartTime = pytz.utc.localize(datetime.strptime(data.get("start_at"), "%Y-%m-%dT%H:%M:%SZ"))
        activity.EndTime = activity.StartTime + timedelta(0, float(data.get("duration")))
        sport_id = data.get("sport_id")
        activity.Type = self._reverseActivityMappings.get(int(sport_id), ActivityType.Other) if sport_id else ActivityType.Other

        distance = data.get("distance")
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(distance) if distance else None)
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(data.get("total_time_in_seconds")))
        avg_speed = data.get("average_speed")
        max_speed = data.get("max_speed")
        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=float(avg_speed) if avg_speed else None, max=float(max_speed) if max_speed else None)
        avg_hr = data.get("average_heart_rate")
        max_hr = data.get("maximum_heart_rate")
        activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(avg_hr) if avg_hr else None, max=float(max_hr) if max_hr else None))
        calories = data.get("calories")
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(calories) if calories else None)

        activity.ServiceData = {"ActivityID": data.get("id")}

        logger.debug("\tActivity s/t {}: {}".format(activity.StartTime, activity.Type))
        activity.CalculateUID()
        return activity
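
The sport_id lookup above relies on a _reverseActivityMappings dictionary that is not shown. Its shape would simply map the service's numeric sport IDs to tapiriik ActivityType values; the IDs below are placeholders chosen for illustration, not the service's real codes.

    # Hypothetical mapping: numeric sport_id -> ActivityType; any other ID
    # falls through to ActivityType.Other via the .get() lookup above.
    _reverseActivityMappings = {
        1: ActivityType.Running,
        2: ActivityType.Cycling,
        3: ActivityType.Swimming,
    }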
Example #3
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        activities = []
        exclusions = []

        for act in self._getActivities(serviceRecord, exhaustive=exhaustive):
            activity = UploadedActivity()
            activity.StartTime = dateutil.parser.parse(act['startDateTimeLocal'])
            activity.EndTime = activity.StartTime + timedelta(seconds=act['duration'])
            _type = self._activityMappings.get(act['activityType'])
            if not _type:
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % act['activityType'],
                                                     activity_id=act["activityId"],
                                                     user_exception=UserException(UserExceptionType.Other)))
                continue
            activity.ServiceData = {"ActivityID": act['activityId']}
            activity.Type = _type
            activity.Notes = act['notes']
            activity.GPS = bool(act.get('startLatitude'))
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=act['distance'])
            activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=act['calories'])
            if 'heartRateMin' in act:
                activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, min=act['heartRateMin'],
                                                      max=act['heartRateMax'], avg=act['heartRateAverage'])
            activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=act['duration'])

            if 'temperature' in act:
                activity.Stats.Temperature = ActivityStatistic(ActivityStatisticUnit.DegreesCelcius,
                                                               avg=act['temperature'])
            activity.CalculateUID()
            logger.debug("\tActivity s/t %s", activity.StartTime)
            activities.append(activity)

        return activities, exclusions
Example #4
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        activities = []
        exclusions = []

        for act in self._getActivities(serviceRecord, exhaustive=exhaustive):
            activity = UploadedActivity()
            activity.StartTime = dateutil.parser.parse(act['startDateTimeLocal'])
            activity.EndTime = activity.StartTime + timedelta(seconds=act['duration'])
            _type = self._activityMappings.get(act['activityType'])
            if not _type:
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % act['activityType'],
                                                     activity_id=act["activityId"],
                                                     user_exception=UserException(UserExceptionType.Other)))
                continue
            activity.ServiceData = {"ActivityID": act['activityId']}
            activity.Type = _type
            activity.Notes = act['notes']
            activity.GPS = bool(act.get('startLatitude'))
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=act['distance'])
            activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=act['calories'])
            if 'heartRateMin' in act:
                activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, min=act['heartRateMin'],
                                                      max=act['heartRateMax'], avg=act['heartRateAverage'])
            activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=act['duration'])

            if 'temperature' in act:
                activity.Stats.Temperature = ActivityStatistic(ActivityStatisticUnit.DegreesCelcius,
                                                               avg=act['temperature'])
            activity.CalculateUID()
            logger.debug("\tActivity s/t %s", activity.StartTime)
            activities.append(activity)

        return activities, exclusions
Example #5
    def _create_activity(self, activity_data):
        activity = UploadedActivity()

        activity.GPS = not activity_data["has-route"]
        if "detailed-sport-info" in activity_data and activity_data["detailed-sport-info"] in self._reverse_activity_type_mappings:
            activity.Type = self._reverse_activity_type_mappings[activity_data["detailed-sport-info"]]
        else:
            activity.Type = ActivityType.Other

        activity.StartTime = pytz.utc.localize(isodate.parse_datetime(activity_data["start-time"]))
        activity.EndTime = activity.StartTime + isodate.parse_duration(activity_data["duration"])

        distance = activity_data["distance"] if "distance" in activity_data else None
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(distance) if distance else None)
        hr_data = activity_data["heart-rate"] if "heart-rate" in activity_data else None
        avg_hr = hr_data["average"] if "average" in hr_data else None
        max_hr = hr_data["maximum"] if "maximum" in hr_data else None
        activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(avg_hr) if avg_hr else None, max=float(max_hr) if max_hr else None))
        calories = activity_data["calories"] if "calories" in activity_data else None
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(calories) if calories else None)

        activity.ServiceData = {"ActivityID": activity_data["id"]}

        logger.debug("\tActivity s/t {}: {}".format(activity.StartTime, activity.Type))

        activity.CalculateUID()
        return activity
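
For reference, the start time and duration in this example are ISO 8601 strings handled by the isodate package: parse_datetime returns a (naive) datetime and parse_duration returns a timedelta for "PT..." values. A small standalone illustration with made-up values:

    import isodate
    import pytz

    start = pytz.utc.localize(isodate.parse_datetime("2021-06-01T07:30:00"))
    duration = isodate.parse_duration("PT1H25M30S")  # timedelta(hours=1, minutes=25, seconds=30)
    end = start + duration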
Example #6
    def _create_activity(self, data):
        activity = UploadedActivity()
        activity.Name = data.get("name")
        activity.StartTime = pytz.utc.localize(datetime.strptime(data.get("start_at"), "%Y-%m-%dT%H:%M:%SZ"))
        activity.EndTime = activity.StartTime + timedelta(0, float(data.get("duration")))
        sport_id = data.get("sport_id")
        activity.Type = self._reverseActivityMappings.get(int(sport_id), ActivityType.Other) if sport_id else ActivityType.Other

        distance = data.get("distance")
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(distance) if distance else None)
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(data.get("total_time_in_seconds")))
        avg_speed = data.get("average_speed")
        max_speed = data.get("max_speed")
        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=float(avg_speed) if avg_speed else None, max=float(max_speed) if max_speed else None)
        avg_hr = data.get("average_heart_rate")
        max_hr = data.get("maximum_heart_rate")
        activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(avg_hr) if avg_hr else None, max=float(max_hr) if max_hr else None))
        calories = data.get("calories")
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(calories) if calories else None)

        activity.ServiceData = {"ActivityID": data.get("id")}

        logger.debug("\tActivity s/t {}: {}".format(activity.StartTime, activity.Type))
        activity.CalculateUID()
        return activity
Example #7
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        session = self._get_session(serviceRecord)
        list_params = self._with_auth(session, {"count": 20, "offset": 1})

        activities = []
        exclusions = []

        while True:
            list_resp = session.get("https://api.nike.com/me/sport/activities",
                                    params=list_params)
            list_resp = list_resp.json()

            for act in list_resp["data"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ID": act["activityId"]}

                if act["status"] != "COMPLETE":
                    exclusions.append(
                        APIExcludeActivity(
                            "Not complete",
                            activity_id=act["activityId"],
                            permanent=False,
                            user_exception=UserException(
                                UserExceptionType.LiveTracking)))
                    continue

                activity.StartTime = dateutil.parser.parse(
                    act["startTime"]).replace(tzinfo=pytz.utc)
                activity.EndTime = activity.StartTime + self._durationToTimespan(
                    act["metricSummary"]["duration"])

                tz_name = act["activityTimeZone"]

                # They say these are all IANA standard names - they aren't
                if tz_name in self._timezones:
                    tz_name = self._timezones[tz_name]

                activity.TZ = pytz.timezone(tz_name)

                if act["activityType"] in self._activityMappings:
                    activity.Type = self._activityMappings[act["activityType"]]

                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Kilometers,
                    value=float(act["metricSummary"]["distance"]))
                activity.Stats.Strides = ActivityStatistic(
                    ActivityStatisticUnit.Strides,
                    value=int(act["metricSummary"]["steps"]))
                activity.Stats.Energy = ActivityStatistic(
                    ActivityStatisticUnit.Kilocalories,
                    value=float(act["metricSummary"]["calories"]))
                activity.CalculateUID()
                activities.append(activity)

            if len(list_resp["data"]) == 0 or not exhaustive:
                break
            list_params["offset"] += list_params["count"]

        return activities, exclusions
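
_durationToTimespan is not shown here. Assuming the metricSummary duration arrives as an "H:MM:SS" string (optionally with fractional seconds), a hypothetical implementation would be:

    from datetime import timedelta

    def _durationToTimespan(self, duration):
        # Hypothetical parser for "H:MM:SS[.fff]" duration strings.
        hours, minutes, seconds = duration.split(":")
        return timedelta(hours=int(hours), minutes=int(minutes), seconds=float(seconds))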
Example #8
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        for dir in cache["Structure"]:
            for file in dir["Files"]:
                path = file["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                existing = [(k, x) for k, x in cache["Activities"].items() if x["Path"] == relPath]  # path is relative to syncroot to reduce churn if they relocate it
                existing = existing[0] if existing else None
                if existing is not None:
                    existUID, existing = existing
                if existing and existing["Rev"] == file["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existUID
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(e)
                        continue
                    del act.Laps
                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][act.UID] = {"Rev": rev, "Path": relPath, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged":tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        cachedb.dropbox_cache.update({"ExternalID": svcRec.ExternalID}, cache, upsert=True)
        return activities, exclusions
Example #9
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        cookies = self._get_cookies(record=serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, cookies=cookies)
            res = res.json()
            for act in res["items"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ActivityURI": act["uri"]}

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                activity.StartTime = dateutil.parser.parse(act["start_time"])
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.

                activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["duration"])))  # OpenFit says this excludes paused times.

                # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
                if activity.TZ == pytz.utc:
                    # So, we get the first location in the activity and calculate the TZ from that.
                    try:
                        firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                    except APIExcludeActivity:
                        pass
                    else:
                        activity.CalculateTZ(firstLocation)
                        activity.AdjustTZ()

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["total_distance"]))

                types = [x.strip().lower() for x in act["type"].split(":")]
                types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
                activity.Type = None
                for type_key in types:
                    if type_key in self._activityMappings:
                        activity.Type = self._activityMappings[type_key]
                        break
                if not activity.Type:
                    exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"]))
                    continue

                activity.CalculateUID()
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        return activities, exclusions
Example #10
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))

            res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)

            try:
                res = res.json()
            except ValueError:
                res_txt = res.text # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
            for act in res:
                activity = UploadedActivity()
                # stationary activities have movingDuration = None while non-gps static activities have 0.0
                activity.Stationary = act["movingDuration"] is None
                activity.GPS = act["hasPolyline"]

                activity.Private = act["privacy"]["typeKey"] == "private"

                activity_name = act["activityName"]
                logger.debug("Name " + activity_name if activity_name is not None else "Untitled" + ":")
                if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = activity_name

                activity_description = act["description"]
                if activity_description is not None and len(activity_description.strip()):
                    activity.Notes = activity_description

                activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
                if act["elapsedDuration"] is not None:
                    activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"])/1000)
                else:
                    activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))

                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))

                if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))

                activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page))
            if not exhaustive or len(res) == 0:
                break
            else:
                page += 1
        return activities, exclusions
Example #11
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        logger.debug("Checking motivato premium state")
        self._applyPaymentState(serviceRecord)

        logger.debug("Motivato DownloadActivityList")
        session = self._get_session(record=serviceRecord)
        activities = []
        exclusions = []

        self._rate_limit()

        retried_auth = False
        #headers = {'X-App-With-Tracks': "true"}
        headers = {}
        res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)

        if res.status_code == 403 and not retried_auth:
            retried_auth = True
            session = self._get_session(serviceRecord, skip_cache=True)
            res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)  # Retry the request once with the fresh session

        try:
            respList = res.json()
        except ValueError:
            res_txt = res.text # So it can be captured in the log message
            raise APIException("Parse failure in Motivato list resp: %s" % res.status_code)

        for actInfo in respList:
            if "duration" in actInfo:
                duration = self._durationToSeconds(actInfo["duration"])
            else:
                continue

            activity = UploadedActivity()
            if "time_start" in actInfo["metas"]:
                startTimeStr = actInfo["training_at"] + " " + actInfo["metas"]["time_start"]
            else:
                startTimeStr = actInfo["training_at"] + " 00:00:00"

            activity.StartTime = self._parseDateTime(startTimeStr)
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
            activity.Type = self._reverseActivityMappings[actInfo["discipline_id"]]
            activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)
            if "distance" in actInfo:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance"]))
            #activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerSecond, value=1.0/float(actInfo["metas"]["pace"]))

            activity.ServiceData={"WorkoutID": int(actInfo["id"])}

            activity.CalculateUID()
            logger.debug("Generated UID %s" % activity.UID)
            activities.append(activity)


        return activities, exclusions
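
_parseDateTime (used here and in the Velohero example below) is not shown. Assuming the combined string is "YYYY-MM-DD HH:MM:SS", a hypothetical implementation:

    from datetime import datetime

    def _parseDateTime(self, date_str):
        # Hypothetical: parse "YYYY-MM-DD HH:MM:SS" strings as naive datetimes;
        # the real helper may attach a timezone as well.
        return datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S")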
Example #12
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        activities = []
        session = self._get_session(record=serviceRecord)
        session.headers.update({"Accept": "application/json"})
        workouts_resp = session.get("https://api.trainerroad.com/api/careerworkouts")

        if workouts_resp.status_code != 200:
            if workouts_resp.status_code == 401:
                raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Workout listing error")

        cached_record = cachedb.trainerroad_meta.find_one({"ExternalID": serviceRecord.ExternalID})
        if not cached_record:
            cached_workout_meta = {}
        else:
            cached_workout_meta = cached_record["Workouts"]

        workouts = workouts_resp.json()
        for workout in workouts:
            # Un/f their API doesn't provide the start/end times in the list response
            # So we need to pull the extra data, if it's not already cached
            workout_id = str(workout["Id"]) # Mongo doesn't do non-string keys
            if workout_id not in cached_workout_meta:
                meta_resp = session.get("https://api.trainerroad.com/api/careerworkouts?guid=%s" % workout["Guid"])
                # We don't need everything
                full_meta = meta_resp.json()
                meta = {key: full_meta[key] for key in ["WorkoutDate", "WorkoutName", "WorkoutNotes", "TotalMinutes", "TotalKM", "AvgWatts", "Kj"]}
                cached_workout_meta[workout_id] = meta
            else:
                meta = cached_workout_meta[workout_id]

            activity = UploadedActivity()
            activity.ServiceData = {"ID": int(workout_id)}
            activity.Name = meta["WorkoutName"]
            activity.Notes = meta["WorkoutNotes"]
            activity.Type = ActivityType.Cycling

            # Everything's in UTC
            activity.StartTime = dateutil.parser.parse(meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
            activity.EndTime = activity.StartTime + timedelta(minutes=meta["TotalMinutes"])

            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
            activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
            activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=meta["Kj"])

            activity.Stationary = False
            activity.GPS = False
            activity.CalculateUID()

            activities.append(activity)

        cachedb.trainerroad_meta.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Workouts": cached_workout_meta}, upsert=True)

        return activities, []
Example #13
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        url = self.SingletrackerDomain + "getRidesByUserId"
        extID = svcRecord.ExternalID

        payload = {"userId": extID}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        response = requests.post(url, data=json.dumps(payload), headers=headers)
        try:
            reqdata = response.json()
        except ValueError:
            raise APIException("Failed parsing Singletracker list response %s - %s" % (resp.status_code, resp.text))

        for ride in reqdata:
            activity = UploadedActivity()
            activity.StartTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
            if "stopTime" in ride:
                activity.EndTime = datetime.strptime(
                    datetime.utcfromtimestamp(ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
            activity.ServiceData = {"ActivityID": ride["rideId"], "Manual": "False"}

            activity.Name = ride["trackName"]

            logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
            activity.Type = ActivityType.MountainBiking
            if "totalDistance" in ride:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["totalDistance"])

            if "avgSpeed" in ride:
                activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                         avg=ride["avgSpeed"])
            activity.Notes = None

            activity.GPS = True

            activity.Private = False
            activity.Stationary = False  # True = no sensor data

            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
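
The strftime/strptime round-trip above only truncates the Unix timestamp to whole seconds before building a naive UTC datetime. If that is the intent, an equivalent and more direct form (a sketch, not a change to the service code) is:

    from datetime import datetime

    def _timestamp_to_utc_naive(ts):
        # Same result as formatting to "%Y-%m-%d %H:%M:%S" and parsing it back:
        # a naive UTC datetime truncated to whole seconds.
        return datetime.utcfromtimestamp(int(ts))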
Example #14
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        session = self._get_session(serviceRecord)
        list_params = self._with_auth(session, {"count": 20, "offset": 1})

        activities = []
        exclusions = []

        while True:
            list_resp = session.get("https://api.nike.com/me/sport/activities", params=list_params)
            list_resp = list_resp.json()

            for act in list_resp["data"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ID": act["activityId"]}

                if act["status"] != "COMPLETE":
                    exclusions.append(APIExcludeActivity("Not complete", activityId=act["activityId"], permanent=False, userException=UserException(UserExceptionType.LiveTracking)))
                    continue

                activity.StartTime = dateutil.parser.parse(act["startTime"]).replace(tzinfo=pytz.utc)
                activity.EndTime = activity.StartTime + self._durationToTimespan(act["metricSummary"]["duration"])

                tz_name = act["activityTimeZone"]

                # They say these are all IANA standard names - they aren't
                if tz_name in self._timezones:
                    tz_name = self._timezones[tz_name]

                activity.TZ = pytz.timezone(tz_name)

                if act["activityType"] in self._activityMappings:
                    activity.Type = self._activityMappings[act["activityType"]]

                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(act["metricSummary"]["distance"]))
                activity.Stats.Strides = ActivityStatistic(ActivityStatisticUnit.Strides, value=int(act["metricSummary"]["steps"]))
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(act["metricSummary"]["calories"]))
                activity.CalculateUID()
                activities.append(activity)

            if len(list_resp["data"]) == 0 or not exhaustive:
                break
            list_params["offset"] += list_params["count"]

        return activities, exclusions
Example #15
    def _create_post(self, data):
        post = UploadedActivity()
        post.Type = ActivityType.Report
        post.Stationary = True

        post_xml = data.find("post")

        post.Name = post_xml.get("title")
        post.NotesExt = post_xml.get("formatted_body")
        post.StartTime = pytz.utc.localize(datetime.strptime(post_xml.get("created_at"), "%Y-%m-%dT%H:%M:%SZ"))
        #need to set EndTime for consistency
        post.EndTime = post.StartTime

        post.ServiceData = {"ActivityID": post_xml.get("id")}

        if int(post_xml.get("photos_count")) > 0:
            for photo_xml in data.findall("photos/photo"):
                post.PhotoUrls.append({"id": photo_xml.get("id"), "url": photo_xml.get("image_original")})

        logger.debug("\tPost s/t {}: {}".format(post.StartTime, post.Type))
        post.CalculateUID()
        return post
Example #16
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        for dir in cache["Structure"]:
            for file in dir["Files"]:
                path = file["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if not existing:
                    # Continue to use the old records keyed by UID where possible
                    existing = [
                        (k, x) for k, x in cache["Activities"].items() if "Path" in x and x["Path"] == relPath
                    ]  # path is relative to syncroot to reduce churn if they relocate it
                    existing = existing[0] if existing else None
                    if existing is not None:
                        existUID, existing = existing
                        existing["UID"] = existUID

                if existing and existing["Rev"] == file["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y"
                        )  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug(
                        "Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache")
                    )
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except:
                        pass  # We tried.

                    if hasattr(act, "OriginatedFromTapiriik") and not act.CountTotalWaypoints():
                        # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                        # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                        # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                        if ".tcx.summary-data" in path:
                            logger.info("...summary file already moved")
                        else:
                            logger.info("...moving summary-only file")
                            dbcl.file_move(path, path.replace(".tcx", ".tcx.summary-data"))
                        continue  # DON'T include in listing - it'll be regenerated
                    del act.Laps
                    act.Laps = (
                        []
                    )  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z"),
                    }
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        if "_id" in cache:
            cachedb.dropbox_cache.save(cache)
        else:
            cachedb.dropbox_cache.insert(cache)
        return activities, exclusions
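
_hash_path is not shown in this example. Its role is just to produce a stable, MongoDB-safe dictionary key from the relative path; a hypothetical equivalent (the actual hash algorithm used by the service may differ):

    import hashlib

    def _hash_path(self, path):
        # Hypothetical: stable hex digest of the relative path, usable as a
        # Mongo document key (no dots or dollar signs).
        return hashlib.sha1(path.encode("utf-8")).hexdigest()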
Example #17
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        """
        GET List of Activities as JSON File

        URL: https://app.velohero.com/export/workouts/json
        Parameters:
        user      = username
        pass      = password
        date_from = YYYY-MM-DD
        date_to   = YYYY-MM-DD
        """
        activities           = []
        exclusions           = []
        discoveredWorkoutIds = []

        params = self._add_auth_params({}, record=serviceRecord)

        limitDateFormat = "%Y-%m-%d"

        if exhaustive:
            listEnd   = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = datetime(day=1, month=1, year=1980) # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20) # Doesn't really matter

        params.update({"date_from": listStart.strftime(limitDateFormat), "date_to": listEnd.strftime(limitDateFormat)})
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        res = requests.get(self._urlRoot + "/export/workouts/json",
                           headers=self._obligatory_headers,
                           params=params)

        if res.status_code != 200:
            if res.status_code == 403:
                raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Unable to retrieve activity list")

        res.raise_for_status()
        try:
            res = res.json()
        except ValueError:
            raise APIException("Could not decode activity list")
        if "workouts" not in res:
            raise APIException("No activities")
        for workout in res["workouts"]:
            workoutId = int(workout["id"])
            if workoutId in discoveredWorkoutIds:
                continue # There's the possibility of query overlap
            discoveredWorkoutIds.append(workoutId)
            if workout["file"] != "1":
                logger.debug("Skip workout with ID: " + str(workoutId) + " (no file)")
                continue # Skip activity without samples (no PWX export)

            activity = UploadedActivity()

            logger.debug("Workout ID: " + str(workoutId))
            # Duration (dur_time)
            duration = self._durationToSeconds(workout["dur_time"])
            activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)
            # Start time (date_ymd, start_time)
            startTimeStr = workout["date_ymd"] + " " + workout["start_time"]
            activity.StartTime = self._parseDateTime(startTimeStr)
            # End time (date_ymd, start_time) + dur_time
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
            # Sport (sport_id)
            if workout["sport_id"] in self._reverseActivityMappings:
                activity.Type = self._reverseActivityMappings[workout["sport_id"]]
            else:
                activity.Type = ActivityType.Other
            # Distance (dist_km)
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(workout["dist_km"]))
            # Workout is hidden
            activity.Private = workout["hide"] == "1"

            activity.ServiceData = {"workoutId": workoutId}
            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
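
_durationToSeconds (also used in the Motivato example above) is not shown. Assuming dur_time is an "H:MM:SS" string, a hypothetical implementation:

    def _durationToSeconds(self, dur_time):
        # Hypothetical "H:MM:SS" -> total seconds conversion.
        hours, minutes, seconds = (int(part) for part in dur_time.split(":"))
        return hours * 3600 + minutes * 60 + seconds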
Example #18
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        session = self._get_session(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({
                "start": (page - 1) * pageSz,
                "limit": pageSz
            }))
            self._rate_limit()

            retried_auth = False
            while True:
                res = session.get(
                    "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                    params={
                        "start": (page - 1) * pageSz,
                        "limit": pageSz
                    })
                # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
                if res.status_code == 403 and not retried_auth:
                    retried_auth = True
                    session = self._get_session(serviceRecord, skip_cache=True)
                else:
                    break
            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text  # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s" %
                                   res.status_code)
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "sumDistance" not in act:
                    exclusions.append(
                        APIExcludeActivity("No distance",
                                           activityId=act["activityId"],
                                           userException=UserException(
                                               UserExceptionType.Corrupt)))
                    continue
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(
                        float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip(
                )) and act["activityName"][
                        "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["beginTimestamp"]["millis"]) / 1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(
                            float(act["endTimestamp"]["millis"]) / 1000))
                logger.debug("Activity s/t " + str(activity.StartTime) +
                             " on page " + str(page))
                activity.AdjustTZ()

                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]],
                    value=float(act["sumDistance"]["value"]))

                activity.Type = self._resolveActivityType(
                    act["activityType"]["key"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " +
                         str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
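
_rate_limit is not shown either. A hypothetical process-local throttle is sketched below; the real implementation may coordinate the limit across workers (e.g. via a shared store) rather than per process.

    import time

    class RateLimitMixin:
        _last_request_at = 0.0

        def _rate_limit(self, min_interval=1.0):
            # Hypothetical: sleep so consecutive requests from this process
            # are at least min_interval seconds apart.
            wait = RateLimitMixin._last_request_at + min_interval - time.time()
            if wait > 0:
                time.sleep(wait)
            RateLimitMixin._last_request_at = time.time()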
Example #19
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        url = self.SetioDomain + "getRunsByUserId"
        extID = svcRecord.ExternalID

        payload = {"userId": extID}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        response = requests.post(url,
                                 data=json.dumps(payload),
                                 headers=headers)
        try:
            reqdata = response.json()
        except ValueError:
            raise APIException("Failed parsing Setio list response %s - %s" %
                               (resp.status_code, resp.text))

        for ride in reqdata:
            activity = UploadedActivity()
            activity.StartTime = datetime.strptime(
                datetime.utcfromtimestamp(
                    ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
            if "stopTimeStamp" in ride:
                activity.EndTime = datetime.strptime(
                    datetime.utcfromtimestamp(
                        ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                    "%Y-%m-%d %H:%M:%S")
            activity.ServiceData = {
                "ActivityID": ride["runId"],
                "Manual": "False"
            }

            activity.Name = ride["programName"]

            logger.debug("\tActivity s/t %s: %s" %
                         (activity.StartTime, activity.Name))
            activity.Type = ActivityType.Running
            if "totalDistance" in ride:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters, value=ride["totalDistance"])

            if "averageCadence" in ride:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        avg=ride["averageCadence"]))

            if "averageSpeed" in ride:
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=ride["averageSpeed"])

            # get comment
            url = self.SetioDomain + "getRunComment"
            payload = {
                "userId": extID,
                "runId": activity.ServiceData["ActivityID"]
            }
            headers = {
                'content-type': "application/json",
                'cache-control': "no-cache",
            }
            streamdata = requests.post(url,
                                       data=json.dumps(payload),
                                       headers=headers)
            if streamdata.status_code == 500:
                raise APIException("Internal server error")

            if streamdata.status_code == 403:
                raise APIException("No authorization to download activity",
                                   block=True,
                                   user_exception=UserException(
                                       UserExceptionType.Authorization,
                                       intervention_required=True))

            activity.Notes = None
            if streamdata.status_code == 200:  # Ok
                try:
                    commentdata = streamdata.json()
                except:
                    raise APIException("Stream data returned is not JSON")

                if "comment" in commentdata:
                    activity.Notes = commentdata["comment"]

            activity.GPS = True

            activity.Private = False
            activity.Stationary = False  # True = no sensor data

            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
Example #20
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        session = self._get_session(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            self._rate_limit()

            retried_auth = False
            while True:
                res = session.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz})
                # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
                if res.status_code == 403 and not retried_auth:
                    retried_auth = True
                    session = self._get_session(serviceRecord, skip_cache=True)
                else:
                    break
            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s" % res.status_code)
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()

                if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))

                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()
                
                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
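
_resolveActivityType is not shown in these Garmin examples. At its simplest it is a dictionary lookup with a fallback; the mapping attribute name below is assumed, and the real resolver may also walk parent activity types, so treat this as a sketch:

    def _resolveActivityType(self, act_type_key):
        # Hypothetical: map the service's type key onto a tapiriik ActivityType,
        # defaulting to Other for unknown keys.
        return self._activityMappings.get(act_type_key, ActivityType.Other)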
Exemplo n.º 21
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        ns = self._tp_ns
        activities = []
        exclusions = []

        reqData = self._authData(svcRecord)

        limitDateFormat = "%d %B %Y"

        if exhaustive:
            listEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = datetime(day=1, month=1, year=1980) # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20) # Doesn't really matter

        lastActivityDay = None
        discoveredWorkoutIds = []
        while True:
            reqData.update({"startDate": listStart.strftime(limitDateFormat), "endDate": listEnd.strftime(limitDateFormat)})
            print("Requesting %s to %s" % (listStart, listEnd))
            resp = requests.post("https://www.trainingpeaks.com/tpwebservices/service.asmx/GetWorkoutsForAthlete", data=reqData)
            xresp = etree.XML(resp.content)
            for xworkout in xresp:
                activity = UploadedActivity()

                workoutId = xworkout.find("tpw:WorkoutId", namespaces=ns).text

                workoutDayEl = xworkout.find("tpw:WorkoutDay", namespaces=ns)
                startTimeEl = xworkout.find("tpw:StartTime", namespaces=ns)

                workoutDay = dateutil.parser.parse(workoutDayEl.text)
                startTime = dateutil.parser.parse(startTimeEl.text) if startTimeEl is not None and startTimeEl.text else None

                if lastActivityDay is None or workoutDay.replace(tzinfo=None) > lastActivityDay:
                    lastActivityDay = workoutDay.replace(tzinfo=None)

                if startTime is None:
                    continue # Planned but not executed yet.
                activity.StartTime = startTime

                endTimeEl = xworkout.find("tpw:TimeTotalInSeconds", namespaces=ns)
                if not endTimeEl.text:
                    exclusions.append(APIExcludeActivity("Activity has no duration", activity_id=workoutId, user_exception=UserException(UserExceptionType.Corrupt)))
                    continue

                activity.EndTime = activity.StartTime + timedelta(seconds=float(endTimeEl.text))

                distEl = xworkout.find("tpw:DistanceInMeters", namespaces=ns)
                if distEl.text:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(distEl.text))
                # PWX is damn near comprehensive, no need to fill in any of the other statistics here, really

                if workoutId in discoveredWorkoutIds:
                    continue # There's the possibility of query overlap, if there are multiple activities on a single day that fall across the query return limit
                discoveredWorkoutIds.append(workoutId)

                workoutTypeEl = xworkout.find("tpw:WorkoutTypeDescription", namespaces=ns)
                if workoutTypeEl.text:
                    if workoutTypeEl.text == "Day Off":
                        continue # TrainingPeaks has some weird activity types...
                    if workoutTypeEl.text not in self._workoutTypeMappings:
                        exclusions.append(APIExcludeActivity("Activity type %s unknown" % workoutTypeEl.text, activity_id=workoutId, user_exception=UserException(UserExceptionType.Corrupt)))
                        continue
                    activity.Type = self._workoutTypeMappings[workoutTypeEl.text]

                activity.ServiceData = {"WorkoutID": workoutId}
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive:
                break

            # Since TP only lets us query by date range, to get full activity history we need to query successively smaller ranges
            if len(xresp):
                if listStart == lastActivityDay:
                    break # This wouldn't work if you had more than #MaxQueryReturn activities on that day - but that number is probably 50+
                listStart = lastActivityDay
            else:
                break # We're done

        return activities, exclusions
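
# The "tpw:" element lookups above depend on a prefix-to-URI namespace map
# (self._tp_ns, not shown in this listing). A self-contained sketch of the same
# pattern with a placeholder URI - the real TrainingPeaks namespace will differ:
from xml.etree import ElementTree as ET

ns = {"tpw": "http://example.com/tpwebservices"}  # assumed placeholder URI
doc = ET.XML(
    '<Workouts xmlns="http://example.com/tpwebservices">'
    '<Workout><WorkoutId>123</WorkoutId><TimeTotalInSeconds>3600</TimeTotalInSeconds></Workout>'
    '</Workouts>'
)
for xworkout in doc:
    print(xworkout.find("tpw:WorkoutId", namespaces=ns).text)           # 123
    print(xworkout.find("tpw:TimeTotalInSeconds", namespaces=ns).text)  # 3600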
Exemplo n.º 22
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))

            res = self._request_with_reauth(serviceRecord, lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}))

            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text # So it can capture in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res.text))
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()

                if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))

                if "device" in act and act["device"]["key"] != "unknown":
                    devId = DeviceIdentifier.FindMatchingIdentifierOfType(DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                    ver_split = act["device"]["key"].split(".")
                    ver_maj = None
                    ver_min = None
                    if len(ver_split) == 4:
                        # 2.90.0.0
                        ver_maj = int(ver_split[0])
                        ver_min = int(ver_split[1])
                    activity.Device = Device(devId, verMaj=ver_maj, verMin=ver_min)

                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
Exemplo n.º 23
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):

        def mapStatTriple(act, stats_obj, key, units):
            if "%s_max" % key in act and act["%s_max" % key]:
                stats_obj.update(ActivityStatistic(units, max=float(act["%s_max" % key])))
            if "%s_min" % key in act and act["%s_min" % key]:
                stats_obj.update(ActivityStatistic(units, min=float(act["%s_min" % key])))
            if "%s_avg" % key in act and act["%s_avg" % key]:
                stats_obj.update(ActivityStatistic(units, avg=float(act["%s_avg" % key])))


        # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
        # offset also supported
        activities = []
        exclusions = []
        # They don't actually support paging right now, for whatever reason
        params = self._add_auth_params({}, record=serviceRecord)

        res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
        res = res.json()

        # Apparently some API users are seeing this new result format - I'm not
        if type(res) is dict:
            res = res.get("results", [])

        if res == []:
            return [], [] # No activities
        for act in res:
            if "distance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activity_id=act["id"], user_exception=UserException(UserExceptionType.Corrupt)))
                continue
            if "duration" not in act or not act["duration"]:
                exclusions.append(APIExcludeActivity("No duration", activity_id=act["id"], user_exception=UserException(UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()

            logger.debug("Name " + act["name"] + ":")
            if len(act["name"].strip()):
                activity.Name = act["name"]

            if len(act["description"].strip()):
                activity.Notes = act["description"]

            activity.GPS = act["is_gps"]
            activity.Stationary = not activity.GPS # I think

            # 0 = public, 1 = private, 2 = friends
            activity.Private = act["visibility"] == 1

            activity.StartTime = dateutil.parser.parse(act["departed_at"])

            try:
                activity.TZ = pytz.timezone(act["time_zone"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Sometimes the time_zone returned isn't quite what we'd like it
                # So, just pull the offset from the datetime
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)

            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ) # Overwrite dateutil's sillyness

            activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.AdjustTZ()

            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, float(act["distance"]))

            mapStatTriple(act, activity.Stats.Power, "watts", ActivityStatisticUnit.Watts)
            mapStatTriple(act, activity.Stats.Speed, "speed", ActivityStatisticUnit.KilometersPerHour)
            mapStatTriple(act, activity.Stats.Cadence, "cad", ActivityStatisticUnit.RevolutionsPerMinute)
            mapStatTriple(act, activity.Stats.HR, "hr", ActivityStatisticUnit.BeatsPerMinute)

            if "elevation_gain" in act and act["elevation_gain"]:
                activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, gain=float(act["elevation_gain"])))

            if "elevation_loss" in act and act["elevation_loss"]:
                activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, loss=float(act["elevation_loss"])))

            # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" would be correct
            activity.Type = ActivityType.Cycling

            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": act["id"]}
            activities.append(activity)
        return activities, exclusions
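
# The UnknownTimeZoneError fallback above turns whatever fixed offset dateutil
# attached to the parsed datetime into a pytz timezone. A minimal sketch of
# that conversion in isolation (the sample timestamp is made up):
import dateutil.parser
import pytz
from dateutil.tz import tzutc

dt = dateutil.parser.parse("2016-04-22T07:44:00-04:00")
if isinstance(dt.tzinfo, tzutc):
    tz = pytz.utc  # dateutil's tzutc has no fixed offset to extract
else:
    tz = pytz.FixedOffset(dt.tzinfo.utcoffset(dt).total_seconds() / 60)  # UTC-04:00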
Exemplo n.º 24
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        cookies = self._get_cookies(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({
                "start": (page - 1) * pageSz,
                "limit": pageSz
            }))
            self._rate_limit()
            res = requests.get(
                "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                params={
                    "start": (page - 1) * pageSz,
                    "limit": pageSz
                },
                cookies=cookies)
            res = res.json()["results"]
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "sumDistance" not in act:
                    exclusions.append(
                        APIExcludeActivity("No distance",
                                           activityId=act["activityId"],
                                           userException=UserException(
                                               UserExceptionType.Corrupt)))
                    continue
                activity = UploadedActivity()

                if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act:  # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                    activity.Stationary = True
                else:
                    activity.Stationary = False

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(
                        float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip(
                )) and act["activityName"][
                        "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]
                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["beginTimestamp"]["millis"]) / 1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(
                            float(act["endTimestamp"]["millis"]) / 1000))
                logger.debug("Activity s/t " + str(activity.StartTime) +
                             " on page " + str(page))
                activity.AdjustTZ()
                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]],
                    value=float(act["sumDistance"]["value"]))

                def mapStat(gcKey, statKey, type, useSourceUnits=False):
                    nonlocal activity, act
                    if gcKey in act:
                        value = float(act[gcKey]["value"])
                        if math.isinf(value):
                            return  # GC returns the minimum speed as "-Infinity" instead of 0 sometimes :S
                        activity.Stats.__dict__[statKey].update(
                            ActivityStatistic(self._unitMap[act[gcKey]["uom"]],
                                              **({
                                                  type: value
                                              })))
                        if useSourceUnits:
                            activity.Stats.__dict__[
                                statKey] = activity.Stats.__dict__[
                                    statKey].asUnits(
                                        self._unitMap[act[gcKey]["uom"]])

                if "sumMovingDuration" in act:
                    activity.Stats.MovingTime = ActivityStatistic(
                        ActivityStatisticUnit.Time,
                        value=timedelta(
                            seconds=float(act["sumMovingDuration"]["value"])))

                if "sumDuration" in act:
                    activity.Stats.TimerTime = ActivityStatistic(
                        ActivityStatisticUnit.Time,
                        value=timedelta(
                            minutes=float(act["sumDuration"]
                                          ["minutesSeconds"].split(":")[0]),
                            seconds=float(act["sumDuration"]
                                          ["minutesSeconds"].split(":")[1])))

                mapStat(
                    "minSpeed", "Speed", "min", useSourceUnits=True
                )  # We need to suppress conversion here, so we can fix the pace-speed issue below
                mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
                mapStat("weightedMeanSpeed",
                        "Speed",
                        "avg",
                        useSourceUnits=True)
                mapStat("minAirTemperature", "Temperature", "min")
                mapStat("maxAirTemperature", "Temperature", "max")
                mapStat("weightedMeanAirTemperature", "Temperature", "avg")
                mapStat("sumEnergy", "Energy", "value")
                mapStat("maxHeartRate", "HR", "max")
                mapStat("weightedMeanHeartRate", "HR", "avg")
                mapStat("maxRunCadence", "RunCadence", "max")
                mapStat("weightedMeanRunCadence", "RunCadence", "avg")
                mapStat("maxBikeCadence", "Cadence", "max")
                mapStat("weightedMeanBikeCadence", "Cadence", "avg")
                mapStat("minPower", "Power", "min")
                mapStat("maxPower", "Power", "max")
                mapStat("weightedMeanPower", "Power", "avg")
                mapStat("minElevation", "Elevation", "min")
                mapStat("maxElevation", "Elevation", "max")
                mapStat("gainElevation", "Elevation", "gain")
                mapStat("lossElevation", "Elevation", "loss")

                # In Garmin Land, max can be smaller than min for this field :S
                if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
                    activity.Stats.Power.Min = None

                # To get it to match what the user sees in GC.
                if activity.Stats.RunCadence.Max is not None:
                    activity.Stats.RunCadence.Max *= 2
                if activity.Stats.RunCadence.Average is not None:
                    activity.Stats.RunCadence.Average *= 2

                # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
                if "minSpeed" in act:
                    if ":" in act["minSpeed"][
                            "withUnitAbbr"] and activity.Stats.Speed.Min:
                        activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
                if "maxSpeed" in act:
                    if ":" in act["maxSpeed"][
                            "withUnitAbbr"] and activity.Stats.Speed.Max:
                        activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
                if "weightedMeanSpeed" in act:
                    if ":" in act["weightedMeanSpeed"][
                            "withUnitAbbr"] and activity.Stats.Speed.Average:
                        activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average

                # Similarly, they do weird stuff with HR at times - %-of-max and zones
                # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
                recalcHR = False
                if "maxHeartRate" in act:
                    if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act[
                            "maxHeartRate"]["withUnitAbbr"]:
                        activity.Stats.HR.Max = None
                        recalcHR = True
                if "weightedMeanHeartRate" in act:
                    if "%" in act["weightedMeanHeartRate"][
                            "withUnitAbbr"] or "z" in act[
                                "weightedMeanHeartRate"]["withUnitAbbr"]:
                        activity.Stats.HR.Average = None
                        recalcHR = True

                activity.Type = self._resolveActivityType(
                    act["activityType"]["key"])

                activity.CalculateUID()
                activity.ServiceData = {
                    "ActivityID": act["activityId"],
                    "RecalcHR": recalcHR
                }

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " +
                         str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
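
# The speed fix above handles GC mislabelling paces as speeds: when the unit
# abbreviation contains ":", the stored number is really minutes per km (or per
# mile), so 60 divided by it gives the actual speed. A worked example:
pace_min_per_km = 5.0             # what GC reports, labelled as if it were km/h
speed_kph = 60 / pace_min_per_km  # 12.0 km/h, the value the user actually sees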
Exemplo n.º 25
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        cookies = self._get_cookies(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            self._rate_limit()
            res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text # So it can capture in the log message
                raise APIException("Parse failure in GC list resp: %s" % res.status_code)
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "sumDistance" not in act:
                    exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"], userException=UserException(UserExceptionType.Corrupt)))
                    continue
                activity = UploadedActivity()

                if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act: # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                    activity.Stationary = True
                else:
                    activity.Stationary = False

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()

                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))

                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()
                activity.ServiceData = {"ActivityID": act["activityId"]}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
Exemplo n.º 26
0
    def _populate_sbr_activity(self, api_sbr_activity, usersettings):
        # Example JSON feed (unimportant fields have been removed)
        # [{
        #    "EventId": 63128401,                   #  Internal ID
        #    "EventType": 3,                        #  Swim (3), bike (1), or run (2)
        #    "EventDate": "4/22/2016",
        #    "EventTime": "7:44 AM",                #  User's time, time zone not specified
        #    "Planned": false,                      #  Training plan or actual data
        #    "TotalMinutes": 34.97,
        #    "TotalKilometers": 1.55448,
        #    "AverageHeartRate": 125,
        #    "MinimumHeartRate": 100,
        #    "MaximumHeartRate": 150,
        #    "MemberId": 999999,
        #    "MemberUsername": "******",
        #    "HasDeviceUpload": true,
        #    "DeviceUploadFile": "http://beginnertriathlete.com/discussion/storage/workouts/555555/abcd-123.fit",
        #    "RouteName": "",                       #  Might contain a description of the event
        #    "Comments": "",                        #  Same as above. Not overly often used.
        # }, ... ]

        activity = UploadedActivity()
        workout_id = api_sbr_activity["EventId"]
        eventType = api_sbr_activity["EventType"]
        eventDate = api_sbr_activity["EventDate"]
        eventTime = api_sbr_activity["EventTime"]
        totalMinutes = api_sbr_activity["TotalMinutes"]
        totalKms = api_sbr_activity["TotalKilometers"]
        averageHr = api_sbr_activity["AverageHeartRate"]
        minimumHr = api_sbr_activity["MinimumHeartRate"]
        maximumHr = api_sbr_activity["MaximumHeartRate"]
        deviceUploadFile = api_sbr_activity["DeviceUploadFile"]

        # Basic SBR data does not include GPS or sensor data. If this event originated from a device upload,
        # DownloadActivity will find it.
        activity.Stationary = True

        # Same as above- The data might be there, but it's not supplied in the basic activity feed.
        activity.GPS = False

        activity.Private = usersettings["Privacy"]
        activity.Type = self._workoutTypeMappings[str(eventType)]

        # Get the user's timezone from their profile. (Activity.TZ should be mentioned in the object hierarchy docs?)
        # Question: I believe if DownloadActivity finds device data, it will overwrite this. Which is OK with me.
        # The device data will most likely be more accurate.
        try:
            activity.TZ = pytz.timezone(usersettings["TimeZone"])
        except pytz.exceptions.UnknownTimeZoneError:
            activity.TZ = pytz.timezone(self._serverDefaultTimezone)

        # activity.StartTime and EndTime aren't mentioned in the object hierarchy docs, but I see them
        # set in all the other providers.
        activity.StartTime = dateutil.parser.parse(
            eventDate + " " + eventTime,
            dayfirst=False).replace(tzinfo=activity.TZ)
        activity.EndTime = activity.StartTime + timedelta(minutes=totalMinutes)

        # We can calculate some metrics from the supplied data. Would love to see some non-source code documentation
        # on each statistic and what it expects as input.
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers,
                                                    value=totalKms)
        activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                              avg=float(averageHr),
                                              min=float(minimumHr),
                                              max=float(maximumHr))
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                      value=float(totalMinutes * 60))
        activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                     value=float(totalMinutes * 60))
        # While BT does support laps, the current API doesn't report on them - a limitation that may need to be
        # corrected in a future update. For now, treat manual entries as a single lap. As more and more people upload
        # workouts using devices anyway, this probably matters much less than it once did.
        lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)
        activity.Laps = [lap]

        # Not 100% positive how this is utilized, but it is common for all providers. Detects duplicate downloads?
        activity.CalculateUID()

        # If a device file is attached, we'll get more details about this event in DownloadActivity
        activity.ServiceData = {
            "ID": int(workout_id),
            "DeviceUploadFile": deviceUploadFile
        }

        return activity
Exemplo n.º 27
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        # Dropbox API v2 doesn't like / as root.
        if syncRoot == "/":
            syncRoot = ""
        # New Dropbox API prefers path_lower, it would seem.
        syncRoot = syncRoot.lower()

        # There used to be a massive affair going on here to cache the folder structure locally.
        # Dropbox API 2.0 doesn't support the hashes I need for that.
        # Oh well. Throw that data out now. Well, don't load it at all.
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID}, {"ExternalID": True, "Activities": True})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Activities": {}}

        try:
            list_result = dbcl.files_list_folder(syncRoot, recursive=True)
        except dropbox.exceptions.DropboxException as e:
            self._raiseDbException(e)

        def cache_writeback():
            if "_id" in cache:
                cachedb.dropbox_cache.save(cache)
            else:
                insert_result = cachedb.dropbox_cache.insert(cache)
                cache["_id"] = insert_result.inserted_id


        activities = []
        exclusions = []
        discovered_activity_cache_keys = set()

        while True:
            for entry in list_result.entries:
                if not hasattr(entry, "rev"):
                    # Not a file -> we don't care.
                    continue
                path = entry.path_lower

                if not path.endswith(".gpx") and not path.endswith(".tcx"):
                    # Not an activity file -> we don't care.
                    continue

                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                discovered_activity_cache_keys.add(hashedRelPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if existing and existing["Rev"] == entry.rev:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y") # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except:
                        pass # We tried.

                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {"Rev": rev, "UID": act.UID, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
                    # Incrementally update the cache db.
                    # Otherwise, if we crash later on in listing
                    # (due to OOM or similar), we'll never make progress on this account.
                    cache_writeback()
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

            # Perform pagination.
            if list_result.has_more:
                list_result = dbcl.files_list_folder_continue(list_result.cursor)
            else:
                break

        # Drop deleted activities' records from cache.
        all_activity_cache_keys = set(cache["Activities"].keys())
        for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys:
            del cache["Activities"][deleted_key]

        cache_writeback()
        return activities, exclusions
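
# _hash_path isn't shown in this listing; the meta cache just needs a stable,
# filename-safe key for each relative path. A hypothetical stand-in - the real
# implementation may well differ in hash choice or normalisation:
import hashlib

def _hash_path_sketch(rel_path):
    # Lower-case first so a renamed-only-in-case file keeps the same cache entry.
    return hashlib.sha1(rel_path.lower().encode("utf-8")).hexdigest()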
Exemplo n.º 28
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one(
            {"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {
                "ExternalID": svcRec.ExternalID,
                "Structure": [],
                "Activities": {}
            }
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        for dir in cache["Structure"]:
            for file in dir["Files"]:
                path = file["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "",
                                           1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if not existing:
                    # Continue to use the old records keyed by UID where possible
                    existing = [
                        (k, x) for k, x in cache["Activities"].items()
                        if "Path" in x and x["Path"] == relPath
                    ]  # path is relative to syncroot to reduce churn if they relocate it
                    existing = existing[0] if existing else None
                    if existing is not None:
                        existUID, existing = existing
                        existing["UID"] = existUID

                if existing and existing["Rev"] == file["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y"
                        )  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(
                            existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" %
                                 (path, "outdated meta cache"
                                  if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" %
                                    str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except:
                        pass  # We tried.

                    if hasattr(act, "OriginatedFromTapiriik"
                               ) and not act.CountTotalWaypoints():
                        # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                        # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                        # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                        if ".tcx.summary-data" in path:
                            logger.info("...summary file already moved")
                        else:
                            logger.info("...moving summary-only file")
                            dbcl.file_move(
                                path, path.replace(".tcx",
                                                   ".tcx.summary-data"))
                        continue  # DON'T include in listing - it'll be regenerated
                    del act.Laps
                    act.Laps = [
                    ]  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime":
                        act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")
                    }
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        if "_id" in cache:
            cachedb.dropbox_cache.save(cache)
        else:
            cachedb.dropbox_cache.insert(cache)
        return activities, exclusions
Exemplo n.º 29
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):

        activities = []
        exclusions = []
        earliestDate = None
        earliestFirstPageDate = None
        paged = False

        while True:
            before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
            params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
            logger.debug("Req with " + str(params))
            response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)

            if response.status_code != 200:
                if response.status_code == 401 or response.status_code == 403:
                    raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
                raise APIException("Unable to retrieve activity list " + str(response))
            data = response.json()

            if "error" in data and data["error"]["type"] == "AUTH_FAILED":
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

            track_ids = []
            this_page_activities = []
            for act in data["data"]:
                startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
                if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                    earliestDate = startTime
                logger.debug("activity pre")
                if "tracking" in act and act["tracking"]:
                    logger.warning("\t tracking")
                    exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                    continue  # come back once they've completed the activity
                track_ids.append(act["id"])
                activity = UploadedActivity()
                activity.StartTime = startTime
                activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
                logger.debug("\tActivity s/t " + str(activity.StartTime))

                activity.Stationary = not act["has_points"]

                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]
                activity.ServiceData = {"ActivityID": act["id"]}

                this_page_activities.append(activity)
            cached_track_tzs = cachedb.endomondo_activity_cache.find({"TrackID":{"$in": track_ids}})
            cached_track_tzs = dict([(x["TrackID"], x) for x in cached_track_tzs])
            logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" + str(len(track_ids)) + " cached TZ records")

            for activity in this_page_activities:
                # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
                cachedTrackData = None
                track_id = activity.ServiceData["ActivityID"]

                if track_id not in cached_track_tzs:
                    logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                    cachedTrackData = self._downloadRawTrackRecord(serviceRecord, track_id)
                    try:
                        self._populateActivityFromTrackData(activity, cachedTrackData, minimumWaypoints=True)
                    except APIExcludeActivity as e:
                        e.ExternalActivityID = track_id
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(e)
                        continue

                    if not activity.TZ and not activity.Stationary:
                        logger.info("Couldn't determine TZ")
                        exclusions.append(APIExcludeActivity("Couldn't determine TZ", activityId=track_id))
                        continue
                    cachedTrackRecord = {"Owner": serviceRecord.ExternalID, "TrackID": track_id, "TZ": pickle.dumps(activity.TZ), "StartTime": activity.StartTime}
                    cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
                elif not activity.Stationary:
                    activity.TZ = pickle.loads(cached_track_tzs[track_id]["TZ"])
                    activity.AdjustTZ()  # Everything returned is in UTC

                activity.Laps = []
                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]

                activity.ServiceData = {"ActivityID": act["id"], "ActivityData": cachedTrackData}
                activity.CalculateUID()
                activities.append(activity)

            if not paged:
                earliestFirstPageDate = earliestDate
            if not exhaustive or ("more" in data and data["more"] is False):
                break
            else:
                paged = True
        return activities, exclusions
Exemplo n.º 30
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        url = self.SetioDomain + "getRunsByUserId"
        extID = svcRecord.ExternalID

        payload = {"userId": extID}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        response = requests.post(url, data=json.dumps(payload), headers=headers)
        try:
            reqdata = response.json()
        except ValueError:
            raise APIException("Failed parsing Setio list response %s - %s" % (resp.status_code, resp.text))

        for ride in reqdata:
            activity = UploadedActivity()
            activity.StartTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
            if "stopTimeStamp" in ride:
                activity.EndTime = datetime.strptime(
                    datetime.utcfromtimestamp(ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
            activity.ServiceData = {"ActivityID": ride["runId"], "Manual": "False"}

            activity.Name = ride["programName"]

            logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
            activity.Type = ActivityType.Running
            if "totalDistance" in ride:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["totalDistance"])

            if "averageCadence" in ride:
                activity.Stats.Cadence.update(
                    ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["averageCadence"]))

            if "averageSpeed" in ride:
                activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                         avg=ride["averageSpeed"])

            # get comment
            url = self.SetioDomain + "getRunComment"
            payload = { "userId": extID, "runId": activity.ServiceData["ActivityID"]}
            headers = {
                'content-type': "application/json",
                'cache-control': "no-cache",
            }
            streamdata = requests.post(url, data=json.dumps(payload), headers=headers)
            if streamdata.status_code == 500:
                raise APIException("Internal server error")

            if streamdata.status_code == 403:
                raise APIException("No authorization to download activity", block=True,
                                   user_exception=UserException(UserExceptionType.Authorization,
                                                                intervention_required=True))

            activity.Notes = None
            if streamdata.status_code == 200:  # Ok
                try:
                    commentdata = streamdata.json()
                except:
                    raise APIException("Stream data returned is not JSON")

                if "comment" in commentdata:
                    activity.Notes = commentdata["comment"]

            activity.GPS = True

            activity.Private = False
            activity.Stationary = False  # True = no sensor data

            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
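
# A side note on the StartTime/EndTime handling above: the strftime/strptime
# round-trip only discards sub-second precision, since utcfromtimestamp()
# already returns a naive UTC datetime. An equivalent, simpler form (shown as
# an observation, not a change to the provider code) would be:
from datetime import datetime

start_time = datetime.utcfromtimestamp(1461310800).replace(microsecond=0)  # example epoch value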
Exemplo n.º 31
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        headers = self._getAuthHeaders(serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"

        activity_tz_cache_raw = cachedb.sporttracks_meta_cache.find_one({"ExternalID": serviceRecord.ExternalID})
        activity_tz_cache_raw = activity_tz_cache_raw if activity_tz_cache_raw else {"Activities":[]}
        activity_tz_cache = dict([(x["ActivityURI"], x["TZ"]) for x in activity_tz_cache_raw["Activities"]])

        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, headers=headers)
            try:
                res = res.json()
            except ValueError:
                raise APIException("Could not decode activity list response %s %s" % (res.status_code, res.text))
            for act in res["items"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ActivityURI": act["uri"]}

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                    # Longstanding ST.mobi bug causes it to return negative partial-hour timezones as "-2:-30" instead of "-2:30"
                fixed_start_time = re.sub(r":-(\d\d)", r":\1", act["start_time"])
                activity.StartTime = dateutil.parser.parse(fixed_start_time)
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.

                activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
                activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(act["duration"]))  # OpenFit says this excludes paused times.

                # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
                if activity.TZ == pytz.utc:
                    if act["uri"] in activity_tz_cache:
                        activity.TZ = pytz.FixedOffset(activity_tz_cache[act["uri"]])
                    else:
                        # So, we get the first location in the activity and calculate the TZ from that.
                        try:
                            firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                        except APIExcludeActivity:
                            pass
                        else:
                            try:
                                activity.CalculateTZ(firstLocation, recalculate=True)
                            except:
                                # We tried!
                                pass
                            else:
                                activity.AdjustTZ()
                            finally:
                                activity_tz_cache[act["uri"]] = activity.StartTime.utcoffset().total_seconds() / 60

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["total_distance"]))

                types = [x.strip().lower() for x in act["type"].split(":")]
                types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
                activity.Type = None
                for type_key in types:
                    if type_key in self._activityMappings:
                        activity.Type = self._activityMappings[type_key]
                        break
                if not activity.Type:
                    exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"], userException=UserException(UserExceptionType.Other)))
                    continue

                activity.CalculateUID()
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        logger.debug("Writing back meta cache")
        cachedb.sporttracks_meta_cache.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Activities": [{"ActivityURI": k, "TZ": v} for k, v in activity_tz_cache.items()]}, upsert=True)
        return activities, exclusions
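
# The re.sub above repairs ST.mobi's malformed negative partial-hour offsets
# before dateutil sees them, e.g. "-2:-30" becomes "-2:30":
import re

fixed = re.sub(r":-(\d\d)", r":\1", "2013-05-01T09:00:00-2:-30")
# fixed == "2013-05-01T09:00:00-2:30"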
Exemplo n.º 32
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        ns = self._tp_ns
        activities = []
        exclusions = []

        reqData = self._authData(svcRecord)

        limitDateFormat = "%d %B %Y"

        if exhaustive:
            listEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            listStart = datetime(day=1, month=1,
                                 year=1980)  # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20)  # Doesn't really matter

        lastActivityDay = None
        discoveredWorkoutIds = []
        while True:
            reqData.update({
                "startDate": listStart.strftime(limitDateFormat),
                "endDate": listEnd.strftime(limitDateFormat)
            })
            print("Requesting %s to %s" % (listStart, listEnd))
            resp = requests.post(
                "https://www.trainingpeaks.com/tpwebservices/service.asmx/GetWorkoutsForAthlete",
                data=reqData)
            xresp = etree.XML(resp.content)
            for xworkout in xresp:
                activity = UploadedActivity()

                workoutId = xworkout.find("tpw:WorkoutId", namespaces=ns).text

                workoutDayEl = xworkout.find("tpw:WorkoutDay", namespaces=ns)
                startTimeEl = xworkout.find("tpw:StartTime", namespaces=ns)

                workoutDay = dateutil.parser.parse(workoutDayEl.text)
                startTime = dateutil.parser.parse(
                    startTimeEl.text
                ) if startTimeEl is not None and startTimeEl.text else None

                if lastActivityDay is None or workoutDay.replace(
                        tzinfo=None) > lastActivityDay:
                    lastActivityDay = workoutDay.replace(tzinfo=None)

                if startTime is None:
                    continue  # Planned but not executed yet.
                activity.StartTime = startTime

                endTimeEl = xworkout.find("tpw:TimeTotalInSeconds",
                                          namespaces=ns)
                if not endTimeEl.text:
                    exclusions.append(
                        APIExcludeActivity("Activity has no duration",
                                           activity_id=workoutId,
                                           user_exception=UserException(
                                               UserExceptionType.Corrupt)))
                    continue

                activity.EndTime = activity.StartTime + timedelta(
                    seconds=float(endTimeEl.text))

                distEl = xworkout.find("tpw:DistanceInMeters", namespaces=ns)
                if distEl.text:
                    activity.Stats.Distance = ActivityStatistic(
                        ActivityStatisticUnit.Meters, value=float(distEl.text))
                # PWX is damn near comprehensive, no need to fill in any of the other statistics here, really

                if workoutId in discoveredWorkoutIds:
                    continue  # There's the possibility of query overlap, if there are multiple activities on a single day that fall across the query return limit
                discoveredWorkoutIds.append(workoutId)

                workoutTypeEl = xworkout.find("tpw:WorkoutTypeDescription",
                                              namespaces=ns)
                if workoutTypeEl.text:
                    if workoutTypeEl.text == "Day Off":
                        continue  # TrainingPeaks has some weird activity types...
                    if workoutTypeEl.text not in self._workoutTypeMappings:
                        exclusions.append(
                            APIExcludeActivity("Activity type %s unknown" %
                                               workoutTypeEl.text,
                                               activity_id=workoutId,
                                               user_exception=UserException(
                                                   UserExceptionType.Corrupt)))
                        continue
                    activity.Type = self._workoutTypeMappings[
                        workoutTypeEl.text]

                activity.ServiceData = {"WorkoutID": workoutId}
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive:
                break

            # Since TP only lets us query by date range, to get full activity history we need to query successively smaller ranges
            if len(xresp):
                if listStart == lastActivityDay:
                    break  # This wouldn't work if you had more than #MaxQueryReturn activities on that day - but that number is probably 50+
                listStart = lastActivityDay
            else:
                break  # We're done

        return activities, exclusions
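
The date-window narrowing described in the comments above can be summarized on its own: each pass queries one window, remembers the latest activity day seen, and then moves the window start forward to that day until nothing new comes back. A rough sketch under those assumptions, where fetch_window() is a hypothetical stand-in for the TrainingPeaks request and the "id"/"day" keys are illustrative:

from datetime import datetime, timedelta

def fetch_window(start, end):
    # Hypothetical placeholder for the real service call; returns a list of
    # activity dicts with "id" and "day" keys. Empty here so the loop terminates.
    return []

def download_all(list_start=datetime(1980, 1, 1)):
    list_end = datetime.now() + timedelta(days=1.5)  # pad for unknown timezones
    seen_ids, activities = set(), []
    last_activity_day = None
    while True:
        batch = fetch_window(list_start, list_end)
        for act in batch:
            if last_activity_day is None or act["day"] > last_activity_day:
                last_activity_day = act["day"]
            if act["id"] in seen_ids:
                continue  # successive windows can overlap, so de-duplicate
            seen_ids.add(act["id"])
            activities.append(act)
        if not batch or list_start == last_activity_day:
            break  # nothing returned, or the window can't be narrowed further
        list_start = last_activity_day  # narrow the window and query again
    return activities
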
Example No. 33
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        logger.debug("Checking motivato premium state")
        self._applyPaymentState(serviceRecord)

        logger.debug("Motivato DownloadActivityList")
        session = self._get_session(record=serviceRecord)
        activities = []
        exclusions = []

        self._rate_limit()

        retried_auth = False
        #headers = {'X-App-With-Tracks': "true"}
        headers = {}
        res = session.post(self._urlRoot + "/api/workouts/sync",
                           headers=headers)

        if res.status_code == 403 and not retried_auth:
            retried_auth = True
            session = self._get_session(serviceRecord, skip_cache=True)
            # Retry once with a freshly authenticated session.
            res = session.post(self._urlRoot + "/api/workouts/sync",
                               headers=headers)

        try:
            respList = res.json()
        except ValueError:
            res_txt = res.text  # So it can be captured in the log message
            raise APIException("Parse failure in Motivato list resp: %s - %s" %
                               (res.status_code, res_txt))

        for actInfo in respList:
            if "duration" in actInfo:
                duration = self._durationToSeconds(actInfo["duration"])
            else:
                continue

            activity = UploadedActivity()
            if "time_start" in actInfo["metas"]:
                startTimeStr = actInfo["training_at"] + " " + actInfo["metas"][
                    "time_start"]
            else:
                startTimeStr = actInfo["training_at"] + " 00:00:00"

            activity.StartTime = self._parseDateTime(startTimeStr)
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(
                seconds=duration)
            activity.Type = self._reverseActivityMappings[
                actInfo["discipline_id"]]
            activity.Stats.TimerTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds, value=duration)
            if "distance" in actInfo:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Kilometers,
                    value=float(actInfo["distance"]))
            #activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerSecond, value=1.0/float(actInfo["metas"]["pace"]))

            activity.ServiceData = {"WorkoutID": int(actInfo["id"])}

            activity.CalculateUID()
            logger.debug("Generated UID %s" % activity.UID)
            activities.append(activity)

        return activities, exclusions
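
_durationToSeconds is called above but not shown. A plausible sketch, assuming (as the field name suggests) that durations arrive as "HH:MM:SS" strings; the helper name and input format are assumptions, not taken from the service's API:

def duration_to_seconds(duration_str):
    """Convert an "HH:MM:SS" (or "MM:SS") string into a number of seconds."""
    seconds = 0
    for part in duration_str.split(":"):
        seconds = seconds * 60 + int(part)
    return seconds

assert duration_to_seconds("01:30:00") == 5400
assert duration_to_seconds("45:10") == 2710
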
Example No. 34
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        activities = []
        session = self._get_session(record=serviceRecord)
        session.headers.update({"Accept": "application/json"})
        workouts_resp = session.get(
            "https://api.trainerroad.com/api/careerworkouts")

        if workouts_resp.status_code != 200:
            if workouts_resp.status_code == 401:
                raise APIException("Invalid login",
                                   block=True,
                                   user_exception=UserException(
                                       UserExceptionType.Authorization,
                                       intervention_required=True))
            raise APIException("Workout listing error")

        cached_record = cachedb.trainerroad_meta.find_one(
            {"ExternalID": serviceRecord.ExternalID})
        if not cached_record:
            cached_workout_meta = {}
        else:
            cached_workout_meta = cached_record["Workouts"]

        workouts = workouts_resp.json()
        for workout in workouts:
            # Unfortunately their API doesn't provide the start/end times in the list response
            # So we need to pull the extra data, if it's not already cached
            workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
            if workout_id not in cached_workout_meta:
                meta_resp = session.get(
                    "https://api.trainerroad.com/api/careerworkouts?guid=%s" %
                    workout["Guid"])
                # We don't need everything
                full_meta = meta_resp.json()
                meta = {
                    key: full_meta[key]
                    for key in [
                        "WorkoutDate", "WorkoutName", "WorkoutNotes",
                        "TotalMinutes", "TotalKM", "AvgWatts", "Kj"
                    ]
                }
                cached_workout_meta[workout_id] = meta
            else:
                meta = cached_workout_meta[workout_id]

            activity = UploadedActivity()
            activity.ServiceData = {"ID": int(workout_id)}
            activity.Name = meta["WorkoutName"]
            activity.Notes = meta["WorkoutNotes"]
            activity.Type = ActivityType.Cycling

            # Everything's in UTC
            activity.StartTime = dateutil.parser.parse(
                meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
            activity.EndTime = activity.StartTime + timedelta(
                minutes=meta["TotalMinutes"])

            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
            activity.Stats.Power = ActivityStatistic(
                ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
            activity.Stats.Energy = ActivityStatistic(
                ActivityStatisticUnit.Kilojoules, value=meta["Kj"])

            activity.Stationary = False
            activity.GPS = False
            activity.CalculateUID()

            activities.append(activity)

        cachedb.trainerroad_meta.update(
            {"ExternalID": serviceRecord.ExternalID}, {
                "ExternalID": serviceRecord.ExternalID,
                "Workouts": cached_workout_meta
            },
            upsert=True)

        return activities, []
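
The caching pattern in this example, fetch per-workout detail only when it is missing from the cached record and write the whole map back with an upsert, can be reduced to a small helper. A sketch with hypothetical stand-ins (fetch_detail plays the role of the careerworkouts-by-guid request):

WANTED_KEYS = ("WorkoutDate", "WorkoutName", "WorkoutNotes",
               "TotalMinutes", "TotalKM", "AvgWatts", "Kj")

def sync_workout_meta(workouts, cached_meta, fetch_detail):
    """Ensure cached_meta has an entry for every workout, fetching only what's missing.

    workouts: iterable of dicts with "Id" and "Guid"; cached_meta: dict keyed by
    str(Id); fetch_detail: callable(guid) -> full metadata dict.
    """
    for workout in workouts:
        workout_id = str(workout["Id"])  # string keys survive the document store
        if workout_id not in cached_meta:
            full = fetch_detail(workout["Guid"])
            cached_meta[workout_id] = {key: full[key] for key in WANTED_KEYS}
    return cached_meta
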
Example No. 35
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        # define low parameter
        # get Garmin info

        # WARNING: BE CAREFUL ABOUT THE DATE FILTER
        # The date filter of this request works as follows:
        # - it fetches activities whose *upload* date falls between upload_start_time & upload_end_time
        # - and returns those matched activities
        # For example:
        # if you upload an activity to Garmin on 20-05-2019 with a start date of 01-01-2019,
        # and you use upload_start_time=20-05-2019 & upload_end_time=21-05-2019,
        # the 01-01-2019 activity will be returned.
        # So we download activities by upload date.

        service_id = svcRecord._id
        user = db.users.find_one({
            'ConnectedServices': {
                '$elemMatch': {
                    'ID': service_id,
                    'Service': self.ID
                }
            }
        })

        afterDateObj = datetime.now() - timedelta(days=1)

        afterDate = afterDateObj.strftime("%Y-%m-%d")
        afterDate_tstmp = str(int(afterDateObj.timestamp()))
        date_now = datetime.now()
        now_tstmp = str(int(date_now.timestamp()))

        userID = svcRecord.ExternalID
        oauth_token = svcRecord.Authorization.get('OAuthToken')
        user_access_token = svcRecord.Authorization.get('AccessToken')
        user_access_token_secret = svcRecord.Authorization.get(
            'AccessTokenSecret')

        logging.info("\t Download Garmin Health activities since : " +
                     afterDate)
        logging.info("\t Building signin for activities summary")

        user_tokens = {
            'access_token': user_access_token,
            'access_token_secret': user_access_token_secret,
            'oauth_token': oauth_token
        }
        payload = ""
        start_date = afterDateObj
        index_total = 0
        while start_date < date_now:
            end_date = start_date + timedelta(seconds=86400)
            if end_date > date_now:
                end_date = date_now

            start_date_tmstmp = str(int(start_date.timestamp()))
            start_date_str = start_date.strftime("%Y-%m-%d")
            end_date_tmstmp = str(int(end_date.timestamp()))
            end_date_str = end_date.strftime("%Y-%m-%d")

            logging.info(
                "\t Download Garmin Health activities from %s to %s " %
                (start_date_str, end_date_str))

            signin_parameters = {
                'upload_start_time': start_date_tmstmp,
                'upload_end_time': end_date_tmstmp,
            }
            signin_info = self._request_signin('GET',
                                               self.URI_ACTIVITIES_SUMMARY,
                                               user_tokens,
                                               parameters=signin_parameters)

            resp = requests.request("GET",
                                    signin_info['path'],
                                    data=payload,
                                    headers=signin_info['header'])

            if resp.status_code != 204 and resp.status_code != 200:
                logging.info(
                    "\t An error occured while downloading Garmin Health activities from %s to %s "
                    % (start_date_str, end_date_str))

            json_data = resp.json()

            if json_data:
                for item in json_data:
                    index_total = index_total + 1
                    activity = UploadedActivity()

                    activity_name = item['activityType']
                    if item['deviceName'] != 'unknown':
                        activity_name = activity_name + " - " + item[
                            'deviceName']

                    # parse date start to get timezone and date
                    activity.StartTime = datetime.utcfromtimestamp(
                        item['startTimeInSeconds'])
                    activity.TZ = pytz.utc

                    logging.debug("\tActivity start s/t %s: %s" %
                                  (activity.StartTime, activity_name))

                    activity.EndTime = activity.StartTime + timedelta(
                        seconds=item["durationInSeconds"])

                    activity.ServiceData = {"ActivityID": item["summaryId"]}
                    if "manual" in item:
                        activity.ServiceData['Manual'] = item["manual"]
                    else:
                        activity.ServiceData['Manual'] = False
                    # check if activity type ID exists
                    if item["activityType"] not in self._reverseActivityTypeMappings:
                        # TODO : Uncomment it when test are done
                        #exclusions.append(
                        #    APIExcludeActivity("Unsupported activity type %s" % item["activityType"],
                        #                       activity_id=item["summaryId"],
                        #                       user_exception=UserException(UserExceptionType.Other)))
                        logger.info("\t\tUnknown activity")
                        continue

                    activity.Type = self._reverseActivityTypeMappings[
                        item["activityType"]]

                    activity.Stats.Distance = ActivityStatistic(
                        ActivityStatisticUnit.Meters,
                        value=item["distanceInMeters"])

                    if "avgSpeedInMetersPerSecond" in item and "maxSpeedInMetersPerSecond" in item:
                        activity.Stats.Speed = ActivityStatistic(
                            ActivityStatisticUnit.MetersPerSecond,
                            avg=item["avgSpeedInMetersPerSecond"],
                            max=item["maxSpeedInMetersPerSecond"])
                    else:
                        if "avgSpeedInMetersPerSecond" in item:
                            activity.Stats.Speed = ActivityStatistic(
                                ActivityStatisticUnit.MetersPerSecond,
                                avg=item["avgSpeedInMetersPerSecond"])
                        if "maxSpeedInMetersPerSecond" in item:
                            activity.Stats.Speed = ActivityStatistic(
                                ActivityStatisticUnit.MetersPerSecond,
                                max=item["maxSpeedInMetersPerSecond"])

                    # Todo: find Garmin data name
                    # activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories,
                    #                                          value=ftbt_activity["calories"])
                    # Todo: find Garmin data name
                    # activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=ride[
                    #    "moving_time"] if "moving_time" in ride and ride[
                    #    "moving_time"] > 0 else None)  # They don't let you manually enter this, and I think it returns 0 for those activities.
                    # Todo: find Garmin data name
                    # if "average_watts" in ride:
                    #    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts,
                    #                                             avg=ride["average_watts"])

                    # Todo: find Garmin data
                    # activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)

                    if "averageHeartRateInBeatsPerMinute" in item and "maxHeartRateInBeatsPerMinute" in item:
                        activity.Stats.HR.update(
                            ActivityStatistic(
                                ActivityStatisticUnit.BeatsPerMinute,
                                avg=item["averageHeartRateInBeatsPerMinute"],
                                max=item["maxHeartRateInBeatsPerMinute"]))
                    else:
                        if "averageHeartRateInBeatsPerMinute" in item:
                            activity.Stats.HR.update(
                                ActivityStatistic(
                                    ActivityStatisticUnit.BeatsPerMinute,
                                    avg=item[
                                        "averageHeartRateInBeatsPerMinute"]))
                        if "maxHeartRateInBeatsPerMinute" in item:
                            activity.Stats.HR.update(
                                ActivityStatistic(
                                    ActivityStatisticUnit.BeatsPerMinute,
                                    max=item["maxHeartRateInBeatsPerMinute"]))

                    # Todo: find Garmin data name
                    # if "average_cadence" in ride:
                    #    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                    #                                                    avg=ride["average_cadence"]))
                    # Todo: find Garmin data name
                    # if "average_temp" in ride:
                    #    activity.Stats.Temperature.update(
                    #        ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))

                    # Todo: find Garmin data name
                    if "calories" in item:
                        activity.Stats.Energy = ActivityStatistic(
                            ActivityStatisticUnit.Kilocalories,
                            value=item["calories"])
                    activity.Name = activity_name

                    activity.Private = False
                    activity.Stationary = False

                    activity.AdjustTZ()
                    activity.CalculateUID()
                    activities.append(activity)
                    logging.info("\t\t Garmin Activity ID : " +
                                 str(item["summaryId"]))

            start_date = end_date

        logging.info("\t\t total Garmin activities downloaded : " +
                     str(index_total))
        return activities, exclusions
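
Because the summary endpoint filters on upload time, the example walks the requested range in one-day slices. That slicing can be expressed on its own; the function below is a sketch, and the parameter names simply mirror the signin_parameters used above:

from datetime import datetime, timedelta

def day_windows(start, end):
    """Yield (window_start, window_end) pairs covering [start, end) in slices of at most 24 hours."""
    while start < end:
        window_end = min(start + timedelta(seconds=86400), end)
        yield start, window_end
        start = window_end

# Illustrative usage: build the epoch-second parameters for each one-day request.
for w_start, w_end in day_windows(datetime.now() - timedelta(days=3), datetime.now()):
    params = {
        "upload_start_time": str(int(w_start.timestamp())),
        "upload_end_time": str(int(w_end.timestamp())),
    }
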
Example No. 36
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        # define low parameter
        limit = 20
        offset = 0
        sort = "desc"
        # get user Fitbit ID
        userID = svcRecord.ExternalID
        # get service Tapiriik ID
        service_id = svcRecord._id
        # get user "start sync from date" info
        # then prepare the afterDate var (it determines the date from which we download activities)
        user = db.users.find_one({'ConnectedServices': {'$elemMatch': {'ID': service_id, 'Service': 'fitbit'}}})
        afterDateObj = datetime.now() - timedelta(days=1)

        if user['Config']['sync_skip_before'] is not None:
            afterDateObj = user['Config']['sync_skip_before']
        else:
            if exhaustive:
                afterDateObj = datetime.now() - timedelta(days=3650) # go back 10 years

        afterDate = afterDateObj.strftime("%Y-%m-%d")
        logging.info("\t Download Fitbit activities since : " + afterDate)

        # prepare parameters to set in fitbit request uri
        uri_parameters = {
            'limit': limit,
            'offset': offset,
            'sort': sort,
            'afterDate': afterDate,
            'token': svcRecord.Authorization.get('AccessToken')
        }
        # set base fitbit request uri
        activities_uri_origin = 'https://api.fitbit.com/1/user/' + userID + '/activities/list.json'

        # first execute with offset = 0,
        # offset will be set to -1 if the Fitbit response doesn't give next-page info
        # offset will be incremented by 1 if the Fitbit response gives next-page info
        index_total = 0
        while offset > -1:

            # prepare uri parameters
            uri_parameters['offset'] = offset
            # build fitbit uri with new parameters
            activities_uri = activities_uri_origin + "?" + urlencode(uri_parameters)
            # execute fitbit request using "request with auth" function (it refreshes token if needed)
            logging.info("\t\t downloading offset : " + str(offset))
            resp = self._requestWithAuth(lambda session: session.get(
                activities_uri,
                headers={
                    'Authorization': 'Bearer ' + svcRecord.Authorization.get('AccessToken')
                }), svcRecord)

            # check if request has error
            if resp.status_code != 204 and resp.status_code != 200:
                raise APIException("Unable to find Fitbit activities")

            # get request data
            data = {}
            try:
                data = resp.json()
            except ValueError:
                raise APIException("Failed parsing fitbit list response %s - %s" % (resp.status_code, resp.text))

            # if the request returned activity info
            if data['activities']:
                ftbt_activities = data['activities']
                logging.info("\t\t nb activity : " + str(len(ftbt_activities)))

                # for every activity in this page of the request
                # (Fitbit gives 20 activities MAXIMUM, use the limit parameter)
                for ftbt_activity in ftbt_activities:
                    index_total = index_total +1
                    activity = UploadedActivity()

                    #parse date start to get timezone and date
                    parsedDate = ftbt_activity["startTime"][0:19] + ftbt_activity["startTime"][23:]
                    activity.StartTime = datetime.strptime(parsedDate, "%Y-%m-%dT%H:%M:%S%z")
                    activity.TZ = pytz.utc

                    logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ftbt_activity["activityName"]))

                    activity.EndTime = activity.StartTime + timedelta(0, (ftbt_activity["duration"]/1000))
                    activity.ServiceData = {"ActivityID": ftbt_activity["logId"], "Manual": ftbt_activity["logType"]}

                    # check if activity type ID exists
                    if ftbt_activity["activityTypeId"] not in self._reverseActivityTypeMappings:
                        exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ftbt_activity["activityTypeId"],
                                                             activity_id=ftbt_activity["logId"],
                                                             user_exception=UserException(UserExceptionType.Other)))
                        logger.info("\t\tUnknown activity")
                        continue

                    activity.Type = self._reverseActivityTypeMappings[ftbt_activity["activityTypeId"]]

                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers,
                                                                value=ftbt_activity["distance"])

                    if "speed" in ftbt_activity:
                        activity.Stats.Speed = ActivityStatistic(
                            ActivityStatisticUnit.KilometersPerHour,
                            avg=ftbt_activity["speed"],
                            max=ftbt_activity["speed"]
                        )
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ftbt_activity["calories"])
                    # Todo: find fitbit data name
                    #activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=ride[
                    #    "moving_time"] if "moving_time" in ride and ride[
                    #    "moving_time"] > 0 else None)  # They don't let you manually enter this, and I think it returns 0 for those activities.
                    # Todo: find fitbit data name
                    #if "average_watts" in ride:
                    #    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts,
                    #                                             avg=ride["average_watts"])

                    if "averageHeartRate" in ftbt_activity:
                        activity.Stats.HR.update(
                            ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ftbt_activity["averageHeartRate"]))
                    # Todo: find fitbit data name
                    #if "max_heartrate" in ride:
                    #    activity.Stats.HR.update(
                    #        ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"]))
                    # Todo: find fitbit data name
                    #if "average_cadence" in ride:
                    #    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                    #                                                    avg=ride["average_cadence"]))
                    # Todo: find fitbit data name
                    #if "average_temp" in ride:
                    #    activity.Stats.Temperature.update(
                    #        ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))

                    if "calories" in ftbt_activity:
                        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories,
                                                                  value=ftbt_activity["calories"])
                    activity.Name = ftbt_activity["activityName"]


                    activity.Private = False
                    if ftbt_activity['logType'] == 'manual':
                        activity.Stationary = True
                    else:
                        activity.Stationary = False


                    # Todo: find fitbit data
                    #activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
                    activity.AdjustTZ()
                    activity.CalculateUID()
                    activities.append(activity)
                    logging.info("\t\t Fitbit Activity ID : " + str(ftbt_activity["logId"]))

                if not exhaustive:
                    break
            # get next info for while condition and prepare offset for next request
            if 'next' not in data['pagination'] or not data['pagination']['next']:
                next = None
                offset = -1
            else:
                next = data['pagination']['next']
                offset = offset + 1

        logging.info("\t\t total Fitbit activities downloaded : " + str(index_total))
        return activities, exclusions
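
The offset handling above boils down to one rule: stop when Fitbit's pagination block has no "next" link, otherwise ask for the following page. A small sketch of just that decision (the pagination argument is the "pagination" dict from the list response, and the URL value is a placeholder):

def next_offset(current_offset, pagination):
    """Return the offset for the next request, or -1 to stop paging."""
    if "next" not in pagination or not pagination["next"]:
        return -1
    return current_offset + 1

assert next_offset(0, {}) == -1
assert next_offset(0, {"next": ""}) == -1
assert next_offset(2, {"next": "next-page-url"}) == 3
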
Example No. 37
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        oauthSession = self._oauthSession(serviceRecord)

        activities = []
        exclusions = []

        page_url = "https://api.endomondo.com/api/1/workouts"

        while True:
            resp = oauthSession.get(page_url)
            try:
                respList = resp.json()["data"]
            except ValueError:
                raise APIException("Error decoding activity list resp %s %s" % (resp.status_code, resp.text))
            for actInfo in respList:
                activity = UploadedActivity()
                activity.StartTime = self._parseDate(actInfo["start_time"])
                print("Activity s/t %s" % activity.StartTime)
                if "is_tracking" in actInfo and actInfo["is_tracking"]:
                    exclusions.append(APIExcludeActivity("Not complete", activityId=actInfo["id"], permanent=False, userException=UserException(UserExceptionType.LiveTracking)))
                    continue

                if "end_time" in actInfo:
                    activity.EndTime = self._parseDate(actInfo["end_time"])

                if actInfo["sport"] in self._activityMappings:
                    activity.Type = self._activityMappings[actInfo["sport"]]

                # "duration" is timer time
                if "duration_total" in actInfo:
                    activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(actInfo["duration_total"]))

                if "distance_total" in actInfo:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance_total"]))

                if "calories_total" in actInfo:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(actInfo["calories_total"]))

                activity.Stats.Elevation = ActivityStatistic(ActivityStatisticUnit.Meters)

                if "altitude_max" in actInfo:
                    activity.Stats.Elevation.Max = float(actInfo["altitude_max"])

                if "altitude_min" in actInfo:
                    activity.Stats.Elevation.Min = float(actInfo["altitude_min"])

                if "total_ascent" in actInfo:
                    activity.Stats.Elevation.Gain = float(actInfo["total_ascent"])

                if "total_descent" in actInfo:
                    activity.Stats.Elevation.Loss = float(actInfo["total_descent"])

                activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour)
                if "speed_max" in actInfo:
                    activity.Stats.Speed.Max = float(actInfo["speed_max"])

                if "heart_rate_avg" in actInfo:
                    activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(actInfo["heart_rate_avg"]))

                if "heart_rate_max" in actInfo:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=float(actInfo["heart_rate_max"])))

                if "cadence_avg" in actInfo:
                    activity.Stats.Cadence = ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=int(actInfo["cadence_avg"]))

                if "cadence_max" in actInfo:
                    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, max=int(actInfo["cadence_max"])))

                if "title" in actInfo:
                    activity.Name = actInfo["title"]

                activity.ServiceData = {"WorkoutID": int(actInfo["id"])}

                activity.CalculateUID()
                activities.append(activity)

            paging = resp.json()["paging"]
            if "next" not in paging or not paging["next"] or not exhaustive:
                break
            else:
                page_url = paging["next"]

        return activities, exclusions
Example No. 38
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        url = self.SingletrackerDomain + "getRidesByUserId"
        extID = svcRecord.ExternalID

        payload = {"userId": extID}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        response = requests.post(url,
                                 data=json.dumps(payload),
                                 headers=headers)
        try:
            reqdata = response.json()
        except ValueError:
            raise APIException(
                "Failed parsing Singletracker list response %s - %s" %
                (response.status_code, response.text))

        for ride in reqdata:
            activity = UploadedActivity()
            activity.StartTime = datetime.strptime(
                datetime.utcfromtimestamp(
                    ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
            if "stopTime" in ride:
                activity.EndTime = datetime.strptime(
                    datetime.utcfromtimestamp(
                        ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'),
                    "%Y-%m-%d %H:%M:%S")
            activity.ServiceData = {
                "ActivityID": ride["rideId"],
                "Manual": "False"
            }

            activity.Name = ride["trackName"]

            logger.debug("\tActivity s/t %s: %s" %
                         (activity.StartTime, activity.Name))
            activity.Type = ActivityType.MountainBiking
            if "totalDistance" in ride:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters, value=ride["totalDistance"])

            if "avgSpeed" in ride:
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=ride["avgSpeed"])
            activity.Notes = None

            activity.GPS = True

            activity.Private = False
            activity.Stationary = False  # True = no sensor data

            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
Example No. 39
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        headers = self._getAuthHeaders(serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"

        activity_tz_cache_raw = cachedb.sporttracks_meta_cache.find_one(
            {"ExternalID": serviceRecord.ExternalID})
        activity_tz_cache_raw = activity_tz_cache_raw if activity_tz_cache_raw else {
            "Activities": []
        }
        activity_tz_cache = dict([(x["ActivityURI"], x["TZ"])
                                  for x in activity_tz_cache_raw["Activities"]
                                  ])

        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, headers=headers)
            try:
                res = res.json()
            except ValueError:
                raise APIException(
                    "Could not decode activity list response %s %s" %
                    (res.status_code, res.text))
            for act in res["items"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ActivityURI": act["uri"]}

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                    # Longstanding ST.mobi bug causes it to return negative partial-hour timezones as "-2:-30" instead of "-2:30"
                fixed_start_time = re.sub(r":-(\d\d)", r":\1",
                                          act["start_time"])
                activity.StartTime = dateutil.parser.parse(fixed_start_time)
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(
                        activity.StartTime.tzinfo.utcoffset(
                            activity.StartTime).total_seconds() / 60
                    )  # Convert the dateutil lame timezones into pytz awesome timezones.

                activity.StartTime = activity.StartTime.replace(
                    tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(
                    seconds=float(act["duration"]))
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Seconds,
                    value=float(act["duration"]
                                ))  # OpenFit says this excludes paused times.

                # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
                if activity.TZ == pytz.utc:
                    if act["uri"] in activity_tz_cache:
                        activity.TZ = pytz.FixedOffset(
                            activity_tz_cache[act["uri"]])
                    else:
                        # So, we get the first location in the activity and calculate the TZ from that.
                        try:
                            firstLocation = self._downloadActivity(
                                serviceRecord,
                                activity,
                                returnFirstLocation=True)
                        except APIExcludeActivity:
                            pass
                        else:
                            try:
                                activity.CalculateTZ(firstLocation,
                                                     recalculate=True)
                            except:
                                # We tried!
                                pass
                            else:
                                activity.AdjustTZ()
                            finally:
                                activity_tz_cache[
                                    act["uri"]] = activity.StartTime.utcoffset(
                                    ).total_seconds() / 60

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters,
                    value=float(act["total_distance"]))

                types = [x.strip().lower() for x in act["type"].split(":")]
                types.reverse(
                )  # The incoming format is like "walking: hiking" and we want the most specific first
                activity.Type = None
                for type_key in types:
                    if type_key in self._activityMappings:
                        activity.Type = self._activityMappings[type_key]
                        break
                if not activity.Type:
                    exclusions.append(
                        APIExcludeActivity("Unknown activity type %s" %
                                           act["type"],
                                           activity_id=act["uri"],
                                           user_exception=UserException(
                                               UserExceptionType.Other)))
                    continue

                activity.CalculateUID()
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        logger.debug("Writing back meta cache")
        cachedb.sporttracks_meta_cache.update(
            {"ExternalID": serviceRecord.ExternalID}, {
                "ExternalID":
                serviceRecord.ExternalID,
                "Activities": [{
                    "ActivityURI": k,
                    "TZ": v
                } for k, v in activity_tz_cache.items()]
            },
            upsert=True)
        return activities, exclusions
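
The "longstanding ST.mobi bug" mentioned above is easy to demonstrate in isolation: a negative partial-hour UTC offset comes back with the minutes negated as well, so the re.sub call rewrites ":-MM" to ":MM" before parsing. A tiny sketch with made-up timestamps:

import re

def fix_st_offset(start_time):
    """Rewrite offsets like "-02:-30" to "-02:30" so they parse cleanly."""
    return re.sub(r":-(\d\d)", r":\1", start_time)

assert fix_st_offset("2019-05-20T07:15:00-02:-30") == "2019-05-20T07:15:00-02:30"
assert fix_st_offset("2019-05-20T07:15:00+05:30") == "2019-05-20T07:15:00+05:30"  # untouched
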
Example No. 40
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        while True:
            if before is not None and before < 0:
                break  # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
            logger.debug("Req with before=" + str(before) + "/" +
                         str(earliestDate))
            self._globalRateLimit()
            resp = requests.get("https://www.strava.com/api/v3/athletes/" +
                                str(svcRecord.ExternalID) + "/activities",
                                headers=self._apiHeaders(svcRecord),
                                params={"before": before})
            if resp.status_code == 401:
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))

            earliestDate = None

            reqdata = resp.json()

            if not len(reqdata):
                break  # No more activities to see

            for ride in reqdata:
                activity = UploadedActivity()
                activity.TZ = pytz.timezone(
                    re.sub("^\([^\)]+\)\s*", "", ride["timezone"])
                )  # Comes back as "(GMT -13:37) The Stuff/We Want""
                activity.StartTime = pytz.utc.localize(
                    datetime.strptime(ride["start_date"],
                                      "%Y-%m-%dT%H:%M:%SZ"))
                logger.debug("\tActivity s/t %s: %s" %
                             (activity.StartTime, ride["name"]))
                if not earliestDate or activity.StartTime < earliestDate:
                    earliestDate = activity.StartTime
                    before = calendar.timegm(
                        activity.StartTime.astimezone(pytz.utc).timetuple())

                activity.EndTime = activity.StartTime + timedelta(
                    0, ride["elapsed_time"])
                activity.ServiceData = {
                    "ActivityID": ride["id"],
                    "Manual": ride["manual"]
                }

                if ride["type"] not in self._reverseActivityTypeMappings:
                    exclusions.append(
                        APIExcludeActivity("Unsupported activity type %s" %
                                           ride["type"],
                                           activity_id=ride["id"],
                                           user_exception=UserException(
                                               UserExceptionType.Other)))
                    logger.debug("\t\tUnknown activity")
                    continue

                activity.Type = self._reverseActivityTypeMappings[ride["type"]]
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters, value=ride["distance"])
                if "max_speed" in ride or "average_speed" in ride:
                    activity.Stats.Speed = ActivityStatistic(
                        ActivityStatisticUnit.MetersPerSecond,
                        avg=ride["average_speed"]
                        if "average_speed" in ride else None,
                        max=ride["max_speed"] if "max_speed" in ride else None)
                activity.Stats.MovingTime = ActivityStatistic(
                    ActivityStatisticUnit.Seconds,
                    value=ride["moving_time"] if "moving_time" in ride
                    and ride["moving_time"] > 0 else None
                )  # They don't let you manually enter this, and I think it returns 0 for those activities.
                # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?
                if "average_watts" in ride:
                    activity.Stats.Power = ActivityStatistic(
                        ActivityStatisticUnit.Watts, avg=ride["average_watts"])
                if "average_heartrate" in ride:
                    activity.Stats.HR.update(
                        ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                          avg=ride["average_heartrate"]))
                if "max_heartrate" in ride:
                    activity.Stats.HR.update(
                        ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                          max=ride["max_heartrate"]))
                if "average_cadence" in ride:
                    activity.Stats.Cadence.update(
                        ActivityStatistic(
                            ActivityStatisticUnit.RevolutionsPerMinute,
                            avg=ride["average_cadence"]))
                if "average_temp" in ride:
                    activity.Stats.Temperature.update(
                        ActivityStatistic(ActivityStatisticUnit.DegreesCelcius,
                                          avg=ride["average_temp"]))
                if "calories" in ride:
                    activity.Stats.Energy = ActivityStatistic(
                        ActivityStatisticUnit.Kilocalories,
                        value=ride["calories"])
                activity.Name = ride["name"]
                activity.Private = ride["private"]
                activity.Stationary = ride["manual"]
                activity.GPS = ("start_latlng"
                                in ride) and (ride["start_latlng"] is not None)
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive or not earliestDate:
                break

        return activities, exclusions
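
Strava paging here works backwards in time: each response updates the earliest start time seen, and that time (as epoch seconds) becomes the "before" parameter for the next request. A minimal sketch of that cursor update, factored out of the loop above with a made-up start time:

import calendar
from datetime import datetime

import pytz

def update_paging_cursor(activity_start, earliest, before):
    """Track the earliest aware start time seen and derive the 'before' epoch cursor from it."""
    if earliest is None or activity_start < earliest:
        earliest = activity_start
        before = calendar.timegm(activity_start.astimezone(pytz.utc).timetuple())
    return earliest, before

start = pytz.utc.localize(datetime(2019, 5, 20, 7, 15))
earliest, before = update_paging_cursor(start, None, None)
assert earliest == start and before == calendar.timegm(start.timetuple())
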
Example No. 41
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        """
        GET List of Activities as JSON File

        URL: http://app.velohero.com/export/workouts/json
        Parameters:
        user      = username
        pass      = password
        date_from = YYYY-MM-DD
        date_to   = YYYY-MM-DD
        """
        activities           = []
        exclusions           = []
        discoveredWorkoutIds = []

        params = self._add_auth_params({}, record=serviceRecord)

        limitDateFormat = "%Y-%m-%d"

        if exhaustive:
            listEnd   = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = datetime(day=1, month=1, year=1980) # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20) # Doesn't really matter

        params.update({"date_from": listStart.strftime(limitDateFormat), "date_to": listEnd.strftime(limitDateFormat)})
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        res = requests.get(self._urlRoot + "/export/workouts/json", params=params)

        if res.status_code != 200:
            if res.status_code == 403:
                raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Unable to retrieve activity list")

        res.raise_for_status()
        try:
            res = res.json()
        except ValueError:
            raise APIException("Could not decode activity list")
        if "workouts" not in res:
            raise APIException("No activities")
        for workout in res["workouts"]:
            workoutId = int(workout["id"])
            if workoutId in discoveredWorkoutIds:
                continue # There's the possibility of query overlap
            discoveredWorkoutIds.append(workoutId)
            if workout["file"] != "1":
                logger.debug("Skip workout with ID: " + str(workoutId) + " (no file)")
                continue # Skip activity without samples (no PWX export)

            activity = UploadedActivity()

            logger.debug("Workout ID: " + str(workoutId))
            # Duration (dur_time)
            duration = self._durationToSeconds(workout["dur_time"])
            activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)
            # Start time (date_ymd, start_time)
            startTimeStr = workout["date_ymd"] + " " + workout["start_time"]
            activity.StartTime = self._parseDateTime(startTimeStr)
            # End time (date_ymd, start_time) + dur_time
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
            # Sport (sport_id)
            if workout["sport_id"] in self._reverseActivityMappings:
                activity.Type = self._reverseActivityMappings[workout["sport_id"]]
            else:
                activity.Type = ActivityType.Other
            # Distance (dist_km)
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(workout["dist_km"]))
            # Workout is hidden
            activity.Private = workout["hide"] == "1"

            activity.ServiceData = {"workoutId": workoutId}
            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
Example No. 42
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):

        activities = []
        exclusions = []
        earliestDate = None
        earliestFirstPageDate = None
        paged = False

        while True:
            before = "" if earliestDate is None else earliestDate.astimezone(
                pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
            params = {
                "authToken": serviceRecord.Authorization["AuthToken"],
                "maxResults": 45,
                "before": before
            }
            logger.debug("Req with " + str(params))
            response = requests.get(
                "http://api.mobile.endomondo.com/mobile/api/workout/list",
                params=params)

            if response.status_code != 200:
                if response.status_code == 401 or response.status_code == 403:
                    raise APIException(
                        "No authorization to retrieve activity list",
                        block=True,
                        user_exception=UserException(
                            UserExceptionType.Authorization,
                            intervention_required=True))
                raise APIException("Unable to retrieve activity list " +
                                   str(response))
            data = response.json()

            if "error" in data and data["error"]["type"] == "AUTH_FAILED":
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))

            track_ids = []
            this_page_activities = []
            for act in data["data"]:
                startTime = pytz.utc.localize(
                    datetime.strptime(act["start_time"],
                                      "%Y-%m-%d %H:%M:%S UTC"))
                if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                    earliestDate = startTime
                logger.debug("activity pre")
                if "tracking" in act and act["tracking"]:
                    logger.warning("\t tracking")
                    exclusions.append(
                        APIExcludeActivity("In progress",
                                           activityId=act["id"],
                                           permanent=False))
                    continue  # come back once they've completed the activity
                track_ids.append(act["id"])
                activity = UploadedActivity()
                activity.StartTime = startTime
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(act["duration_sec"]))
                logger.debug("\tActivity s/t " + str(activity.StartTime))

                activity.Stationary = not act["has_points"]

                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]
                activity.ServiceData = {"ActivityID": act["id"]}

                this_page_activities.append(activity)
            cached_track_tzs = cachedb.endomondo_activity_cache.find(
                {"TrackID": {
                    "$in": track_ids
                }})
            cached_track_tzs = dict([(x["TrackID"], x)
                                     for x in cached_track_tzs])
            logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" +
                         str(len(track_ids)) + " cached TZ records")

            for activity in this_page_activities:
                # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
                cachedTrackData = None
                track_id = activity.ServiceData["ActivityID"]

                if track_id not in cached_track_tzs:
                    logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                    cachedTrackData = self._downloadRawTrackRecord(
                        serviceRecord, track_id)
                    try:
                        self._populateActivityFromTrackData(
                            activity, cachedTrackData, minimumWaypoints=True)
                    except APIExcludeActivity as e:
                        e.ExternalActivityID = track_id
                        logger.info("Encountered APIExcludeActivity %s" %
                                    str(e))
                        exclusions.append(e)
                        continue

                    if not activity.TZ and not activity.Stationary:
                        logger.info("Couldn't determine TZ")
                        exclusions.append(
                            APIExcludeActivity("Couldn't determine TZ",
                                               activityId=track_id))
                        continue
                    cachedTrackRecord = {
                        "Owner": serviceRecord.ExternalID,
                        "TrackID": track_id,
                        "TZ": pickle.dumps(activity.TZ),
                        "StartTime": activity.StartTime
                    }
                    cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
                elif not activity.Stationary:
                    activity.TZ = pickle.loads(
                        cached_track_tzs[track_id]["TZ"])
                    activity.AdjustTZ()  # Everything returned is in UTC

                activity.Laps = []
                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]

                activity.ServiceData = {
                    "ActivityID": act["id"],
                    "ActivityData": cachedTrackData
                }
                activity.CalculateUID()
                activities.append(activity)

            if not paged:
                earliestFirstPageDate = earliestDate
            if not exhaustive or ("more" in data and data["more"] is False):
                break
            else:
                paged = True
        return activities, exclusions
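# A minimal, hypothetical sketch of the timezone-caching round trip used above:
# the resolved pytz timezone is pickled into the cache record and unpickled on a
# later listing so AdjustTZ() can run without re-downloading the track. The dict
# mirrors cachedTrackRecord; cache_record()/restore_tz() are illustrative helpers,
# not part of the original code.
import pickle
import pytz

def cache_record(owner, track_id, tz, start_time):
    # Serialize the tzinfo object exactly as the listing does with activity.TZ.
    return {"Owner": owner, "TrackID": track_id, "TZ": pickle.dumps(tz), "StartTime": start_time}

def restore_tz(record):
    # Reverse of cache_record(): recover the tzinfo later handed to AdjustTZ().
    return pickle.loads(record["TZ"])

rec = cache_record(42, "track-1", pytz.timezone("Europe/Berlin"), None)
assert restore_tz(rec).zone == "Europe/Berlin"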
Exemplo n.º 43
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({
                "start": (page - 1) * pageSz,
                "limit": pageSz
            }))

            res = self._request_with_reauth(
                serviceRecord, lambda session: session.get(
                    "https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities",
                    params={
                        "start": (page - 1) * pageSz,
                        "limit": pageSz
                    }))

            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text  # Capture the body so it can be included in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" %
                                   (res.status_code, res_txt))
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(
                        float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip(
                )) and act["activityName"][
                        "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["beginTimestamp"]["millis"]) / 1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(
                            float(act["endTimestamp"]["millis"]) / 1000))
                logger.debug("Activity s/t " + str(activity.StartTime) +
                             " on page " + str(page))
                activity.AdjustTZ()

                if "sumDistance" in act and float(
                        act["sumDistance"]["value"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(
                        self._unitMap[act["sumDistance"]["uom"]],
                        value=float(act["sumDistance"]["value"]))

                if "device" in act and act["device"]["key"] != "unknown":
                    devId = DeviceIdentifier.FindMatchingIdentifierOfType(
                        DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                    ver_split = act["device"]["key"].split(".")
                    ver_maj = None
                    ver_min = None
                    if len(ver_split) == 4:
                        # 2.90.0.0
                        ver_maj = int(ver_split[0])
                        ver_min = int(ver_split[1])
                    activity.Device = Device(devId,
                                             verMaj=ver_maj,
                                             verMin=ver_min)

                activity.Type = self._resolveActivityType(
                    act["activityType"]["key"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " +
                         str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
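# A hedged sketch of the start/limit paging loop above: pages of pageSz activities
# are requested until exhaustive is False or the last page is reached. The real
# code stops at res["search"]["totalPages"]; this sketch uses an empty page as the
# stop condition for simplicity, and fetch_page() is an assumed stand-in for the
# authenticated session.get() call.
def list_all(fetch_page, page_size=100, exhaustive=True):
    page = 1
    items = []
    while True:
        batch = fetch_page(start=(page - 1) * page_size, limit=page_size)
        items.extend(batch)
        if not exhaustive or not batch:
            break
        page += 1
    return items

# Example against an in-memory "service" of 250 fake activities.
fake = list(range(250))
assert list_all(lambda start, limit: fake[start:start + limit]) == fake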
Exemplo n.º 44
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        # Dropbox API v2 doesn't like / as root.
        if syncRoot == "/":
            syncRoot = ""
        # New Dropbox API prefers path_lower, it would seem.
        syncRoot = syncRoot.lower()

        # There used to be a massive affair going on here to cache the folder structure locally.
        # Dropbox API 2.0 doesn't support the hashes I need for that.
        # Oh well. Throw that data out now. Well, don't load it at all.
        cache = cachedb.dropbox_cache.find_one(
            {"ExternalID": svcRec.ExternalID}, {
                "ExternalID": True,
                "Activities": True
            })
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Activities": {}}

        try:
            list_result = dbcl.files_list_folder(syncRoot, recursive=True)
        except dropbox.exceptions.DropboxException as e:
            self._raiseDbException(e)

        def cache_writeback():
            if "_id" in cache:
                cachedb.dropbox_cache.save(cache)
            else:
                insert_result = cachedb.dropbox_cache.insert(cache)
                cache["_id"] = insert_result.inserted_id

        activities = []
        exclusions = []
        discovered_activity_cache_keys = set()

        while True:
            for entry in list_result.entries:
                if not hasattr(entry, "rev"):
                    # Not a file -> we don't care.
                    continue
                path = entry.path_lower

                if not path.endswith(".gpx") and not path.endswith(".tcx"):
                    # Not an activity file -> we don't care.
                    continue

                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "",
                                           1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                discovered_activity_cache_keys.add(hashedRelPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if existing and existing["Rev"] == entry.rev:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y"
                        )  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(
                            existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" %
                                 (path, "outdated meta cache"
                                  if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" %
                                    str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except:
                        pass  # We tried.

                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime":
                        act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")
                    }
                    # Incrementally update the cache db.
                    # Otherwise, if we crash later on in listing
                    # (due to OOM or similar), we'll never make progress on this account.
                    cache_writeback()
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

            # Perform pagination.
            if list_result.has_more:
                list_result = dbcl.files_list_folder_continue(
                    list_result.cursor)
            else:
                break

        # Drop deleted activities' records from cache.
        all_activity_cache_keys = set(cache["Activities"].keys())
        for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys:
            del cache["Activities"][deleted_key]

        cache_writeback()
        return activities, exclusions
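# A minimal sketch (with assumed record shapes) of the rev-based meta cache the
# Dropbox listing relies on: when the cached revision still matches the file's
# current rev, only the cached UID/StartTime are reused; otherwise the file is
# re-downloaded and its cache entry rewritten via cache_writeback().
def needs_refresh(cache_entry, current_rev):
    # True when the file was never cached or has changed since it was cached.
    return cache_entry is None or cache_entry["Rev"] != current_rev

cache = {"abc123": {"Rev": "5", "UID": "uid-1", "StartTime": "10:00:00 01 01 2020 +0000"}}
assert not needs_refresh(cache.get("abc123"), "5")   # unchanged file -> cheap path
assert needs_refresh(cache.get("abc123"), "6")       # new revision -> re-parse file
assert needs_refresh(cache.get("missing"), "1")      # never seen -> re-parse file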
Exemplo n.º 45
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        cookies = self._get_cookies(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            self._rate_limit()
            res = requests.get(
                "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                params={"start": (page - 1) * pageSz, "limit": pageSz},
                cookies=cookies,
            )
            res = res.json()["results"]
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "sumDistance" not in act:
                    exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                    continue
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if (
                    len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled"
                ):  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]
                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000)
                )
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"]))
                    )
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                    )
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000)
                    )
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()
                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"])
                )

                def mapStat(gcKey, statKey, type, useSourceUnits=False):
                    nonlocal activity, act
                    if gcKey in act:
                        value = float(act[gcKey]["value"])
                        if math.isinf(value):
                            return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                        activity.Stats.__dict__[statKey].update(
                            ActivityStatistic(self._unitMap[act[gcKey]["uom"]], **({type: value}))
                        )
                        if useSourceUnits:
                            activity.Stats.__dict__[statKey] = activity.Stats.__dict__[statKey].asUnits(
                                self._unitMap[act[gcKey]["uom"]]
                            )

                if "sumMovingDuration" in act:
                    activity.Stats.MovingTime = ActivityStatistic(
                        ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["sumMovingDuration"]["value"]))
                    )

                if "sumDuration" in act:
                    activity.Stats.TimerTime = ActivityStatistic(
                        ActivityStatisticUnit.Time,
                        value=timedelta(
                            minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                            seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                        ),
                    )

                mapStat(
                    "minSpeed", "Speed", "min", useSourceUnits=True
                )  # We need to suppress conversion here, so we can fix the pace-speed issue below
                mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
                mapStat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
                mapStat("minAirTemperature", "Temperature", "min")
                mapStat("maxAirTemperature", "Temperature", "max")
                mapStat("weightedMeanAirTemperature", "Temperature", "avg")
                mapStat("sumEnergy", "Energy", "value")
                mapStat("maxHeartRate", "HR", "max")
                mapStat("weightedMeanHeartRate", "HR", "avg")
                mapStat("maxRunCadence", "RunCadence", "max")
                mapStat("weightedMeanRunCadence", "RunCadence", "avg")
                mapStat("maxBikeCadence", "Cadence", "max")
                mapStat("weightedMeanBikeCadence", "Cadence", "avg")
                mapStat("minPower", "Power", "min")
                mapStat("maxPower", "Power", "max")
                mapStat("weightedMeanPower", "Power", "avg")
                mapStat("minElevation", "Elevation", "min")
                mapStat("maxElevation", "Elevation", "max")
                mapStat("gainElevation", "Elevation", "gain")
                mapStat("lossElevation", "Elevation", "loss")

                # In Garmin Land, max can be smaller than min for this field :S
                if (
                    activity.Stats.Power.Max is not None
                    and activity.Stats.Power.Min is not None
                    and activity.Stats.Power.Min > activity.Stats.Power.Max
                ):
                    activity.Stats.Power.Min = None

                # To get it to match what the user sees in GC.
                if activity.Stats.RunCadence.Max is not None:
                    activity.Stats.RunCadence.Max *= 2
                if activity.Stats.RunCadence.Average is not None:
                    activity.Stats.RunCadence.Average *= 2

                # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
                if "minSpeed" in act:
                    if ":" in act["minSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Min:
                        activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
                if "maxSpeed" in act:
                    if ":" in act["maxSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Max:
                        activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
                if "weightedMeanSpeed" in act:
                    if ":" in act["weightedMeanSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Average:
                        activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average

                # Similarly, they do weird stuff with HR at times - %-of-max and zones
                # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
                recalcHR = False
                if "maxHeartRate" in act:
                    if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act["maxHeartRate"]["withUnitAbbr"]:
                        activity.Stats.HR.Max = None
                        recalcHR = True
                if "weightedMeanHeartRate" in act:
                    if (
                        "%" in act["weightedMeanHeartRate"]["withUnitAbbr"]
                        or "z" in act["weightedMeanHeartRate"]["withUnitAbbr"]
                    ):
                        activity.Stats.HR.Average = None
                        recalcHR = True

                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()
                activity.ServiceData = {"ActivityID": act["activityId"], "RecalcHR": recalcHR}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
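# A small illustration of the pace fix-up in the listing above: when Garmin Connect
# labels a "speed" field with an mm:ss unit abbreviation, the value is really a
# pace in minutes per kilometre (or mile), so 60 / value recovers the speed in the
# matching distance-per-hour unit. The sample values and unit strings here are
# illustrative assumptions.
def fix_pace(value, unit_abbr):
    # A ":" in the unit abbreviation marks a pace value masquerading as a speed.
    if ":" in unit_abbr and value:
        return 60 / value
    return value

assert fix_pace(5.0, "min:km") == 12.0   # 5 min/km -> 12 km/h
assert fix_pace(12.0, "kph") == 12.0     # a real speed is left untouched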
Exemplo n.º 46
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        logger.debug("DownloadActivityList")
        allItems = []
        headers = self._apiHeaders(serviceRecord)
        nextRequest = '/v7.1/workout/?user=' + str(serviceRecord.ExternalID)
        while True:
            response = requests.get("https://api.mapmyfitness.com" +
                                    nextRequest,
                                    headers=headers)
            if response.status_code != 200:
                if response.status_code == 401 or response.status_code == 403:
                    raise APIException(
                        "No authorization to retrieve activity list",
                        block=True,
                        user_exception=UserException(
                            UserExceptionType.Authorization,
                            intervention_required=True))
                raise APIException(
                    "Unable to retrieve activity list " + str(response),
                    serviceRecord)
            data = response.json()
            allItems += data["_embedded"]["workouts"]
            nextLink = data["_links"].get("next")
            if not exhaustive or not nextLink:
                break
            nextRequest = nextLink[0]["href"]

        activities = []
        exclusions = []
        for act in allItems:
            # TODO catch exception and add to exclusions
            activity = UploadedActivity()
            activityID = act["_links"]["self"][0]["id"]
            activity.StartTime = datetime.strptime(act["start_datetime"],
                                                   "%Y-%m-%dT%H:%M:%S%z")
            activity.Notes = act["notes"] if "notes" in act else None

            # aggregate
            aggregates = act["aggregates"]
            elapsed_time_total = aggregates.get("elapsed_time_total", "0")
            activity.EndTime = activity.StartTime + timedelta(
                0, round(float(elapsed_time_total)))
            activity.Stats.TimerTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds, value=float(elapsed_time_total))
            activity.Stats.MovingTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds, value=float(elapsed_time_total))
            if "active_time_total" in aggregates:
                activity.Stats.MovingTime = ActivityStatistic(
                    ActivityStatisticUnit.Seconds,
                    value=float(aggregates["active_time_total"]))

            if "distance_total" in aggregates:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters,
                    value=float(aggregates["distance_total"]))

            if "speed_min" in aggregates:
                activity.Stats.Speed.Min = float(aggregates["speed_min"])
            if "speed_max" in aggregates:
                activity.Stats.Speed.Max = float(aggregates["speed_max"])
            if "speed_avg" in aggregates:
                activity.Stats.Speed.Average = float(aggregates["speed_avg"])

            if "heartrate_min" in aggregates:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      min=float(aggregates["heartrate_min"])))
            if "heartrate_max" in aggregates:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      max=float(aggregates["heartrate_max"])))
            if "heartrate_avg" in aggregates:
                activity.Stats.HR = ActivityStatistic(
                    ActivityStatisticUnit.BeatsPerMinute,
                    avg=float(aggregates["heartrate_avg"]))

            if "cadence_min" in aggregates:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        min=int(aggregates["cadence_min"])))
            if "cadence_max" in aggregates:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        max=int(aggregates["cadence_max"])))
            if "cadence_avg" in aggregates:
                activity.Stats.Cadence = ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    avg=int(aggregates["cadence_avg"]))

            if "power_min" in aggregates:
                activity.Stats.Power.update(
                    ActivityStatistic(ActivityStatisticUnit.Watts,
                                      min=int(aggregates["power_min"])))
            if "power_max" in aggregates:
                activity.Stats.Power.update(
                    ActivityStatistic(ActivityStatisticUnit.Watts,
                                      max=int(aggregates["power_max"])))
            if "power_avg" in aggregates:
                activity.Stats.Power = ActivityStatistic(
                    ActivityStatisticUnit.Watts,
                    avg=int(aggregates["power_avg"]))

            activityTypeLink = act["_links"].get("activity_type")
            activityTypeID = activityTypeLink[0]["id"] if activityTypeLink is not None else None

            privacyLink = act["_links"].get("privacy")
            privacyID = privacyLink[0]["id"] if privacyLink is not None else None
            activity.Private = privacyID == "0"

            activity.Type = self._resolveActivityType(activityTypeID, headers)

            activity.ServiceData = {
                "ActivityID": activityID,
                "activityTypeID": activityTypeID,
                "privacyID": privacyID
            }
            activity.CalculateUID()
            activities.append(activity)
        return activities, exclusions
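# A toy stand-in (not tapiriik's ActivityStatistic) showing why the min/max/avg
# aggregates above are merged with update() rather than reassigned: each API field
# carries only one component, and replacing the whole statistic would drop the
# components read earlier in the loop.
class Stat:
    def __init__(self, min=None, max=None, avg=None):
        self.Min, self.Max, self.Average = min, max, avg

    def update(self, other):
        # Keep existing components; fill in whichever ones the new value carries.
        self.Min = other.Min if other.Min is not None else self.Min
        self.Max = other.Max if other.Max is not None else self.Max
        self.Average = other.Average if other.Average is not None else self.Average

hr = Stat()
hr.update(Stat(min=95))
hr.update(Stat(max=180))
hr.update(Stat(avg=140))
assert (hr.Min, hr.Max, hr.Average) == (95, 180, 140)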
Exemplo n.º 47
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        while True:
            if before is not None and before < 0:
                break # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
            logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
            self._globalRateLimit()
            resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
            if resp.status_code == 401:
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

            earliestDate = None

            reqdata = resp.json()

            if not len(reqdata):
                break  # No more activities to see

            for ride in reqdata:
                activity = UploadedActivity()
                activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
                activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
                logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ride["name"]))
                if not earliestDate or activity.StartTime < earliestDate:
                    earliestDate = activity.StartTime
                    before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())

                activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
                activity.ServiceData = {"ActivityID": ride["id"], "Manual": ride["manual"]}

                if ride["type"] not in self._reverseActivityTypeMappings:
                    exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"], userException=UserException(UserExceptionType.Other)))
                    logger.debug("\t\tUnknown activity")
                    continue

                activity.Type = self._reverseActivityTypeMappings[ride["type"]]
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["distance"])
                if "max_speed" in ride or "average_speed" in ride:
                    activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["average_speed"] if "average_speed" in ride else None, max=ride["max_speed"] if "max_speed" in ride else None)
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=ride["moving_time"] if "moving_time" in ride and ride["moving_time"] > 0 else None)  # They don't let you manually enter this, and I think it returns 0 for those activities.
                # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?
                if "average_watts" in ride:
                    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=ride["average_watts"])
                if "average_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ride["average_heartrate"]))
                if "max_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"]))
                if "average_cadence" in ride:
                    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["average_cadence"]))
                if "average_temp" in ride:
                    activity.Stats.Temperature.update(ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))
                if "calories" in ride:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"])
                activity.Name = ride["name"]
                activity.Private = ride["private"]
                activity.Stationary = ride["manual"]
                activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive or not earliestDate:
                break

        return activities, exclusions
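# A hedged sketch of the "before" cursor pagination the Strava listing uses: each
# request passes before = epoch seconds of the earliest start time seen so far,
# and listing stops when a page comes back empty. fetch() is an assumed stand-in
# for the authenticated requests.get() call.
import calendar
from datetime import datetime, timezone

def page_backwards(fetch):
    before, seen = None, []
    while True:
        page = fetch(before)
        if not page:
            break
        for start_time in page:
            seen.append(start_time)
            ts = calendar.timegm(start_time.astimezone(timezone.utc).timetuple())
            before = ts if before is None else min(before, ts)
    return seen

# Example: two "pages" of fake start times (newest first), then an empty page.
pages = [[datetime(2021, 3, 1, tzinfo=timezone.utc)],
         [datetime(2021, 1, 1, tzinfo=timezone.utc)],
         []]
assert len(page_backwards(lambda before: pages.pop(0))) == 2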
Exemplo n.º 48
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        oauthSession = self._oauthSession(serviceRecord)

        activities = []
        exclusions = []

        page_url = "https://api.endomondo.com/api/1/workouts"

        while True:
            resp = oauthSession.get(page_url)
            try:
                respList = resp.json()["data"]
            except ValueError:
                self._rateLimitBailout(resp)
                raise APIException("Error decoding activity list resp %s %s" %
                                   (resp.status_code, resp.text))
            for actInfo in respList:
                activity = UploadedActivity()
                activity.StartTime = self._parseDate(actInfo["start_time"])
                logger.debug("Activity s/t %s" % activity.StartTime)
                if "is_tracking" in actInfo and actInfo["is_tracking"]:
                    exclusions.append(
                        APIExcludeActivity(
                            "Not complete",
                            activity_id=actInfo["id"],
                            permanent=False,
                            user_exception=UserException(
                                UserExceptionType.LiveTracking)))
                    continue

                if "end_time" in actInfo:
                    activity.EndTime = self._parseDate(actInfo["end_time"])

                if actInfo["sport"] in self._activityMappings:
                    activity.Type = self._activityMappings[actInfo["sport"]]

                # "duration" is timer time
                if "duration_total" in actInfo:
                    activity.Stats.TimerTime = ActivityStatistic(
                        ActivityStatisticUnit.Seconds,
                        value=float(actInfo["duration_total"]))

                if "distance_total" in actInfo:
                    activity.Stats.Distance = ActivityStatistic(
                        ActivityStatisticUnit.Kilometers,
                        value=float(actInfo["distance_total"]))

                if "calories_total" in actInfo:
                    activity.Stats.Energy = ActivityStatistic(
                        ActivityStatisticUnit.Kilocalories,
                        value=float(actInfo["calories_total"]))

                activity.Stats.Elevation = ActivityStatistic(
                    ActivityStatisticUnit.Meters)

                if "altitude_max" in actInfo:
                    activity.Stats.Elevation.Max = float(
                        actInfo["altitude_max"])

                if "altitude_min" in actInfo:
                    activity.Stats.Elevation.Min = float(
                        actInfo["altitude_min"])

                if "total_ascent" in actInfo:
                    activity.Stats.Elevation.Gain = float(
                        actInfo["total_ascent"])

                if "total_descent" in actInfo:
                    activity.Stats.Elevation.Loss = float(
                        actInfo["total_descent"])

                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.KilometersPerHour)
                if "speed_max" in actInfo:
                    activity.Stats.Speed.Max = float(actInfo["speed_max"])

                if "heart_rate_avg" in actInfo:
                    activity.Stats.HR = ActivityStatistic(
                        ActivityStatisticUnit.BeatsPerMinute,
                        avg=float(actInfo["heart_rate_avg"]))

                if "heart_rate_max" in actInfo:
                    activity.Stats.HR.update(
                        ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                          max=float(
                                              actInfo["heart_rate_max"])))

                if "cadence_avg" in actInfo:
                    activity.Stats.Cadence = ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        avg=int(actInfo["cadence_avg"]))

                if "cadence_max" in actInfo:
                    activity.Stats.Cadence.update(
                        ActivityStatistic(
                            ActivityStatisticUnit.RevolutionsPerMinute,
                            max=int(actInfo["cadence_max"])))

                if "power_avg" in actInfo:
                    activity.Stats.Power = ActivityStatistic(
                        ActivityStatisticUnit.Watts,
                        avg=int(actInfo["power_avg"]))

                if "power_max" in actInfo:
                    activity.Stats.Power.update(
                        ActivityStatistic(ActivityStatisticUnit.Watts,
                                          max=int(actInfo["power_max"])))

                if "title" in actInfo:
                    activity.Name = actInfo["title"]

                activity.ServiceData = {
                    "WorkoutID": int(actInfo["id"]),
                    "Sport": actInfo["sport"]
                }

                activity.CalculateUID()
                activities.append(activity)

            paging = resp.json()["paging"]
            if "next" not in paging or not paging["next"] or not exhaustive:
                break
            else:
                page_url = paging["next"]

        return activities, exclusions
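# A minimal sketch of the "follow the next link" paging used here: keep requesting
# paging["next"] until the API stops returning one (or exhaustive is False).
# get_json() is a placeholder assumption for oauthSession.get(url).json().
def follow_pages(get_json, first_url, exhaustive=True):
    url, items = first_url, []
    while True:
        body = get_json(url)
        items.extend(body["data"])
        nxt = body.get("paging", {}).get("next")
        if not exhaustive or not nxt:
            break
        url = nxt
    return items

fake = {
    "/p1": {"data": [1, 2], "paging": {"next": "/p2"}},
    "/p2": {"data": [3], "paging": {}},
}
assert follow_pages(fake.__getitem__, "/p1") == [1, 2, 3]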
Exemplo n.º 49
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        now = datetime.now()
        prev = now - timedelta(6 * 365 / 12)

        period = []

        aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year,
                                     now.month)
        period.append(aperiod)

        if exhaustive:
            for _ in range(20):
                now = prev
                prev = now - timedelta(6 * 365 / 12)
                aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year,
                                             now.month)
                period.append(aperiod)

        for dateInterval in period:
            headers = self._getAuthHeaders(svcRecord)
            resp = requests.get(self.ApiEndpoint + "/users/" +
                                str(svcRecord.ExternalID) +
                                "/activities.xml?date=" + dateInterval,
                                headers=headers)
            if resp.status_code in (400, 401, 403):
                logger.info(resp.content)
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))

            root = xml.fromstring(resp.content)

            logger.info("\t\t nb activity : " +
                        str(len(root.findall('.//ID'))))

            for ride in root.iter('ACTIVITY'):

                activity = UploadedActivity()
                activity.TZ = pytz.timezone("UTC")

                startdate = ride.find('.//STARTDATE').text + ride.find(
                    './/TIMEZONE').text
                datebase = parse(startdate)

                activity.StartTime = datebase  #pytz.utc.localize(datebase)

                activity.ServiceData = {
                    "ActivityID": ride.find('ID').text,
                    "Manual": ride.find('MANUAL').text
                }

                logger.info("\t\t DecathlonCoach Activity ID : " +
                            ride.find('ID').text)

                if ride.find('SPORTID').text not in self._reverseActivityTypeMappings:
                    exclusions.append(
                        APIExcludeActivity("Unsupported activity type %s" %
                                           ride.find('SPORTID').text,
                                           activity_id=ride.find('ID').text,
                                           user_exception=UserException(
                                               UserExceptionType.Other)))
                    logger.info(
                        "\t\tDecathlonCoach Unknown activity, sport id " +
                        ride.find('SPORTID').text + " is not mapped")
                    continue

                activity.Type = self._reverseActivityTypeMappings[ride.find('SPORTID').text]

                for val in ride.iter('VALUE'):
                    if val.get('id') == self._unitMap["duration"]:
                        activity.EndTime = activity.StartTime + timedelta(
                            0, int(val.text))
                    if val.get('id') == self._unitMap["distance"]:
                        activity.Stats.Distance = ActivityStatistic(
                            ActivityStatisticUnit.Meters, value=int(val.text))
                    if val.get('id') == self._unitMap["kcal"]:
                        activity.Stats.Energy = ActivityStatistic(
                            ActivityStatisticUnit.Kilocalories,
                            value=int(val.text))
                    if val.get('id') == self._unitMap["speedaverage"]:
                        meterperhour = int(val.text)
                        meterpersecond = meterperhour / 3600
                        activity.Stats.Speed = ActivityStatistic(
                            ActivityStatisticUnit.MetersPerSecond,
                            avg=meterpersecond,
                            max=None)

                if ride.find('LIBELLE').text is None or ride.find('LIBELLE').text == "":
                    txtdate = startdate.split(' ')
                    activity.Name = "Sport DecathlonCoach " + txtdate[0]
                else:
                    activity.Name = ride.find('LIBELLE').text

                activity.Private = False
                activity.Stationary = ride.find('MANUAL').text
                activity.GPS = ride.find('ABOUT').find('TRACK').text
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

        return activities, exclusions
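# A small illustration of the six-month date windows built above: each interval is
# formatted as YYYYMM-YYYYMM and, when exhaustive, the window slides back roughly
# six months at a time (timedelta(6 * 365 / 12) days). The dates below are
# illustrative only.
from datetime import datetime, timedelta

def build_periods(now, count):
    periods = []
    for _ in range(count):
        prev = now - timedelta(6 * 365 / 12)
        periods.append("%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month))
        now = prev
    return periods

assert build_periods(datetime(2021, 7, 15), 2) == ["202101-202107", "202007-202101"]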
Exemplo n.º 50
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        while True:
            logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
            resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
            self._logAPICall("list", (svcRecord.ExternalID, str(earliestDate)), resp.status_code == 401)
            if resp.status_code == 401:
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

            earliestDate = None

            reqdata = resp.json()

            if not len(reqdata):
                break  # No more activities to see

            for ride in reqdata:
                activity = UploadedActivity()
                activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
                activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
                logger.debug("\tActivity s/t " + str(activity.StartTime))
                if not earliestDate or activity.StartTime < earliestDate:
                    earliestDate = activity.StartTime
                    before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())

                manual = False  # Determines if we bother to "download" the activity afterwards
                if ride["start_latlng"] is None or ride["end_latlng"] is None:
                    manual = True

                activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
                activity.ServiceData = {"ActivityID": ride["id"], "Manual": manual}

                actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
                if not len(actType):
                    exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"]))
                    logger.debug("\t\tUnknown activity")
                    continue

                activity.Type = actType[0]
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["distance"])
                if "max_speed" in ride or "average_speed" in ride:
                    activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["average_speed"] if "average_speed" in ride else None, max=ride["max_speed"] if "max_speed" in ride else None)  # Strava reports speeds in m/s (cf. the other Strava listing above)
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(seconds=ride["moving_time"]) if "moving_time" in ride and ride["moving_time"] > 0 else None)  # They don't let you manually enter this, and I think it returns 0 for those activities.
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"] if "calories" in ride else None)
                if "average_watts" in ride:
                    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=ride["average_watts"])
                if "average_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ride["average_heartrate"]))
                if "max_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"]))
                if "average_cadence" in ride:
                    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["average_cadence"]))
                if "average_temp" in ride:
                    activity.Stats.Temperature.update(ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))
                if "calories" in ride:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"])
                activity.Name = ride["name"]
                activity.Private = ride["private"]
                activity.Stationary = manual
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive or not earliestDate:
                break

        return activities, exclusions
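# A tiny illustration of the reverse type lookup above: the Strava type string is
# matched against the values of _reverseActivityTypeMappings and, when nothing
# matches, the ride is skipped and recorded as an APIExcludeActivity. The mapping
# contents here are illustrative, not the service's real table.
mapping = {"cycling": "Ride", "running": "Run"}

def reverse_lookup(strava_type):
    matches = [k for k, v in mapping.items() if v == strava_type]
    return matches[0] if matches else None

assert reverse_lookup("Run") == "running"
assert reverse_lookup("Yoga") is None   # -> would be appended to exclusions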