Example #1
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        cookies = self._get_cookies(record=serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, cookies=cookies)
            res = res.json()
            for act in res["items"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ActivityURI": act["uri"]}

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                activity.StartTime = dateutil.parser.parse(act["start_time"])
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.

                activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["duration"])))  # OpenFit says this excludes paused times.

                # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
                if activity.TZ == pytz.utc:
                    # So, we get the first location in the activity and calculate the TZ from that.
                    try:
                        firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                    except APIExcludeActivity:
                        pass
                    else:
                        activity.CalculateTZ(firstLocation)
                        activity.AdjustTZ()

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["total_distance"]))

                types = [x.strip().lower() for x in act["type"].split(":")]
                types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
                activity.Type = None
                for type_key in types:
                    if type_key in self._activityMappings:
                        activity.Type = self._activityMappings[type_key]
                        break
                if not activity.Type:
                    exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"]))
                    continue

                activity.CalculateUID()
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        return activities, exclusions
Example #2
 def _populateActivity(self, rawRecord):
     ''' Populate the 1st level of the activity object with all details required for UID from API data '''
     activity = UploadedActivity()
     activity.StartTime = dateutil.parser.parse(rawRecord["start"])
     activity.EndTime = activity.StartTime + timedelta(seconds=rawRecord["duration"])
     activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["distance"])
     activity.GPS = rawRecord["hasGps"]
     activity.Stationary = not rawRecord["hasGps"]
     activity.CalculateUID()
     return activity
Example #3
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        cookies = self._get_cookies(serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, cookies=cookies)
            res = res.json()
            for act in res["items"]:
                activity = UploadedActivity()

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                activity.StartTime = dateutil.parser.parse(act["start_time"])
                activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.
                activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Distance = float(act["total_distance"])
                activity.Type = self._activityMappings[act["type"].lower()]

                activity.CalculateUID()
                activity.UploadedTo = [{"Connection": serviceRecord, "ActivityURI": act["uri"]}]
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        return activities, exclusions
Example #4
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        logger.debug("Checking motivato premium state")
        self._applyPaymentState(serviceRecord)

        logger.debug("Motivato DownloadActivityList")
        session = self._get_session(record=serviceRecord)
        activities = []
        exclusions = []

        self._rate_limit()

        retried_auth = False
        #headers = {'X-App-With-Tracks': "true"}
        headers = {}
        res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)

        if res.status_code == 403 and not retried_auth:
            retried_auth = True
            session = self._get_session(serviceRecord, skip_cache=True)
            res = session.post(self._urlRoot + "/api/workouts/sync", headers=headers)  # retry once with the refreshed session

        try:
            respList = res.json()
        except ValueError:
            res_txt = res.text  # So it can be captured in the log message
            raise APIException("Parse failure in Motivato list resp: %s - %s" % (res.status_code, res_txt))

        for actInfo in respList:
            if "duration" in actInfo:
                duration = self._durationToSeconds(actInfo["duration"])
            else:
                continue

            activity = UploadedActivity()
            if "time_start" in actInfo["metas"]:
                startTimeStr = actInfo["training_at"] + " " + actInfo["metas"]["time_start"]
            else:
                startTimeStr = actInfo["training_at"] + " 00:00:00"

            activity.StartTime = self._parseDateTime(startTimeStr)
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
            activity.Type = self._reverseActivityMappings[actInfo["discipline_id"]]
            activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)
            if "distance" in actInfo:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance"]))
            #activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerSecond, value=1.0/float(actInfo["metas"]["pace"]))

            activity.ServiceData={"WorkoutID": int(actInfo["id"])}

            activity.CalculateUID()
            logger.debug("Generated UID %s" % activity.UID)
            activities.append(activity)


        return activities, exclusions
Example #5
    def _populateActivity(self, rawRecord):
        ''' Populate the 1st level of the activity object with all details required for UID from RK API data '''
        activity = UploadedActivity()
        #  can stay local + naive here, recipient services can calculate TZ as required
        activity.StartTime = datetime.strptime(rawRecord["start_time"], "%a, %d %b %Y %H:%M:%S")
        activity.EndTime = activity.StartTime + timedelta(0, round(rawRecord["duration"]))  # this is inaccurate with pauses - excluded from hash
        activity.Distance = rawRecord["total_distance"]
        if rawRecord["type"] in self._activityMappings:
            activity.Type = self._activityMappings[rawRecord["type"]]

        activity.CalculateUID()
        return activity
Example #6
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None
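        # Page backwards through the athlete's activity list: "before" is moved to the UNIX timestamp of the earliest activity seen on each page.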

        while True:
            logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
            resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
            self._logAPICall("list", (svcRecord.ExternalID, str(earliestDate)), resp.status_code == 401)
            if resp.status_code == 401:
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

            earliestDate = None

            reqdata = resp.json()

            if not len(reqdata):
                break  # No more activities to see

            for ride in reqdata:
                activity = UploadedActivity()
                activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
                activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
                logger.debug("\tActivity s/t " + str(activity.StartTime))
                if not earliestDate or activity.StartTime < earliestDate:
                    earliestDate = activity.StartTime
                    before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())

                if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                    exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                    logger.debug("\t\tNo pts")
                    continue  # stationary activity - no syncing for now


                activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
                activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]

                actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
                if not len(actType):
                    exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"]))
                    logger.debug("\t\tUnknown activity")
                    continue

                activity.Type = actType[0]
                activity.Distance = ride["distance"]
                activity.Name = ride["name"]
                activity.Private = ride["private"]
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive or not earliestDate:
                break

        return activities, exclusions
Example #7
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        activities = []
        session = self._get_session(record=serviceRecord)
        session.headers.update({"Accept": "application/json"})
        workouts_resp = session.get("https://api.trainerroad.com/api/careerworkouts")

        if workouts_resp.status_code != 200:
            if workouts_resp.status_code == 401:
                raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Workout listing error")

        cached_record = cachedb.trainerroad_meta.find_one({"ExternalID": serviceRecord.ExternalID})
        if not cached_record:
            cached_workout_meta = {}
        else:
            cached_workout_meta = cached_record["Workouts"]

        workouts = workouts_resp.json()
        for workout in workouts:
            # Un/f their API doesn't provide the start/end times in the list response
            # So we need to pull the extra data, if it's not already cached
            workout_id = str(workout["Id"]) # Mongo doesn't do non-string keys
            if workout_id not in cached_workout_meta:
                meta_resp = session.get("https://api.trainerroad.com/api/careerworkouts?guid=%s" % workout["Guid"])
                # We don't need everything
                full_meta = meta_resp.json()
                meta = {key: full_meta[key] for key in ["WorkoutDate", "WorkoutName", "WorkoutNotes", "TotalMinutes", "TotalKM", "AvgWatts", "Kj"]}
                cached_workout_meta[workout_id] = meta
            else:
                meta = cached_workout_meta[workout_id]

            activity = UploadedActivity()
            activity.ServiceData = {"ID": int(workout_id)}
            activity.Name = meta["WorkoutName"]
            activity.Notes = meta["WorkoutNotes"]
            activity.Type = ActivityType.Cycling

            # Everything's in UTC
            activity.StartTime = dateutil.parser.parse(meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
            activity.EndTime = activity.StartTime + timedelta(minutes=meta["TotalMinutes"])

            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
            activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
            activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilojoules, value=meta["Kj"])

            activity.Stationary = False
            activity.GPS = False
            activity.CalculateUID()

            activities.append(activity)

        cachedb.trainerroad_meta.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Workouts": cached_workout_meta}, upsert=True)

        return activities, []
Example #8
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        url = self.SingletrackerDomain + "getRidesByUserId"
        extID = svcRecord.ExternalID

        payload = {"userId": extID}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        response = requests.post(url, data=json.dumps(payload), headers=headers)
        try:
            reqdata = response.json()
        except ValueError:
            raise APIException("Failed parsing Singletracker list response %s - %s" % (resp.status_code, resp.text))

        for ride in reqdata:
            activity = UploadedActivity()
            activity.StartTime = datetime.strptime(
                datetime.utcfromtimestamp(ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
            if "stopTime" in ride:
                activity.EndTime = datetime.strptime(
                    datetime.utcfromtimestamp(ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'), "%Y-%m-%d %H:%M:%S")
            activity.ServiceData = {"ActivityID": ride["rideId"], "Manual": "False"}

            activity.Name = ride["trackName"]

            logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, activity.Name))
            activity.Type = ActivityType.MountainBiking
            if "totalDistance" in ride:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["totalDistance"])

            if "avgSpeed" in ride:
                activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond,
                                                         avg=ride["avgSpeed"])
            activity.Notes = None

            activity.GPS = True

            activity.Private = False
            activity.Stationary = False  # True = no sensor data

            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
Example #9
 def _populateActivity(self, rawRecord):
     ''' Populate the 1st level of the activity object with all details required for UID from RK API data '''
     activity = UploadedActivity()
     #  can stay local + naive here, recipient services can calculate TZ as required
     activity.StartTime = datetime.strptime(rawRecord["start_time"], "%a, %d %b %Y %H:%M:%S")
     activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Time, value=timedelta(0, float(rawRecord["duration"]))) # P. sure this is moving time
     activity.EndTime = activity.StartTime + activity.Stats.MovingTime.Value # this is inaccurate with pauses - excluded from hash
     activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["total_distance"])
     # I'm fairly sure this is how the RK calculation works. I remember I removed something exactly like this from ST.mobi, but I trust them more than I trust myself to get the speed right.
     if (activity.EndTime - activity.StartTime).total_seconds() > 0:
         activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=activity.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value / ((activity.EndTime - activity.StartTime).total_seconds() / 60 / 60))
     activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord["total_calories"] if "total_calories" in rawRecord else None)
     if rawRecord["type"] in self._activityMappings:
         activity.Type = self._activityMappings[rawRecord["type"]]
     activity.CalculateUID()
     return activity
Example #10
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        activities = []
        exclusions = []

        for act in self._getActivities(serviceRecord, exhaustive=exhaustive):
            activity = UploadedActivity()
            activity.StartTime = dateutil.parser.parse(act['startDateTimeLocal'])
            activity.EndTime = activity.StartTime + timedelta(seconds=act['duration'])
            _type = self._activityMappings.get(act['activityType'])
            if not _type:
                exclusions.append(APIExcludeActivity("Unsupported activity type %s" % act['activityType'],
                                                     activity_id=act["activityId"],
                                                     user_exception=UserException(UserExceptionType.Other)))
                continue  # skip unsupported types rather than emitting an activity with Type = None
            activity.ServiceData = {"ActivityID": act['activityId']}
            activity.Type = _type
            activity.Notes = act['notes']
            activity.GPS = bool(act.get('startLatitude'))
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=act['distance'])
            activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=act['calories'])
            if 'heartRateMin' in act:
                activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, min=act['heartRateMin'],
                                                      max=act['heartRateMax'], avg=act['heartRateAverage'])
            activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=act['duration'])

            if 'temperature' in act:
                activity.Stats.Temperature = ActivityStatistic(ActivityStatisticUnit.DegreesCelcius,
                                                               avg=act['temperature'])
            activity.CalculateUID()
            logger.debug("\tActivity s/t %s", activity.StartTime)
            activities.append(activity)

        return activities, exclusions
Example #11
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        # grumble grumble strava api sucks grumble grumble
        # http://app.strava.com/api/v1/rides?athleteId=id
        activities = []
        exclusions = []
        before = earliestDate = None

        while True:
            resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
            logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))

            earliestDate = None

            reqdata = resp.json()

            if not len(reqdata):
                break  # No more activities to see

            for ride in reqdata:
                activity = UploadedActivity()
                activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
                activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
                logger.debug("\tActivity s/t " + str(activity.StartTime))
                if not earliestDate or activity.StartTime < earliestDate:
                    earliestDate = activity.StartTime
                    before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())

                if ride["start_latlng"] is None or ride["end_latlng"] is None or ride["distance"] is None or ride["distance"] == 0:
                    exclusions.append(APIExcludeActivity("No path", activityId=ride["id"]))
                    continue  # stationary activity - no syncing for now
                if ride["start_latlng"] == ride["end_latlng"]:
                    exclusions.append(APIExcludeActivity("Only one waypoint", activityId=ride["id"]))
                    continue  # Only one waypoint, one would assume.


                activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
                activity.UploadedTo = [{"Connection": svcRecord, "ActivityID": ride["id"]}]

                actType = [k for k, v in self._reverseActivityTypeMappings.items() if v == ride["type"]]
                if not len(actType):
                    exclusions.append(APIExcludeActivity("Unsupported activity type", activityId=ride["id"]))
                    continue

                activity.Type = actType[0]
                activity.Distance = ride["distance"]
                activity.Name = ride["name"]
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive or not earliestDate:
                break

        return activities, exclusions
Example #12
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)
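        # _folderRecurse walks the folder tree under syncRoot so cache["Structure"] reflects the current Dropbox contents before we scan it for activity files.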

        activities = []
        exclusions = []

        for dir in cache["Structure"]:
            for file in dir["Files"]:
                path = file["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                existing = [(k, x) for k, x in cache["Activities"].items() if x["Path"] == relPath]  # path is relative to syncroot to reduce churn if they relocate it
                existing = existing[0] if existing else None
                if existing is not None:
                    existUID, existing = existing
                if existing and existing["Rev"] == file["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existUID
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(e)
                        continue
                    del act.Laps
                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][act.UID] = {"Rev": rev, "Path": relPath, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged":tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        cachedb.dropbox_cache.update({"ExternalID": svcRec.ExternalID}, cache, upsert=True)
        return activities, exclusions
Example #13
    def _create_activity(self, data):
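        # Map a single raw API record onto an UploadedActivity: parse times, resolve the sport type, and fill distance/speed/HR/energy stats while guarding against missing fields.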
        activity = UploadedActivity()
        activity.Name = data.get("name")
        activity.StartTime = pytz.utc.localize(datetime.strptime(data.get("start_at"), "%Y-%m-%dT%H:%M:%SZ"))
        activity.EndTime = activity.StartTime + timedelta(0, float(data.get("duration")))
        sport_id = data.get("sport_id")
        activity.Type = self._reverseActivityMappings.get(int(sport_id), ActivityType.Other) if sport_id else ActivityType.Other

        distance = data.get("distance")
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(distance) if distance else None)
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(data.get("total_time_in_seconds")))
        avg_speed = data.get("average_speed")
        max_speed = data.get("max_speed")
        activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=float(avg_speed) if avg_speed else None, max=float(max_speed) if max_speed else None)
        avg_hr = data.get("average_heart_rate")
        max_hr = data.get("maximum_heart_rate")
        activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(avg_hr) if avg_hr else None, max=float(max_hr) if max_hr else None))
        calories = data.get("calories")
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=int(calories) if calories else None)

        activity.ServiceData = {"ActivityID": data.get("id")}

        logger.debug("\tActivity s/t {}: {}".format(activity.StartTime, activity.Type))
        activity.CalculateUID()
        return activity
Example #14
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
        # offset also supported
        page = 1
        pageSz = 50
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            # TODO: take advantage of their nice ETag support
            params = {"offset": (page - 1) * pageSz, "limit": pageSz}
            params = self._add_auth_params(params, record=serviceRecord)

            res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
            res = res.json()
            total_pages = math.ceil(int(res["results_count"]) / pageSz)
            for act in res["results"]:
                if "first_lat" not in act or "last_lat" not in act:
                    exclusions.append(APIExcludeActivity("No points", activityId=act["activityId"]))
                    continue
                if "distance" not in act:
                    exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                    continue
                activity = UploadedActivity()

                activity.TZ = pytz.timezone(act["time_zone"])

                logger.debug("Name " + act["name"] + ":")
                if len(act["name"].strip()):
                    activity.Name = act["name"]

                activity.StartTime = pytz.utc.localize(datetime.strptime(act["departed_at"], "%Y-%m-%dT%H:%M:%SZ"))
                activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()

                activity.Distance = float(act["distance"])  # This value is already in meters...
                # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" would be correct
                activity.Type = ActivityType.Cycling

                activity.CalculateUID()
                activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"]}]
                activities.append(activity)
            logger.debug("Finished page {} of {}".format(page, total_pages))
            if not exhaustive or total_pages == page or total_pages == 0:
                break
            else:
                page += 1
        return activities, exclusions
Example #15
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        for dir in cache["Structure"]:
            for file in dir["Files"]:
                path = file["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                existing = [(k, x) for k, x in cache["Activities"].items() if x["Path"] == relPath]  # path is relative to syncroot to reduce churn if they relocate it
                existing = existing[0] if existing else None
                if existing is not None:
                    existUID, existing = existing
                if existing and existing["Rev"] == file["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existUID
                    act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    # get the full activity
                    try:
                        act, rev = self._getActivity(dbcl, path)
                    except APIExcludeActivity as e:
                        exclusions.append(e)
                        continue
                    cache["Activities"][act.UID] = {"Rev": rev, "Path": relPath, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
                act.UploadedTo = [{"Connection": svcRec, "Path": path}]
                tagRes = self._tagActivity(relPath)
                act.Tagged = tagRes is not None

                act.Type = tagRes if tagRes is not None else ActivityType.Other
                activities.append(act)

        cachedb.dropbox_cache.update({"ExternalID": svcRec.ExternalID}, cache, upsert=True)
        return activities, exclusions
Example #16
    def _populateActivity(self, rawRecord):
        ''' Populate the 1st level of the activity object with all details required for UID from pulsstory API data '''
        activity = UploadedActivity()
        #  can stay local + naive here, recipient services can calculate TZ as required
        activity.Name = rawRecord["Name"] if "Name" in rawRecord else None
        activity.StartTime = datetime.strptime(rawRecord["StartTime"], "%Y-%m-%d %H:%M:%S")
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(rawRecord["Duration"]))
        activity.EndTime = activity.StartTime + timedelta(seconds=float(rawRecord["Duration"]))
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
        if (activity.EndTime - activity.StartTime).total_seconds() > 0:
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour, avg=activity.Stats.Distance.asUnits(ActivityStatisticUnit.Kilometers).Value / ((activity.EndTime - activity.StartTime).total_seconds() / 60 / 60))
        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=rawRecord["Energy"] if "Energy" in rawRecord else None)
        if rawRecord["Type"] in self._activityMappings:
            activity.Type = self._activityMappings[rawRecord["Type"]]
        activity.GPS = rawRecord["HasPath"] if "HasPath" in rawRecord else False
        activity.Stationary = rawRecord["HasPoints"] if "HasPoints" in rawRecord else True
        activity.Notes = rawRecord["Notes"] if "Notes" in rawRecord else None
        activity.Private = rawRecord["Private"] if "Private" in rawRecord else True

        activity.CalculateUID()
        return activity
Example #17
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        session = self._get_session(serviceRecord)
        list_params = self._with_auth(session, {"count": 20, "offset": 1})

        activities = []
        exclusions = []

        while True:
            list_resp = session.get("https://api.nike.com/me/sport/activities", params=list_params)
            list_resp = list_resp.json()

            for act in list_resp["data"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ID": act["activityId"]}

                if act["status"] != "COMPLETE":
                    exclusions.append(APIExcludeActivity("Not complete", activityId=act["activityId"], permanent=False, userException=UserException(UserExceptionType.LiveTracking)))
                    continue

                activity.StartTime = dateutil.parser.parse(act["startTime"]).replace(tzinfo=pytz.utc)
                activity.EndTime = activity.StartTime + self._durationToTimespan(act["metricSummary"]["duration"])

                tz_name = act["activityTimeZone"]

                # They say these are all IANA standard names - they aren't
                if tz_name in self._timezones:
                    tz_name = self._timezones[tz_name]

                activity.TZ = pytz.timezone(tz_name)

                if act["activityType"] in self._activityMappings:
                    activity.Type = self._activityMappings[act["activityType"]]

                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(act["metricSummary"]["distance"]))
                activity.Stats.Strides = ActivityStatistic(ActivityStatisticUnit.Strides, value=int(act["metricSummary"]["steps"]))
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(act["metricSummary"]["calories"]))
                activity.CalculateUID()
                activities.append(activity)

            if len(list_resp["data"]) == 0 or not exhaustive:
                break
            list_params["offset"] += list_params["count"]

        return activities, exclusions
Example #18
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        cookies = self._get_cookies(record=serviceRecord)
        page = 1
        pageSz = 50
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
            res = res.json()["results"]
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "beginLatitude" not in act or "endLatitude" not in act or (act["beginLatitude"] is act["endLatitude"] and act["beginLongitude"] is act["endLongitude"]):
                    exclusions.append(APIExcludeActivity("No points", activityId=act["activityId"]))
                    continue
                if "sumDistance" not in act:
                    exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                    continue
                activity = UploadedActivity()

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                    activity.Name = act["activityName"]["value"]
                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()
                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Distance = float(act["sumDistance"]["value"]) * (1.60934 if act["sumDistance"]["uom"] == "mile" else 1) * 1000  # In meters...
                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()
                activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["activityId"]}]
                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
Example #19
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))

            res = self._request_with_reauth(serviceRecord, lambda session: session.get("https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}))

            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text  # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()

                if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))

                if "device" in act and act["device"]["key"] != "unknown":
                    devId = DeviceIdentifier.FindMatchingIdentifierOfType(DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                    ver_split = act["device"]["key"].split(".")
                    ver_maj = None
                    ver_min = None
                    if len(ver_split) == 4:
                        # 2.90.0.0
                        ver_maj = int(ver_split[0])
                        ver_min = int(ver_split[1])
                    activity.Device = Device(devId, verMaj=ver_maj, verMin=ver_min)

                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
Example #20
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))

            res = self._request_with_reauth(lambda session: session.get("https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}), serviceRecord)

            try:
                res = res.json()
            except ValueError:
                res_txt = res.text  # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" % (res.status_code, res_txt))
            for act in res:
                activity = UploadedActivity()
                # stationary activities have movingDuration = None while non-gps static activities have 0.0
                activity.Stationary = act["movingDuration"] is None
                activity.GPS = act["hasPolyline"]

                activity.Private = act["privacy"]["typeKey"] == "private"

                activity_name = act["activityName"]
                logger.debug("Name " + activity_name if activity_name is not None else "Untitled" + ":")
                if activity_name is not None and len(activity_name.strip()) and activity_name != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = activity_name

                activity_description = act["description"]
                if activity_description is not None and len(activity_description.strip()):
                    activity.Notes = activity_description

                activity.StartTime = pytz.utc.localize(datetime.strptime(act["startTimeGMT"], "%Y-%m-%d %H:%M:%S"))
                if act["elapsedDuration"] is not None:
                    activity.EndTime = activity.StartTime + timedelta(0, float(act["elapsedDuration"])/1000)
                else:
                    activity.EndTime = activity.StartTime + timedelta(0, float(act["duration"]))

                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))

                if "distance" in act and act["distance"] and float(act["distance"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["distance"]))

                activity.Type = self._resolveActivityType(act["activityType"]["typeKey"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page))
            if not exhaustive or len(res) == 0:
                break
            else:
                page += 1
        return activities, exclusions
Example #21
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        # define request parameters
        limit = 20
        offset = 0
        sort = "desc"
        # get user Fitbit ID
        userID = svcRecord.ExternalID
        # get service Tapiriik ID
        service_id = svcRecord._id
        # get user "start sync from date" info
        # then prepare afterDate var (this var determine the date since we download activities)
        user = db.users.find_one({'ConnectedServices': {'$elemMatch': {'ID': service_id, 'Service': 'fitbit'}}})
        afterDateObj = datetime.now() - timedelta(days=1)

        if user['Config']['sync_skip_before'] is not None:
            afterDateObj = user['Config']['sync_skip_before']
        else:
            if exhaustive:
                afterDateObj = datetime.now() - timedelta(days=3650)  # go back 10 years

        afterDate = afterDateObj.strftime("%Y-%m-%d")
        logging.info("\t Download Fitbit activities since : " + afterDate)

        # prepare parameters to set in fitbit request uri
        uri_parameters = {
            'limit': limit,
            'offset': offset,
            'sort': sort,
            'afterDate': afterDate,
            'token': svcRecord.Authorization.get('AccessToken')
        }
        # set base fitbit request uri
        activities_uri_origin = 'https://api.fitbit.com/1/user/' + userID + '/activities/list.json'

        # first execute with offset = 0;
        # offset will be set to -1 if the Fitbit response doesn't give next pagination info,
        # and incremented by 1 if it does
        index_total = 0
        while offset > -1:

            # prepare uri parameters
            uri_parameters['offset'] = offset
            # build fitbit uri with new parameters
            activities_uri = activities_uri_origin + "?" + urlencode(uri_parameters)
            # execute fitbit request using "request with auth" function (it refreshes token if needed)
            logging.info("\t\t downloading offset : " + str(offset))
            resp = self._requestWithAuth(lambda session: session.get(
                activities_uri,
                headers={
                    'Authorization': 'Bearer ' + svcRecord.Authorization.get('AccessToken')
                }), svcRecord)

            # check if request has error
            if resp.status_code != 204 and resp.status_code != 200:
                raise APIException("Unable to find Fitbit activities")

            # get request data
            data = {}
            try:
                data = resp.json()
            except ValueError:
                raise APIException("Failed parsing fitbit list response %s - %s" % (resp.status_code, resp.text))

            # if the request returned activity info
            if data['activities']:
                ftbt_activities = data['activities']
                logging.info("\t\t nb activity : " + str(len(ftbt_activities)))

                # for every activity in this page of results
                # (Fitbit returns 20 activities MAXIMUM; use the limit parameter)
                for ftbt_activity in ftbt_activities:
                    index_total = index_total +1
                    activity = UploadedActivity()

                    #parse date start to get timezone and date
                    parsedDate = ftbt_activity["startTime"][0:19] + ftbt_activity["startTime"][23:]
                    activity.StartTime = datetime.strptime(parsedDate, "%Y-%m-%dT%H:%M:%S%z")
                    activity.TZ = pytz.utc

                    logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ftbt_activity["activityName"]))

                    activity.EndTime = activity.StartTime + timedelta(0, (ftbt_activity["duration"]/1000))
                    activity.ServiceData = {"ActivityID": ftbt_activity["logId"], "Manual": ftbt_activity["logType"]}

                    # check if activity type ID exists
                    if ftbt_activity["activityTypeId"] not in self._reverseActivityTypeMappings:
                        exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ftbt_activity["activityTypeId"],
                                                             activity_id=ftbt_activity["logId"],
                                                             user_exception=UserException(UserExceptionType.Other)))
                        logger.info("\t\tUnknown activity")
                        continue

                    activity.Type = self._reverseActivityTypeMappings[ftbt_activity["activityTypeId"]]

                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers,
                                                                value=ftbt_activity["distance"])

                    if "speed" in ftbt_activity:
                        activity.Stats.Speed = ActivityStatistic(
                            ActivityStatisticUnit.KilometersPerHour,
                            avg=ftbt_activity["speed"],
                            max=ftbt_activity["speed"]
                        )
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ftbt_activity["calories"])
                    # Todo: find fitbit data name
                    #activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=ride[
                    #    "moving_time"] if "moving_time" in ride and ride[
                    #    "moving_time"] > 0 else None)  # They don't let you manually enter this, and I think it returns 0 for those activities.
                    # Todo: find fitbit data name
                    #if "average_watts" in ride:
                    #    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts,
                    #                                             avg=ride["average_watts"])

                    if "averageHeartRate" in ftbt_activity:
                        activity.Stats.HR.update(
                            ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ftbt_activity["averageHeartRate"]))
                    # Todo: find fitbit data name
                    #if "max_heartrate" in ride:
                    #    activity.Stats.HR.update(
                    #        ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"]))
                    # Todo: find fitbit data name
                    #if "average_cadence" in ride:
                    #    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute,
                    #                                                    avg=ride["average_cadence"]))
                    # Todo: find fitbit data name
                    #if "average_temp" in ride:
                    #    activity.Stats.Temperature.update(
                    #        ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))

                    if "calories" in ftbt_activity:
                        activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories,
                                                                  value=ftbt_activity["calories"])
                    activity.Name = ftbt_activity["activityName"]


                    activity.Private = False
                    if ftbt_activity['logType'] == 'manual':
                        activity.Stationary = True
                    else:
                        activity.Stationary = False


                    # Todo: find fitbit data
                    #activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
                    activity.AdjustTZ()
                    activity.CalculateUID()
                    activities.append(activity)
                    logging.info("\t\t Fitbit Activity ID : " + str(ftbt_activity["logId"]))

                if not exhaustive:
                    break
            # get next info for while condition and prepare offset for next request
            if 'next' not in data['pagination'] or not data['pagination']['next']:
                next = None
                offset = -1
            else:
                next = data['pagination']['next']
                offset = offset + 1

        logging.info("\t\t total Fitbit activities downloaded : " + str(index_total))
        return activities, exclusions
Example #22
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({
                "start": (page - 1) * pageSz,
                "limit": pageSz
            }))

            res = self._request_with_reauth(
                serviceRecord, lambda session: session.get(
                    "https://connect.garmin.com/modern/proxy/activity-search-service-1.0/json/activities",
                    params={
                        "start": (page - 1) * pageSz,
                        "limit": pageSz
                    }))

            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text  # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" %
                                   (res.status_code, res_txt))
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(
                        float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip(
                )) and act["activityName"][
                        "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["beginTimestamp"]["millis"]) / 1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(
                            float(act["endTimestamp"]["millis"]) / 1000))
                logger.debug("Activity s/t " + str(activity.StartTime) +
                             " on page " + str(page))
                activity.AdjustTZ()

                if "sumDistance" in act and float(
                        act["sumDistance"]["value"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(
                        self._unitMap[act["sumDistance"]["uom"]],
                        value=float(act["sumDistance"]["value"]))

                if "device" in act and act["device"]["key"] != "unknown":
                    devId = DeviceIdentifier.FindMatchingIdentifierOfType(
                        DeviceIdentifierType.GC, {"Key": act["device"]["key"]})
                    ver_split = act["device"]["key"].split(".")
                    ver_maj = None
                    ver_min = None
                    if len(ver_split) == 4:
                        # 2.90.0.0
                        ver_maj = int(ver_split[0])
                        ver_min = int(ver_split[1])
                    activity.Device = Device(devId,
                                             verMaj=ver_maj,
                                             verMin=ver_min)

                activity.Type = self._resolveActivityType(
                    act["activityType"]["key"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " +
                         str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
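
Two illustrative helpers for the timestamp and duration handling above; the field layout (beginTimestamp.millis as a UTC epoch in milliseconds, sumDuration.minutesSeconds as "MM:SS") is taken from this snippet, not from any official Garmin documentation.

from datetime import datetime, timedelta

import pytz


def gc_millis_to_utc(millis):
    # beginTimestamp/endTimestamp are UTC epoch values expressed in milliseconds.
    return pytz.utc.localize(datetime.utcfromtimestamp(float(millis) / 1000))


def gc_minutes_seconds(text):
    # "minutesSeconds" looks like "87:30", i.e. 87 minutes and 30 seconds.
    minutes, seconds = text.split(":")
    return timedelta(minutes=float(minutes), seconds=float(seconds))

# gc_minutes_seconds("87:30") -> datetime.timedelta(seconds=5250)
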
Example #23
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        cookies = self._get_cookies(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({
                "start": (page - 1) * pageSz,
                "limit": pageSz
            }))
            self._rate_limit()
            res = requests.get(
                "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                params={
                    "start": (page - 1) * pageSz,
                    "limit": pageSz
                },
                cookies=cookies)
            res = res.json()["results"]
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "sumDistance" not in act:
                    exclusions.append(
                        APIExcludeActivity("No distance",
                                           activityId=act["activityId"],
                                           userException=UserException(
                                               UserExceptionType.Corrupt)))
                    continue
                activity = UploadedActivity()

                if "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act:  # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                    activity.Stationary = True
                else:
                    activity.Stationary = False

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(
                        float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip(
                )) and act["activityName"][
                        "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]
                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["beginTimestamp"]["millis"]) / 1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(
                            float(act["endTimestamp"]["millis"]) / 1000))
                logger.debug("Activity s/t " + str(activity.StartTime) +
                             " on page " + str(page))
                activity.AdjustTZ()
                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]],
                    value=float(act["sumDistance"]["value"]))

                def mapStat(gcKey, statKey, type, useSourceUnits=False):
                    nonlocal activity, act
                    if gcKey in act:
                        value = float(act[gcKey]["value"])
                        if math.isinf(value):
                            return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                        activity.Stats.__dict__[statKey].update(
                            ActivityStatistic(self._unitMap[act[gcKey]["uom"]],
                                              **({
                                                  type: value
                                              })))
                        if useSourceUnits:
                            activity.Stats.__dict__[
                                statKey] = activity.Stats.__dict__[
                                    statKey].asUnits(
                                        self._unitMap[act[gcKey]["uom"]])

                if "sumMovingDuration" in act:
                    activity.Stats.MovingTime = ActivityStatistic(
                        ActivityStatisticUnit.Time,
                        value=timedelta(
                            seconds=float(act["sumMovingDuration"]["value"])))

                if "sumDuration" in act:
                    activity.Stats.TimerTime = ActivityStatistic(
                        ActivityStatisticUnit.Time,
                        value=timedelta(
                            minutes=float(act["sumDuration"]
                                          ["minutesSeconds"].split(":")[0]),
                            seconds=float(act["sumDuration"]
                                          ["minutesSeconds"].split(":")[1])))

                mapStat(
                    "minSpeed", "Speed", "min", useSourceUnits=True
                )  # We need to suppress conversion here, so we can fix the pace-speed issue below
                mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
                mapStat("weightedMeanSpeed",
                        "Speed",
                        "avg",
                        useSourceUnits=True)
                mapStat("minAirTemperature", "Temperature", "min")
                mapStat("maxAirTemperature", "Temperature", "max")
                mapStat("weightedMeanAirTemperature", "Temperature", "avg")
                mapStat("sumEnergy", "Energy", "value")
                mapStat("maxHeartRate", "HR", "max")
                mapStat("weightedMeanHeartRate", "HR", "avg")
                mapStat("maxRunCadence", "RunCadence", "max")
                mapStat("weightedMeanRunCadence", "RunCadence", "avg")
                mapStat("maxBikeCadence", "Cadence", "max")
                mapStat("weightedMeanBikeCadence", "Cadence", "avg")
                mapStat("minPower", "Power", "min")
                mapStat("maxPower", "Power", "max")
                mapStat("weightedMeanPower", "Power", "avg")
                mapStat("minElevation", "Elevation", "min")
                mapStat("maxElevation", "Elevation", "max")
                mapStat("gainElevation", "Elevation", "gain")
                mapStat("lossElevation", "Elevation", "loss")

                # In Garmin Land, max can be smaller than min for this field :S
                if activity.Stats.Power.Max is not None and activity.Stats.Power.Min is not None and activity.Stats.Power.Min > activity.Stats.Power.Max:
                    activity.Stats.Power.Min = None

                # To get it to match what the user sees in GC.
                if activity.Stats.RunCadence.Max is not None:
                    activity.Stats.RunCadence.Max *= 2
                if activity.Stats.RunCadence.Average is not None:
                    activity.Stats.RunCadence.Average *= 2

                # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
                if "minSpeed" in act:
                    if ":" in act["minSpeed"][
                            "withUnitAbbr"] and activity.Stats.Speed.Min:
                        activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
                if "maxSpeed" in act:
                    if ":" in act["maxSpeed"][
                            "withUnitAbbr"] and activity.Stats.Speed.Max:
                        activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
                if "weightedMeanSpeed" in act:
                    if ":" in act["weightedMeanSpeed"][
                            "withUnitAbbr"] and activity.Stats.Speed.Average:
                        activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average

                # Similarly, they do weird stuff with HR at times - %-of-max and zones
                # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
                recalcHR = False
                if "maxHeartRate" in act:
                    if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act[
                            "maxHeartRate"]["withUnitAbbr"]:
                        activity.Stats.HR.Max = None
                        recalcHR = True
                if "weightedMeanHeartRate" in act:
                    if "%" in act["weightedMeanHeartRate"][
                            "withUnitAbbr"] or "z" in act[
                                "weightedMeanHeartRate"]["withUnitAbbr"]:
                        activity.Stats.HR.Average = None
                        recalcHR = True

                activity.Type = self._resolveActivityType(
                    act["activityType"]["key"])

                activity.CalculateUID()
                activity.ServiceData = {
                    "ActivityID": act["activityId"],
                    "RecalcHR": recalcHR
                }

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " +
                         str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
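
A worked sketch of the pace correction above. As the inline comment explains, Garmin Connect labels pace fields as kph/mph while the number is really minutes per km/mi; the ":" in the display abbreviation is the tell, and 60 divided by the value converts the pace back into a speed. This helper only mirrors the snippet's logic and assumes the same withUnitAbbr field.

def fix_gc_pace(raw_value, with_unit_abbr):
    # A ":" in the display unit (e.g. "5:00 min/km") marks a pace masquerading as a speed.
    if ":" in with_unit_abbr and raw_value:
        return 60 / raw_value  # 5 min/km -> 12 km/h
    return raw_value

# fix_gc_pace(5.0, "5:00 min/km") -> 12.0
# fix_gc_pace(12.0, "12.0 km/h")  -> 12.0 (left untouched)
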
Example #24
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        activities = []
        session = self._get_session(record=serviceRecord)
        session.headers.update({"Accept": "application/json"})
        workouts_resp = session.get(
            "https://api.trainerroad.com/api/careerworkouts")

        if workouts_resp.status_code != 200:
            if workouts_resp.status_code == 401:
                raise APIException("Invalid login",
                                   block=True,
                                   user_exception=UserException(
                                       UserExceptionType.Authorization,
                                       intervention_required=True))
            raise APIException("Workout listing error")

        cached_record = cachedb.trainerroad_meta.find_one(
            {"ExternalID": serviceRecord.ExternalID})
        if not cached_record:
            cached_workout_meta = {}
        else:
            cached_workout_meta = cached_record["Workouts"]

        workouts = workouts_resp.json()
        for workout in workouts:
            # Un/f their API doesn't provide the start/end times in the list response
            # So we need to pull the extra data, if it's not already cached
            workout_id = str(workout["Id"])  # Mongo doesn't do non-string keys
            if workout_id not in cached_workout_meta:
                meta_resp = session.get(
                    "https://api.trainerroad.com/api/careerworkouts?guid=%s" %
                    workout["Guid"])
                # We don't need everything
                full_meta = meta_resp.json()
                meta = {
                    key: full_meta[key]
                    for key in [
                        "WorkoutDate", "WorkoutName", "WorkoutNotes",
                        "TotalMinutes", "TotalKM", "AvgWatts", "Kj"
                    ]
                }
                cached_workout_meta[workout_id] = meta
            else:
                meta = cached_workout_meta[workout_id]

            activity = UploadedActivity()
            activity.ServiceData = {"ID": int(workout_id)}
            activity.Name = meta["WorkoutName"]
            activity.Notes = meta["WorkoutNotes"]
            activity.Type = ActivityType.Cycling

            # Everything's in UTC
            activity.StartTime = dateutil.parser.parse(
                meta["WorkoutDate"]).replace(tzinfo=pytz.utc)
            activity.EndTime = activity.StartTime + timedelta(
                minutes=meta["TotalMinutes"])

            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Kilometers, value=meta["TotalKM"])
            activity.Stats.Power = ActivityStatistic(
                ActivityStatisticUnit.Watts, avg=meta["AvgWatts"])
            activity.Stats.Energy = ActivityStatistic(
                ActivityStatisticUnit.Kilojoules, value=meta["Kj"])

            activity.Stationary = False
            activity.GPS = False
            activity.CalculateUID()

            activities.append(activity)

        cachedb.trainerroad_meta.update(
            {"ExternalID": serviceRecord.ExternalID}, {
                "ExternalID": serviceRecord.ExternalID,
                "Workouts": cached_workout_meta
            },
            upsert=True)

        return activities, []
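
A minimal sketch of the metadata-caching pattern above: a detail request is made only for workouts missing from the per-user cache, and the whole mapping is written back once afterwards. fetch_detail and the plain-dict cache are hypothetical stand-ins for the extra careerworkouts?guid=... request and the Mongo document used in the snippet.

def collect_workout_meta(workouts, cached_meta, fetch_detail):
    wanted = ("WorkoutDate", "WorkoutName", "WorkoutNotes",
              "TotalMinutes", "TotalKM", "AvgWatts", "Kj")
    for workout in workouts:
        workout_id = str(workout["Id"])  # string keys, since Mongo rejects non-string keys
        if workout_id not in cached_meta:
            full_meta = fetch_detail(workout["Guid"])
            cached_meta[workout_id] = {key: full_meta[key] for key in wanted}
    return cached_meta
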
Example #25
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one(
            {"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {
                "ExternalID": svcRec.ExternalID,
                "Structure": [],
                "Activities": {}
            }
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        for dir in cache["Structure"]:
            for file in dir["Files"]:
                path = file["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "",
                                           1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if not existing:
                    # Continue to use the old records keyed by UID where possible
                    existing = [
                        (k, x) for k, x in cache["Activities"].items()
                        if "Path" in x and x["Path"] == relPath
                    ]  # path is relative to syncroot to reduce churn if they relocate it
                    existing = existing[0] if existing else None
                    if existing is not None:
                        existUID, existing = existing
                        existing["UID"] = existUID

                if existing and existing["Rev"] == file["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y"
                        )  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(
                            existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" %
                                 (path, "outdated meta cache"
                                  if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" %
                                    str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except:
                        pass  # We tried.

                    if hasattr(act, "OriginatedFromTapiriik"
                               ) and not act.CountTotalWaypoints():
                        # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                        # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                        # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                        if ".tcx.summary-data" in path:
                            logger.info("...summary file already moved")
                        else:
                            logger.info("...moving summary-only file")
                            dbcl.file_move(
                                path, path.replace(".tcx",
                                                   ".tcx.summary-data"))
                        continue  # DON'T include in listing - it'll be regenerated
                    del act.Laps
                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime":
                        act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")
                    }
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        if "_id" in cache:
            cachedb.dropbox_cache.save(cache)
        else:
            cachedb.dropbox_cache.insert(cache)
        return activities, exclusions
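
The meta cache stores timestamps as "%H:%M:%S %d %m %Y %z"; a sketch of the fallback used above for the odd record that was written without a parseable offset.

from datetime import datetime


def parse_cached_time(text):
    try:
        return datetime.strptime(text, "%H:%M:%S %d %m %Y %z")
    except ValueError:
        # Fall back to a naive datetime when the %z offset is missing or malformed.
        return datetime.strptime(text, "%H:%M:%S %d %m %Y")

# parse_cached_time("08:15:00 02 07 2015 +0000") -> timezone-aware (UTC)
# parse_cached_time("08:15:00 02 07 2015")       -> naive
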
Example #26
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        url = self.SetioDomain + "getRunsByUserId"
        extID = svcRecord.ExternalID

        payload = {"userId": extID}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        response = requests.post(url,
                                 data=json.dumps(payload),
                                 headers=headers)
        try:
            reqdata = response.json()
        except ValueError:
            raise APIException("Failed parsing Setio list response %s - %s" %
                               (resp.status_code, resp.text))

        for ride in reqdata:
            activity = UploadedActivity()
            activity.StartTime = datetime.strptime(
                datetime.utcfromtimestamp(
                    ride["startTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
            if "stopTimeStamp" in ride:
                activity.EndTime = datetime.strptime(
                    datetime.utcfromtimestamp(
                        ride["stopTimeStamp"]).strftime('%Y-%m-%d %H:%M:%S'),
                    "%Y-%m-%d %H:%M:%S")
            activity.ServiceData = {
                "ActivityID": ride["runId"],
                "Manual": "False"
            }

            activity.Name = ride["programName"]

            logger.debug("\tActivity s/t %s: %s" %
                         (activity.StartTime, activity.Name))
            activity.Type = ActivityType.Running
            if "totalDistance" in ride:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters, value=ride["totalDistance"])

            if "averageCadence" in ride:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        avg=ride["averageCadence"]))

            if "averageSpeed" in ride:
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=ride["averageSpeed"])

            # get comment
            url = self.SetioDomain + "getRunComment"
            payload = {
                "userId": extID,
                "runId": activity.ServiceData["ActivityID"]
            }
            headers = {
                'content-type': "application/json",
                'cache-control': "no-cache",
            }
            streamdata = requests.post(url,
                                       data=json.dumps(payload),
                                       headers=headers)
            if streamdata.status_code == 500:
                raise APIException("Internal server error")

            if streamdata.status_code == 403:
                raise APIException("No authorization to download activity",
                                   block=True,
                                   user_exception=UserException(
                                       UserExceptionType.Authorization,
                                       intervention_required=True))

            activity.Notes = None
            if streamdata.status_code == 200:  # Ok
                try:
                    commentdata = streamdata.json()
                except:
                    raise APIException("Stream data returned is not JSON")

                if "comment" in commentdata:
                    activity.Notes = commentdata["comment"]

            activity.GPS = True

            activity.Private = False
            activity.Stationary = False  # True = no sensor data

            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
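
The start/stop handling above round-trips the epoch value through strftime and strptime; assuming whole-second timestamps, a single utcfromtimestamp call yields the same naive UTC datetime.

from datetime import datetime


def naive_utc_from_epoch(seconds):
    # Equivalent to strptime(utcfromtimestamp(s).strftime(fmt), fmt) for integer seconds.
    return datetime.utcfromtimestamp(seconds)

# naive_utc_from_epoch(0) -> datetime.datetime(1970, 1, 1, 0, 0)
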
Example #27
0
    def DownloadActivityList(self, svcRecord, exhaustive_start_time=None):
        activities = []
        exclusions = []

        headers = self._apiHeaders(svcRecord)

        limitDateFormat = "%Y-%m-%d"

        if exhaustive_start_time:
            totalListEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            totalListStart = exhaustive_start_time - timedelta(days=1.5)
        else:
            totalListEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            totalListStart = totalListEnd - timedelta(
                days=20)  # Doesn't really matter

        listStep = timedelta(days=45)
        listEnd = totalListEnd
        listStart = max(totalListStart, totalListEnd - listStep)

        while True:
            logger.debug("Requesting %s to %s" % (listStart, listEnd))
            resp = requests.get(TRAININGPEAKS_API_BASE_URL +
                                "/v1/workouts/%s/%s" %
                                (listStart.strftime(limitDateFormat),
                                 listEnd.strftime(limitDateFormat)),
                                headers=headers)

            for act in resp.json():
                if not act.get("completed", True):
                    continue
                activity = UploadedActivity()
                activity.StartTime = dateutil.parser.parse(
                    act["StartTime"]).replace(tzinfo=None)
                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.EndTime = activity.StartTime + timedelta(
                    hours=act["TotalTime"])
                activity.Name = act.get("Title", None)
                activity.Notes = act.get("Description", None)
                activity.Type = self._workoutTypeMappings.get(
                    act.get("WorkoutType", "").lower(), ActivityType.Other)

                activity.Stats.Cadence = ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    avg=act.get("CadenceAverage", None),
                    max=act.get("CadenceMaximum", None))
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters,
                    value=act.get("Distance", None))
                activity.Stats.Elevation = ActivityStatistic(
                    ActivityStatisticUnit.Meters,
                    avg=act.get("ElevationAverage", None),
                    min=act.get("ElevationMinimum", None),
                    max=act.get("ElevationMaximum", None),
                    gain=act.get("ElevationGain", None),
                    loss=act.get("ElevationLoss", None))
                activity.Stats.Energy = ActivityStatistic(
                    ActivityStatisticUnit.Kilojoules,
                    value=act.get("Energy", None))
                activity.Stats.HR = ActivityStatistic(
                    ActivityStatisticUnit.BeatsPerMinute,
                    avg=act.get("HeartRateAverage", None),
                    min=act.get("HeartRateMinimum", None),
                    max=act.get("HeartRateMaximum", None))
                activity.Stats.Power = ActivityStatistic(
                    ActivityStatisticUnit.Watts,
                    avg=act.get("PowerAverage", None),
                    max=act.get("PowerMaximum", None))
                activity.Stats.Temperature = ActivityStatistic(
                    ActivityStatisticUnit.DegreesCelcius,
                    avg=act.get("TemperatureAverage", None),
                    min=act.get("TemperatureMinimum", None),
                    max=act.get("TemperatureMaximum", None))
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=act.get("VelocityAverage", None),
                    max=act.get("VelocityMaximum", None))
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive_start_time:
                break

            listStart -= listStep
            listEnd -= listStep
            if listEnd < totalListStart:
                break

        return activities, exclusions
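
A sketch of the 45-day windowing used above when exhaustive_start_time is given: the request window slides backwards in listStep increments until it passes the requested start date, newest first.

from datetime import datetime, timedelta


def date_windows(total_start, total_end, step=timedelta(days=45)):
    list_end = total_end
    list_start = max(total_start, total_end - step)
    while True:
        yield list_start, list_end
        list_start -= step
        list_end -= step
        if list_end < total_start:
            break

# list(date_windows(datetime(2020, 1, 1), datetime(2020, 4, 1)))
# -> three (start, end) pairs stepping back 45 days at a time.
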
Example #28
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #TODO find out polar session timeout
        session = self._get_session(serviceRecord)

        activities = []
        exclusions = []

        date_format = "{d.day}.{d.month}.{d.year}"
        end_date = datetime.now() + timedelta(days=1.5)
        start_date = date(1961, 4,
                          12) if exhaustive else end_date - timedelta(days=60)
        params = {
            "startDate": date_format.format(d=start_date),
            "endDate": date_format.format(d=end_date)
        }
        res = session.get(
            "https://polarpersonaltrainer.com/user/calendar/inc/listview.ftl",
            params=params)

        bs = BeautifulSoup(res.text, "html.parser")
        for activity_row in bs.select("tr[class^=listRow]"):

            data_cells = activity_row.findAll("td")
            info_cell = 0
            date_cell = 4
            time_cell = 3
            result_type_cell = 5
            sport_type_cell = 6
            type_data = data_cells[info_cell].find(
                "input", {"name": "calendarItemTypes"})
            # Skip fitness data whatever
            if type_data["value"] == "OptimizedExercise":
                activity = UploadedActivity()

                id = data_cells[info_cell].find(
                    "input", {"name": "calendarItem"})["value"]
                name = data_cells[info_cell].find(
                    "input", {"name": "calendarItemName"})["value"]
                activity.ExternalID = id
                activity.Name = name

                time_text = "{} {}".format(data_cells[date_cell].contents[0],
                                           data_cells[time_cell].contents[0])
                activity.StartTime = pytz.utc.localize(
                    datetime.strptime(time_text, "%d.%m.%Y %H:%M"))

                result_type_text = data_cells[result_type_cell].contents[0]
                if "Strength Training Result" in result_type_text:
                    activity.Type = ActivityType.StrengthTraining
                    # This type of activity always stationary
                    activity.Stationary = True
                else:
                    type_text = data_cells[sport_type_cell].contents[0]
                    activity.Type = self._reverseActivityMappings.get(
                        type_text.lower(), ActivityType.Other)

                logger.debug("\tActivity s/t {}: {}".format(
                    activity.StartTime, activity.Type))
                activity.CalculateUID()
                activities.append(activity)

        return activities, exclusions
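
A sketch of the listview scraping done above, assuming the same markup: each activity row matches tr[class^=listRow] and carries hidden inputs named calendarItemTypes, calendarItem and calendarItemName, with the date and time in the fifth and fourth cells.

from bs4 import BeautifulSoup


def parse_polar_listview(html):
    bs = BeautifulSoup(html, "html.parser")
    for row in bs.select("tr[class^=listRow]"):
        cells = row.findAll("td")
        info = cells[0]
        # Skip everything that isn't an exercise entry (fitness data etc.).
        if info.find("input", {"name": "calendarItemTypes"})["value"] != "OptimizedExercise":
            continue
        yield {
            "id": info.find("input", {"name": "calendarItem"})["value"],
            "name": info.find("input", {"name": "calendarItemName"})["value"],
            "date": cells[4].contents[0],
            "time": cells[3].contents[0],
        }
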
Example #29
0
    def _populate_sbr_activity(self, api_sbr_activity, usersettings):
        # Example JSON feed (unimportant fields have been removed)
        # [{
        #    "EventId": 63128401,                   #  Internal ID
        #    "EventType": 3,                        #  Swim (3), bike (1), or run (2)
        #    "EventDate": "4/22/2016",
        #    "EventTime": "7:44 AM",                #  User's time, time zone not specified
        #    "Planned": false,                      #  Training plan or actual data
        #    "TotalMinutes": 34.97,
        #    "TotalKilometers": 1.55448,
        #    "AverageHeartRate": 125,
        #    "MinimumHeartRate": 100,
        #    "MaximumHeartRate": 150,
        #    "MemberId": 999999,
        #    "MemberUsername": "******",
        #    "HasDeviceUpload": true,
        #    "DeviceUploadFile": "http://beginnertriathlete.com/discussion/storage/workouts/555555/abcd-123.fit",
        #    "RouteName": "",                       #  Might contain a description of the event
        #    "Comments": "",                        #  Same as above. Not overly often used.
        # }, ... ]

        activity = UploadedActivity()
        workout_id = api_sbr_activity["EventId"]
        eventType = api_sbr_activity["EventType"]
        eventDate = api_sbr_activity["EventDate"]
        eventTime = api_sbr_activity["EventTime"]
        totalMinutes = api_sbr_activity["TotalMinutes"]
        totalKms = api_sbr_activity["TotalKilometers"]
        averageHr = api_sbr_activity["AverageHeartRate"]
        minimumHr = api_sbr_activity["MinimumHeartRate"]
        maximumHr = api_sbr_activity["MaximumHeartRate"]
        deviceUploadFile = api_sbr_activity["DeviceUploadFile"]

        # Basic SBR data does not include GPS or sensor data. If this event originated from a device upload,
        # DownloadActivity will find it.
        activity.Stationary = True

        # Same as above- The data might be there, but it's not supplied in the basic activity feed.
        activity.GPS = False

        activity.Private = usersettings["Privacy"]
        activity.Type = self._workoutTypeMappings[str(eventType)]

        # Get the user's timezone from their profile. (Activity.TZ should be mentioned in the object hierarchy docs?)
        # Question: I believe if DownloadActivity finds device data, it will overwrite this. Which is OK with me.
        # The device data will most likely be more accurate.
        try:
            activity.TZ = pytz.timezone(usersettings["TimeZone"])
        except pytz.exceptions.UnknownTimeZoneError:
            activity.TZ = pytz.timezone(self._serverDefaultTimezone)

        # activity.StartTime and EndTime aren't mentioned in the object hierarchy docs, but I see them
        # set in all the other providers.
        activity.StartTime = dateutil.parser.parse(
            eventDate + " " + eventTime,
            dayfirst=False).replace(tzinfo=activity.TZ)
        activity.EndTime = activity.StartTime + timedelta(minutes=totalMinutes)

        # We can calculate some metrics from the supplied data. Would love to see some non-source code documentation
        # on each statistic and what it expects as input.
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers,
                                                    value=totalKms)
        activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                              avg=float(averageHr),
                                              min=float(minimumHr),
                                              max=float(maximumHr))
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                      value=float(totalMinutes * 60))
        activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                     value=float(totalMinutes * 60))
        # While BT does support laps, the current API doesn't report on them - a limitation that may need to be
        # corrected in a future update. For now, treat manual entries as a single lap. As more and more people upload
        # workouts using devices anyway, this probably matters much less than it once did.
        lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)
        activity.Laps = [lap]

        # Not 100% positive how this is utilized, but it is common for all providers. Detects duplicate downloads?
        activity.CalculateUID()

        # If a device file is attached, we'll get more details about this event in DownloadActivity
        activity.ServiceData = {
            "ID": int(workout_id),
            "DeviceUploadFile": deviceUploadFile
        }

        return activity
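
One detail worth flagging in the snippet above: StartTime is attached to the profile timezone with .replace(tzinfo=...). With pytz zone objects that can pin the zone's LMT-era offset; pytz's documented approach for naive datetimes is localize(). A small sketch of the difference, using an assumed zone:

from datetime import datetime

import pytz

tz = pytz.timezone("America/New_York")
naive = datetime(2016, 4, 22, 7, 44)

via_replace = naive.replace(tzinfo=tz)  # UTC offset -04:56 (LMT) - rarely what's wanted
via_localize = tz.localize(naive)       # UTC offset -04:00 (EDT)
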
Example #30
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):

        activities = []
        exclusions = []
        earliestDate = None
        earliestFirstPageDate = None
        paged = False

        while True:
            before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
            params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
            logger.debug("Req with " + str(params))
            response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)
            if response.status_code != 200:
                if response.status_code == 401 or response.status_code == 403:
                    raise APIAuthorizationException("No authorization to retrieve activity list")
                raise APIException("Unable to retrieve activity list " + str(response))
            data = response.json()
            for act in data["data"]:
                startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
                if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                    earliestDate = startTime
                logger.debug("activity pre")
                if not act["has_points"]:
                    logger.warning("\t no pts")
                    exclusions.append(APIExcludeActivity("No points", activityId=act["id"]))
                    continue # it'll break strava, which needs waypoints to find TZ. Meh
                if "tracking" in act and act["tracking"]:
                    logger.warning("\t tracking")
                    exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                    continue  # come back once they've completed the activity
                activity = UploadedActivity()
                activity.StartTime = startTime
                activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
                logger.debug("\tActivity s/t " + str(activity.StartTime))
                # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
                cachedTrackData = cachedb.endomondo_activity_cache.find_one({"TrackID": act["id"]})
                if cachedTrackData is None:
                    data = self._downloadRawTrackRecord(serviceRecord, act["id"])
                    self._populateActivityFromTrackRecord(activity, data, minimumWaypoints=True)
                    cachedTrackData = {"Owner": serviceRecord.ExternalID, "TrackID": act["id"], "Data": data, "StartTime": activity.StartTime}
                    if not paged or AGGRESSIVE_CACHE:  # Don't cache stuff that we won't need in the immediate future.
                        cachedb.endomondo_activity_cache.insert(cachedTrackData)
                else:
                    self._populateActivityFromTrackRecord(activity, cachedTrackData["Data"], minimumWaypoints=True)
                activity.Waypoints = []
                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]

                activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["id"], "ActivityData": cachedTrackData["Data"]}]
                activity.CalculateUID()
                activities.append(activity)
            if not paged:
                earliestFirstPageDate = earliestDate
            if not exhaustive or ("more" in data and data["more"] is False):
                break
            else:
                paged = True
        if not AGGRESSIVE_CACHE:
            cachedb.endomondo_activity_cache.remove({"Owner": serviceRecord.ExternalID, "$or":[{"StartTime":{"$lt": earliestFirstPageDate}}, {"StartTime":{"$exists": False}}]})
        return activities, exclusions
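
The listing above walks backwards by passing the earliest start time seen so far as the before parameter, formatted exactly the way Endomondo reports start_time; a sketch of that bookkeeping.

from datetime import datetime

import pytz


def before_param(earliest):
    if earliest is None:
        return ""
    return earliest.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")

# before_param(None) -> ""
# before_param(pytz.utc.localize(datetime(2014, 5, 1, 12, 0))) -> "2014-05-01 12:00:00 UTC"
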
Example #31
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        cookies = self._get_cookies(record=serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"
        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, cookies=cookies)
            res = res.json()
            for act in res["items"]:
                activity = UploadedActivity()
                activity.UploadedTo = [{"Connection": serviceRecord, "ActivityURI": act["uri"]}]

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                activity.StartTime = dateutil.parser.parse(act["start_time"])
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo._offset.total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.

                activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))

                # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
                if activity.TZ == pytz.utc:
                    # So, we get the first location in the activity and calculate the TZ from that.
                    try:
                        firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                    except APIExcludeActivity:
                        pass
                    else:
                        activity.CalculateTZ(firstLocation)
                        activity.AdjustTZ()

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Distance = float(act["total_distance"])

                types = [x.strip().lower() for x in act["type"].split(":")]
                types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
                activity.Type = None
                for type_key in types:
                    if type_key in self._activityMappings:
                        activity.Type = self._activityMappings[type_key]
                        break
                if not activity.Type:
                    exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"]))
                    continue

                activity.CalculateUID()
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        return activities, exclusions
Example #32
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        for dir in cache["Structure"]:
            for file in dir["Files"]:
                path = file["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if not existing:
                    # Continue to use the old records keyed by UID where possible
                    existing = [
                        (k, x) for k, x in cache["Activities"].items() if "Path" in x and x["Path"] == relPath
                    ]  # path is relative to syncroot to reduce churn if they relocate it
                    existing = existing[0] if existing else None
                    if existing is not None:
                        existUID, existing = existing
                        existing["UID"] = existUID

                if existing and existing["Rev"] == file["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y"
                        )  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug(
                        "Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache")
                    )
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except:
                        pass  # We tried.

                    if hasattr(act, "OriginatedFromTapiriik") and not act.CountTotalWaypoints():
                        # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                        # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                        # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                        if ".tcx.summary-data" in path:
                            logger.info("...summary file already moved")
                        else:
                            logger.info("...moving summary-only file")
                            dbcl.file_move(path, path.replace(".tcx", ".tcx.summary-data"))
                        continue  # DON'T include in listing - it'll be regenerated
                    del act.Laps
                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z"),
                    }
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        if "_id" in cache:
            cachedb.dropbox_cache.save(cache)
        else:
            cachedb.dropbox_cache.insert(cache)
        return activities, exclusions
Example #33
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        # Dropbox API v2 doesn't like / as root.
        if syncRoot == "/":
            syncRoot = ""
        # New Dropbox API prefers path_lower, it would seem.
        syncRoot = syncRoot.lower()

        # There used to be a massive affair going on here to cache the folder structure locally.
        # Dropbox API 2.0 doesn't support the hashes I need for that.
        # Oh well. Throw that data out now. Well, don't load it at all.
        cache = cachedb.dropbox_cache.find_one(
            {"ExternalID": svcRec.ExternalID}, {
                "ExternalID": True,
                "Activities": True
            })
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Activities": {}}

        try:
            list_result = dbcl.files_list_folder(syncRoot, recursive=True)
        except dropbox.exceptions.DropboxException as e:
            self._raiseDbException(e)

        def cache_writeback():
            if "_id" in cache:
                cachedb.dropbox_cache.save(cache)
            else:
                insert_result = cachedb.dropbox_cache.insert(cache)
                cache["_id"] = insert_result.inserted_id

        activities = []
        exclusions = []
        discovered_activity_cache_keys = set()

        while True:
            for entry in list_result.entries:
                if not hasattr(entry, "rev"):
                    # Not a file -> we don't care.
                    continue
                path = entry.path_lower
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "",
                                           1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                    discovered_activity_cache_keys.add(hashedRelPath)
                else:
                    existing = None

                if not existing:
                    # Continue to use the old records keyed by UID where possible
                    existing = [
                        (k, x) for k, x in cache["Activities"].items()
                        if "Path" in x and x["Path"] == relPath
                    ]  # path is relative to syncroot to reduce churn if they relocate it
                    existing = existing[0] if existing else None
                    if existing is not None:
                        existUID, existing = existing
                        discovered_activity_cache_keys.add(existUID)
                        existing["UID"] = existUID

                if existing and existing["Rev"] == entry.rev:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except:
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y"
                        )  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(
                            existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" %
                                 (path, "outdated meta cache"
                                  if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" %
                                    str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except:
                        pass  # We tried.

                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime":
                        act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")
                    }
                    # Incrementally update the cache db.
                    # Otherwise, if we crash later on in listing
                    # (due to OOM or similar), we'll never make progress on this account.
                    cache_writeback()
                    discovered_activity_cache_keys.add(hashedRelPath)
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

            # Perform pagination.
            if list_result.has_more:
                list_result = dbcl.files_list_folder_continue(
                    list_result.cursor)
            else:
                break

        # Drop deleted activities' records from cache.
        all_activity_cache_keys = set(cache["Activities"].keys())
        for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys:
            del cache["Activities"][deleted_key]

        cache_writeback()
        return activities, exclusions
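The listing above keys its metadata cache on a hash of the path relative to the Dropbox sync root, falls back to older UID-keyed records, and writes the cache back incrementally so a crash mid-listing does not lose progress. Below is a minimal, self-contained sketch of just the lookup/store step; hash_path, lookup_cached and store_cached are hypothetical names, not the real service helpers.

import hashlib
from datetime import datetime, timezone

def hash_path(rel_path):
    # Hash the syncroot-relative path so the key survives the user moving their sync folder.
    return hashlib.sha1(rel_path.encode("utf-8")).hexdigest()

def lookup_cached(cache, rel_path, rev):
    # Only a cache hit with a matching Dropbox rev lets us skip downloading the file.
    entry = cache["Activities"].get(hash_path(rel_path))
    return entry if entry and entry["Rev"] == rev else None

def store_cached(cache, rel_path, rev, uid, start_time):
    # Timestamps are stored as strings; the format mirrors the one used in the listing above.
    cache["Activities"][hash_path(rel_path)] = {
        "Rev": rev,
        "UID": uid,
        "StartTime": start_time.strftime("%H:%M:%S %d %m %Y %z"),
    }

cache = {"Activities": {}}
store_cached(cache, "/2016/ride.gpx", "rev-abc", "uid-1",
             datetime(2016, 5, 1, 9, 30, tzinfo=timezone.utc))
print(lookup_cached(cache, "/2016/ride.gpx", "rev-abc"))  # cache hit
print(lookup_cached(cache, "/2016/ride.gpx", "rev-def"))  # stale rev -> None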
Example #34
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        session = self._get_session(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({
                "start": (page - 1) * pageSz,
                "limit": pageSz
            }))
            self._rate_limit()

            retried_auth = False
            while True:
                res = session.get(
                    "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                    params={
                        "start": (page - 1) * pageSz,
                        "limit": pageSz
                    })
                # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
                if res.status_code == 403 and not retried_auth:
                    retried_auth = True
                    session = self._get_session(serviceRecord, skip_cache=True)
                else:
                    break
            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text  # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" %
                                   (res.status_code, res_txt))
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "sumDistance" not in act:
                    exclusions.append(
                        APIExcludeActivity("No distance",
                                           activityId=act["activityId"],
                                           userException=UserException(
                                               UserExceptionType.Corrupt)))
                    continue
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(
                        float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip(
                )) and act["activityName"][
                        "value"] != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(
                        float(act["beginTimestamp"]["millis"]) / 1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]
                                      ["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(
                            float(act["endTimestamp"]["millis"]) / 1000))
                logger.debug("Activity s/t " + str(activity.StartTime) +
                             " on page " + str(page))
                activity.AdjustTZ()

                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]],
                    value=float(act["sumDistance"]["value"]))

                activity.Type = self._resolveActivityType(
                    act["activityType"]["key"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " +
                         str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
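The inner loop above retries a 403 exactly once with a freshly-built session before accepting the response. A small sketch of that retry-once idiom with the requests library; build_session is a hypothetical stand-in for the connector's _get_session.

import requests

def build_session(skip_cache=False):
    # Hypothetical stand-in: the real _get_session re-runs the login flow when skip_cache is True.
    return requests.Session()

def get_with_single_reauth(url, params):
    session = build_session()
    retried_auth = False
    while True:
        res = session.get(url, params=params)
        if res.status_code == 403 and not retried_auth:
            retried_auth = True
            session = build_session(skip_cache=True)  # one fresh login, then take whatever comes back
        else:
            return res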
Example #35
0
    def _populateActivity(self, rawRecord):
        ''' Populate the 1st level of the activity object with all details required for UID from pulsstory API data '''
        activity = UploadedActivity()
        #  can stay local + naive here, recipient services can calculate TZ as required
        activity.Name = rawRecord["Name"] if "Name" in rawRecord else None
        activity.StartTime = datetime.strptime(rawRecord["StartTime"],
                                               "%Y-%m-%d %H:%M:%S")
        activity.Stats.MovingTime = ActivityStatistic(
            ActivityStatisticUnit.Seconds, value=float(rawRecord["Duration"]))
        activity.EndTime = activity.StartTime + timedelta(
            seconds=float(rawRecord["Duration"]))
        activity.Stats.Distance = ActivityStatistic(
            ActivityStatisticUnit.Meters, value=rawRecord["Distance"])
        if (activity.EndTime - activity.StartTime).total_seconds() > 0:
            activity.Stats.Speed = ActivityStatistic(
                ActivityStatisticUnit.KilometersPerHour,
                avg=activity.Stats.Distance.asUnits(
                    ActivityStatisticUnit.Kilometers).Value /
                ((activity.EndTime - activity.StartTime).total_seconds() / 60 /
                 60))
        activity.Stats.Energy = ActivityStatistic(
            ActivityStatisticUnit.Kilocalories,
            value=rawRecord["Energy"] if "Energy" in rawRecord else None)
        if rawRecord["Type"] in self._activityMappings:
            activity.Type = self._activityMappings[rawRecord["Type"]]
        activity.GPS = rawRecord["HasPath"] if "HasPath" in rawRecord else False
        # An activity that has recorded points is not stationary.
        activity.Stationary = not rawRecord["HasPoints"] if "HasPoints" in rawRecord else True
        activity.Notes = rawRecord["Notes"] if "Notes" in rawRecord else None
        activity.Private = rawRecord[
            "Private"] if "Private" in rawRecord else True

        activity.CalculateUID()
        return activity
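The average speed above is simply distance over elapsed time converted to km/h, guarded against a zero-length activity. The same arithmetic without the ActivityStatistic wrapper, on made-up numbers:

from datetime import datetime, timedelta

start = datetime(2020, 3, 1, 8, 0, 0)
duration_s = 3600.0   # rawRecord["Duration"], seconds
distance_m = 12500.0  # rawRecord["Distance"], meters

end = start + timedelta(seconds=duration_s)
elapsed_h = (end - start).total_seconds() / 60 / 60
avg_kph = (distance_m / 1000) / elapsed_h if elapsed_h > 0 else None
print(avg_kph)  # 12.5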
Example #36
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        oauthSession = self._oauthSession(serviceRecord)

        activities = []
        exclusions = []

        page_url = "https://api.endomondo.com/api/1/workouts"

        while True:
            resp = oauthSession.get(page_url)
            try:
                respList = resp.json()["data"]
            except ValueError:
                self._rateLimitBailout(resp)
                raise APIException("Error decoding activity list resp %s %s" % (resp.status_code, resp.text))
            for actInfo in respList:
                activity = UploadedActivity()
                activity.StartTime = self._parseDate(actInfo["start_time"])
                logger.debug("Activity s/t %s" % activity.StartTime)
                if "is_tracking" in actInfo and actInfo["is_tracking"]:
                    exclusions.append(APIExcludeActivity("Not complete", activity_id=actInfo["id"], permanent=False, user_exception=UserException(UserExceptionType.LiveTracking)))
                    continue

                if "end_time" in actInfo:
                    activity.EndTime = self._parseDate(actInfo["end_time"])

                if actInfo["sport"] in self._activityMappings:
                    activity.Type = self._activityMappings[actInfo["sport"]]

                # "duration" is timer time
                if "duration_total" in actInfo:
                    activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(actInfo["duration_total"]))

                if "distance_total" in actInfo:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance_total"]))

                if "calories_total" in actInfo:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(actInfo["calories_total"]))

                activity.Stats.Elevation = ActivityStatistic(ActivityStatisticUnit.Meters)

                if "altitude_max" in actInfo:
                    activity.Stats.Elevation.Max = float(actInfo["altitude_max"])

                if "altitude_min" in actInfo:
                    activity.Stats.Elevation.Min = float(actInfo["altitude_min"])

                if "total_ascent" in actInfo:
                    activity.Stats.Elevation.Gain = float(actInfo["total_ascent"])

                if "total_descent" in actInfo:
                    activity.Stats.Elevation.Loss = float(actInfo["total_descent"])

                activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour)
                if "speed_max" in actInfo:
                    activity.Stats.Speed.Max = float(actInfo["speed_max"])

                if "heart_rate_avg" in actInfo:
                    activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(actInfo["heart_rate_avg"]))

                if "heart_rate_max" in actInfo:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=float(actInfo["heart_rate_max"])))

                if "cadence_avg" in actInfo:
                    activity.Stats.Cadence = ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=int(actInfo["cadence_avg"]))

                if "cadence_max" in actInfo:
                    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, max=int(actInfo["cadence_max"])))

                if "power_avg" in actInfo:
                    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=int(actInfo["power_avg"]))

                if "power_max" in actInfo:
                    activity.Stats.Power.update(ActivityStatistic(ActivityStatisticUnit.Watts, max=int(actInfo["power_max"])))

                if "title" in actInfo:
                    activity.Name = actInfo["title"]

                activity.ServiceData = {"WorkoutID": int(actInfo["id"]), "Sport": actInfo["sport"]}

                activity.CalculateUID()
                activities.append(activity)

            paging = resp.json()["paging"]
            if "next" not in paging or not paging["next"] or not exhaustive:
                break
            else:
                page_url = paging["next"]

        return activities, exclusions
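Pagination here just follows paging["next"] until it disappears (or stops after one page in non-exhaustive mode). A self-contained sketch of that loop over a fake two-page payload; the real code replaces the dict lookup with oauthSession.get(page_url).json().

# Fake two-page response so the sketch runs on its own.
PAGES = {
    "page1": {"data": [1, 2], "paging": {"next": "page2"}},
    "page2": {"data": [3], "paging": {}},
}

def list_all(first_url, exhaustive=True):
    items, page_url = [], first_url
    while True:
        resp = PAGES[page_url]  # stand-in for the HTTP round trip
        items.extend(resp["data"])
        paging = resp.get("paging", {})
        if not exhaustive or not paging.get("next"):
            break
        page_url = paging["next"]
    return items

print(list_all("page1"))                    # [1, 2, 3]
print(list_all("page1", exhaustive=False))  # [1, 2]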
Example #37
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        logger.debug("DownloadActivityList")
        allItems = []
        headers = self._apiHeaders(serviceRecord)
        nextRequest = '/v7.1/workout/?user=' + str(serviceRecord.ExternalID)
        while True:
            response = requests.get("https://api.mapmyfitness.com" +
                                    nextRequest,
                                    headers=headers)
            if response.status_code != 200:
                if response.status_code == 401 or response.status_code == 403:
                    raise APIException(
                        "No authorization to retrieve activity list",
                        block=True,
                        user_exception=UserException(
                            UserExceptionType.Authorization,
                            intervention_required=True))
                raise APIException(
                    "Unable to retrieve activity list " + str(response),
                    serviceRecord)
            data = response.json()
            allItems += data["_embedded"]["workouts"]
            nextLink = data["_links"].get("next")
            if not exhaustive or not nextLink:
                break
            nextRequest = nextLink[0]["href"]

        activities = []
        exclusions = []
        for act in allItems:
            # TODO catch exception and add to exclusions
            activity = UploadedActivity()
            activityID = act["_links"]["self"][0]["id"]
            activity.StartTime = datetime.strptime(act["start_datetime"],
                                                   "%Y-%m-%dT%H:%M:%S%z")
            activity.Notes = act["notes"] if "notes" in act else None

            # aggregate
            aggregates = act["aggregates"]
            elapsed_time_total = aggregates[
                "elapsed_time_total"] if "elapsed_time_total" in aggregates else "0"
            activity.EndTime = activity.StartTime + timedelta(
                0, round(float(elapsed_time_total)))
            activity.Stats.TimerTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds, value=float(elapsed_time_total))
            activity.Stats.MovingTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds, value=float(elapsed_time_total))
            if "active_time_total" in aggregates:
                activity.Stats.MovingTime = ActivityStatistic(
                    ActivityStatisticUnit.Seconds,
                    value=float(aggregates["active_time_total"]))

            if "distance_total" in aggregates:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters,
                    value=float(aggregates["distance_total"]))

            if "speed_min" in aggregates:
                activity.Stats.Speed.Min = float(aggregates["speed_min"])
            if "speed_max" in aggregates:
                activity.Stats.Speed.Max = float(aggregates["speed_max"])
            if "speed_avg" in aggregates:
                activity.Stats.Speed.Average = float(aggregates["speed_avg"])

            if "heartrate_min" in aggregates:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      min=float(aggregates["heartrate_min"])))
            if "heartrate_max" in aggregates:
                activity.Stats.HR.update(
                    ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                      max=float(aggregates["heartrate_max"])))
            if "heartrate_avg" in aggregates:
                activity.Stats.HR = ActivityStatistic(
                    ActivityStatisticUnit.BeatsPerMinute,
                    avg=float(aggregates["heartrate_avg"]))

            if "cadence_min" in aggregates:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        min=int(aggregates["cadence_min"])))
            if "cadence_max" in aggregates:
                activity.Stats.Cadence.update(
                    ActivityStatistic(
                        ActivityStatisticUnit.RevolutionsPerMinute,
                        max=int(aggregates["cadence_max"])))
            if "cadence_avg" in aggregates:
                activity.Stats.Cadence = ActivityStatistic(
                    ActivityStatisticUnit.RevolutionsPerMinute,
                    avg=int(aggregates["cadence_avg"]))

            if "power_min" in aggregates:
                activity.Stats.Power.update(
                    ActivityStatistic(ActivityStatisticUnit.Watts,
                                      min=int(aggregates["power_min"])))
            if "power_max" in aggregates:
                activity.Stats.Power.update(
                    ActivityStatistic(ActivityStatisticUnit.Watts,
                                      max=int(aggregates["power_max"])))
            if "power_avg" in aggregates:
                activity.Stats.Power = ActivityStatistic(
                    ActivityStatisticUnit.Watts,
                    avg=int(aggregates["power_avg"]))

            activityTypeLink = act["_links"].get("activity_type")
            activityTypeID = activityTypeLink[0][
                "id"] if activityTypeLink is not None else None

            privacyLink = act["_links"].get("privacy")
            privacyID = privacyLink[0][
                "id"] if privacyLink is not None else None
            activity.Private = privacyID == "0"

            activity.Type = self._resolveActivityType(activityTypeID, headers)

            activity.ServiceData = {
                "ActivityID": activityID,
                "activityTypeID": activityTypeID,
                "privacyID": privacyID
            }
            activity.CalculateUID()
            activities.append(activity)
        return activities, exclusions
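MapMyFitness responses are HAL-ish: both pagination and related-resource ids live in _links arrays rather than plain fields, which is why the code above digs into act["_links"]["self"][0]["id"] and data["_links"].get("next"). A tiny sketch of that traversal on an illustrative payload:

workout = {
    "_links": {
        "self": [{"id": "12345", "href": "/v7.1/workout/12345/"}],
        "activity_type": [{"id": "16", "href": "/v7.1/activity_type/16/"}],
    },
}
listing = {"_links": {"next": [{"href": "/v7.1/workout/?user=1&offset=40"}]}}

def link_id(obj, rel):
    # First entry of the link array, or None when the relation is absent.
    link = obj["_links"].get(rel)
    return link[0]["id"] if link else None

print(link_id(workout, "self"))           # 12345
print(link_id(workout, "privacy"))        # None -> treated as "not private" above
next_link = listing["_links"].get("next")
print(next_link[0]["href"] if next_link else None)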
Example #38
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        now = datetime.now()
        prev = now - timedelta(6 * 365 / 12)

        period = []

        aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year,
                                     now.month)
        period.append(aperiod)

        if exhaustive:
            for _ in range(20):
                now = prev
                prev = now - timedelta(6 * 365 / 12)
                aperiod = "%s%02d-%s%02d" % (prev.year, prev.month, now.year,
                                             now.month)
                period.append(aperiod)

        for dateInterval in period:
            headers = self._getAuthHeaders(svcRecord)
            resp = requests.get(DECATHLONCOACH_API_BASE_URL + "/users/" +
                                str(svcRecord.ExternalID) +
                                "/activities.xml?date=" + dateInterval,
                                headers=headers)
            if resp.status_code in (400, 401, 403):
                logger.info(resp.content)
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))

            root = xml.fromstring(resp.content)

            logger.info("\t\t nb activity : " +
                        str(len(root.findall('.//ID'))))

            for ride in root.iter('ACTIVITY'):

                activity = UploadedActivity()
                activity.TZ = pytz.timezone("UTC")

                startdate = ride.find('.//STARTDATE').text + ride.find(
                    './/TIMEZONE').text
                datebase = parse(startdate)

                activity.StartTime = datebase  #pytz.utc.localize(datebase)

                activity.ServiceData = {
                    "ActivityID": ride.find('ID').text,
                    "Manual": ride.find('MANUAL').text
                }

                logger.info("\t\t DecathlonCoach Activity ID : " +
                            ride.find('ID').text)

                if ride.find('SPORTID'
                             ).text not in self._reverseActivityTypeMappings:
                    exclusions.append(
                        APIExcludeActivity("Unsupported activity type %s" %
                                           ride.find('SPORTID').text,
                                           activity_id=ride.find('ID').text,
                                           user_exception=UserException(
                                               UserExceptionType.Other)))
                    logger.info(
                        "\t\tDecathlonCoach Unknown activity, sport id " +
                        ride.find('SPORTID').text + " is not mapped")
                    continue

                activity.Type = self._reverseActivityTypeMappings[ride.find(
                    'SPORTID').text]

                for val in ride.iter('VALUE'):
                    if val.get('id') == self._unitMap["duration"]:
                        activity.EndTime = activity.StartTime + timedelta(
                            0, int(val.text))
                    if val.get('id') == self._unitMap["distance"]:
                        activity.Stats.Distance = ActivityStatistic(
                            ActivityStatisticUnit.Meters, value=int(val.text))
                    if val.get('id') == self._unitMap["kcal"]:
                        activity.Stats.Energy = ActivityStatistic(
                            ActivityStatisticUnit.Kilocalories,
                            value=int(val.text))
                    if val.get('id') == self._unitMap["speedaverage"]:
                        meterperhour = int(val.text)
                        meterpersecond = meterperhour / 3600
                        activity.Stats.Speed = ActivityStatistic(
                            ActivityStatisticUnit.MetersPerSecond,
                            avg=meterpersecond,
                            max=None)

                if not ride.find('LIBELLE').text:
                    txtdate = startdate.split(' ')
                    activity.Name = "Sport DecathlonCoach " + txtdate[0]
                else:
                    activity.Name = ride.find('LIBELLE').text

                activity.Private = False
                activity.Stationary = ride.find('MANUAL').text
                activity.GPS = ride.find('ABOUT').find('TRACK').text
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

        return activities, exclusions
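DecathlonCoach is queried one roughly-six-month YYYYMM-YYYYMM window at a time; exhaustive mode keeps stepping the window back twenty more times. A sketch of how those period strings are built:

from datetime import datetime, timedelta

def build_periods(exhaustive=False, now=None):
    now = now or datetime.now()
    prev = now - timedelta(days=6 * 365 / 12)  # roughly six months
    periods = ["%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month)]
    if exhaustive:
        for _ in range(20):
            now, prev = prev, prev - timedelta(days=6 * 365 / 12)
            periods.append("%s%02d-%s%02d" % (prev.year, prev.month, now.year, now.month))
    return periods

print(build_periods(now=datetime(2020, 6, 15)))
# ['201912-202006']
print(len(build_periods(exhaustive=True, now=datetime(2020, 6, 15))))  # 21 windows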
Example #39
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        # http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        cookies = self._get_cookies(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            self._rate_limit()
            res = requests.get(
                "http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities",
                params={"start": (page - 1) * pageSz, "limit": pageSz},
                cookies=cookies,
            )
            res = res.json()["results"]
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "sumDistance" not in act:
                    exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                    continue
                activity = UploadedActivity()

                if (
                    "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                ):  # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                    activity.Stationary = True
                else:
                    activity.Stationary = False

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if (
                    len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled"
                ):  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]
                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(
                    datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"]) / 1000)
                )
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, round(float(act["sumElapsedDuration"]["value"]))
                    )
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(
                        minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                        seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                    )
                else:
                    activity.EndTime = pytz.utc.localize(
                        datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"]) / 1000)
                    )
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()
                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Stats.Distance = ActivityStatistic(
                    self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"])
                )

                def mapStat(gcKey, statKey, type, useSourceUnits=False):
                    nonlocal activity, act
                    if gcKey in act:
                        value = float(act[gcKey]["value"])
                        if math.isinf(value):
                            return  # GC returns the minimum speed as "-Infinity" instead of 0 some times :S
                        activity.Stats.__dict__[statKey].update(
                            ActivityStatistic(self._unitMap[act[gcKey]["uom"]], **({type: value}))
                        )
                        if useSourceUnits:
                            activity.Stats.__dict__[statKey] = activity.Stats.__dict__[statKey].asUnits(
                                self._unitMap[act[gcKey]["uom"]]
                            )

                if "sumMovingDuration" in act:
                    activity.Stats.MovingTime = ActivityStatistic(
                        ActivityStatisticUnit.Time, value=timedelta(seconds=float(act["sumMovingDuration"]["value"]))
                    )

                if "sumDuration" in act:
                    activity.Stats.TimerTime = ActivityStatistic(
                        ActivityStatisticUnit.Time,
                        value=timedelta(
                            minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]),
                            seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]),
                        ),
                    )

                mapStat(
                    "minSpeed", "Speed", "min", useSourceUnits=True
                )  # We need to suppress conversion here, so we can fix the pace-speed issue below
                mapStat("maxSpeed", "Speed", "max", useSourceUnits=True)
                mapStat("weightedMeanSpeed", "Speed", "avg", useSourceUnits=True)
                mapStat("minAirTemperature", "Temperature", "min")
                mapStat("maxAirTemperature", "Temperature", "max")
                mapStat("weightedMeanAirTemperature", "Temperature", "avg")
                mapStat("sumEnergy", "Energy", "value")
                mapStat("maxHeartRate", "HR", "max")
                mapStat("weightedMeanHeartRate", "HR", "avg")
                mapStat("maxRunCadence", "RunCadence", "max")
                mapStat("weightedMeanRunCadence", "RunCadence", "avg")
                mapStat("maxBikeCadence", "Cadence", "max")
                mapStat("weightedMeanBikeCadence", "Cadence", "avg")
                mapStat("minPower", "Power", "min")
                mapStat("maxPower", "Power", "max")
                mapStat("weightedMeanPower", "Power", "avg")
                mapStat("minElevation", "Elevation", "min")
                mapStat("maxElevation", "Elevation", "max")
                mapStat("gainElevation", "Elevation", "gain")
                mapStat("lossElevation", "Elevation", "loss")

                # In Garmin Land, max can be smaller than min for this field :S
                if (
                    activity.Stats.Power.Max is not None
                    and activity.Stats.Power.Min is not None
                    and activity.Stats.Power.Min > activity.Stats.Power.Max
                ):
                    activity.Stats.Power.Min = None

                # To get it to match what the user sees in GC.
                if activity.Stats.RunCadence.Max is not None:
                    activity.Stats.RunCadence.Max *= 2
                if activity.Stats.RunCadence.Average is not None:
                    activity.Stats.RunCadence.Average *= 2

                # GC incorrectly reports pace measurements as kph/mph when they are in fact in min/km or min/mi
                if "minSpeed" in act:
                    if ":" in act["minSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Min:
                        activity.Stats.Speed.Min = 60 / activity.Stats.Speed.Min
                if "maxSpeed" in act:
                    if ":" in act["maxSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Max:
                        activity.Stats.Speed.Max = 60 / activity.Stats.Speed.Max
                if "weightedMeanSpeed" in act:
                    if ":" in act["weightedMeanSpeed"]["withUnitAbbr"] and activity.Stats.Speed.Average:
                        activity.Stats.Speed.Average = 60 / activity.Stats.Speed.Average

                # Similarly, they do weird stuff with HR at times - %-of-max and zones
                # ...and we can't just fix these, so we have to calculate it after the fact (blegh)
                recalcHR = False
                if "maxHeartRate" in act:
                    if "%" in act["maxHeartRate"]["withUnitAbbr"] or "z" in act["maxHeartRate"]["withUnitAbbr"]:
                        activity.Stats.HR.Max = None
                        recalcHR = True
                if "weightedMeanHeartRate" in act:
                    if (
                        "%" in act["weightedMeanHeartRate"]["withUnitAbbr"]
                        or "z" in act["weightedMeanHeartRate"]["withUnitAbbr"]
                    ):
                        activity.Stats.HR.Average = None
                        recalcHR = True

                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()
                activity.ServiceData = {"ActivityID": act["activityId"], "RecalcHR": recalcHR}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
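The speed fix-up above exploits the fact that when Garmin mislabels a pace as a speed, the display string (withUnitAbbr) contains a colon; the stored value is then minutes per unit distance, so 60 / value recovers the speed. A worked sketch with illustrative values (the sample strings are made up):

def fix_pace(value, with_unit_abbr):
    # e.g. value=5.5 displayed as "5:30 min/km" is really a pace; 60 / 5.5 ~= 10.9 km/h.
    # A genuine speed (no colon in the display string) passes through unchanged.
    if ":" in with_unit_abbr and value:
        return 60 / value
    return value

print(fix_pace(5.5, "5:30 min/km"))   # ~10.91 (km/h)
print(fix_pace(24.8, "24.8 km/h"))    # 24.8, unchanged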
Example #40
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        cookies = self._get_cookies(record=serviceRecord)
        page = 1
        pageSz = 50
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            res = requests.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz}, cookies=cookies)
            res = res.json()["results"]
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                if "beginLatitude" not in act or "endLatitude" not in act or (act["beginLatitude"] is act["endLatitude"] and act["beginLongitude"] is act["endLongitude"]):
                    exclusions.append(APIExcludeActivity("No points", activityId=act["activityId"]))
                    continue
                if "sumDistance" not in act:
                    exclusions.append(APIExcludeActivity("No distance", activityId=act["activityId"]))
                    continue
                activity = UploadedActivity()

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled":
                    activity.Name = act["activityName"]["value"]
                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()
                # TODO: fix the distance stats to account for the fact that this incorrectly reported km instead of meters for the longest time.
                activity.Distance = float(act["sumDistance"]["value"]) * (1.60934 if act["sumDistance"]["uom"] == "mile" else 1) * 1000  # In meters...
                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()
                activity.UploadedTo = [{"Connection": serviceRecord, "ActivityID": act["activityId"]}]
                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
Example #41
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []

        url = self.SingletrackerDomain + "getRidesByUserId"
        extID = svcRecord.ExternalID

        payload = {"userId": extID}
        headers = {
            'content-type': "application/json",
            'cache-control': "no-cache",
        }
        response = requests.post(url,
                                 data=json.dumps(payload),
                                 headers=headers)
        try:
            reqdata = response.json()
        except ValueError:
            raise APIException(
                "Failed parsing Singletracker list response %s - %s" %
                (response.status_code, response.text))

        for ride in reqdata:
            activity = UploadedActivity()
            activity.StartTime = datetime.strptime(
                datetime.utcfromtimestamp(
                    ride["startTime"]).strftime('%Y-%m-%d %H:%M:%S'),
                "%Y-%m-%d %H:%M:%S")
            if "stopTime" in ride:
                activity.EndTime = datetime.strptime(
                    datetime.utcfromtimestamp(
                        ride["stopTime"]).strftime('%Y-%m-%d %H:%M:%S'),
                    "%Y-%m-%d %H:%M:%S")
            activity.ServiceData = {
                "ActivityID": ride["rideId"],
                "Manual": "False"
            }

            activity.Name = ride["trackName"]

            logger.debug("\tActivity s/t %s: %s" %
                         (activity.StartTime, activity.Name))
            activity.Type = ActivityType.MountainBiking
            if "totalDistance" in ride:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters, value=ride["totalDistance"])

            if "avgSpeed" in ride:
                activity.Stats.Speed = ActivityStatistic(
                    ActivityStatisticUnit.MetersPerSecond,
                    avg=ride["avgSpeed"])
            activity.Notes = None

            activity.GPS = True

            activity.Private = False
            activity.Stationary = False  # True = no sensor data

            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
Example #42
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        headers = self._getAuthHeaders(serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"

        activity_tz_cache_raw = cachedb.sporttracks_meta_cache.find_one({"ExternalID": serviceRecord.ExternalID})
        activity_tz_cache_raw = activity_tz_cache_raw if activity_tz_cache_raw else {"Activities":[]}
        activity_tz_cache = dict([(x["ActivityURI"], x["TZ"]) for x in activity_tz_cache_raw["Activities"]])

        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, headers=headers)
            try:
                res = res.json()
            except ValueError:
                raise APIException("Could not decode activity list response %s %s" % (res.status_code, res.text))
            for act in res["items"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ActivityURI": act["uri"]}

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                    # Longstanding ST.mobi bug causes it to return negative partial-hour timezones as "-2:-30" instead of "-2:30"
                fixed_start_time = re.sub(r":-(\d\d)", r":\1", act["start_time"])
                activity.StartTime = dateutil.parser.parse(fixed_start_time)
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)  # Convert the dateutil lame timezones into pytz awesome timezones.

                activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(seconds=float(act["duration"]))
                activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(act["duration"]))  # OpenFit says this excludes paused times.

                # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
                if activity.TZ == pytz.utc:
                    if act["uri"] in activity_tz_cache:
                        activity.TZ = pytz.FixedOffset(activity_tz_cache[act["uri"]])
                    else:
                        # So, we get the first location in the activity and calculate the TZ from that.
                        try:
                            firstLocation = self._downloadActivity(serviceRecord, activity, returnFirstLocation=True)
                        except APIExcludeActivity:
                            pass
                        else:
                            try:
                                activity.CalculateTZ(firstLocation, recalculate=True)
                            except:
                                # We tried!
                                pass
                            else:
                                activity.AdjustTZ()
                            finally:
                                activity_tz_cache[act["uri"]] = activity.StartTime.utcoffset().total_seconds() / 60

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(act["total_distance"]))

                types = [x.strip().lower() for x in act["type"].split(":")]
                types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
                activity.Type = None
                for type_key in types:
                    if type_key in self._activityMappings:
                        activity.Type = self._activityMappings[type_key]
                        break
                if not activity.Type:
                    exclusions.append(APIExcludeActivity("Unknown activity type %s" % act["type"], activityId=act["uri"], userException=UserException(UserExceptionType.Other)))
                    continue

                activity.CalculateUID()
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        logger.debug("Writing back meta cache")
        cachedb.sporttracks_meta_cache.update({"ExternalID": serviceRecord.ExternalID}, {"ExternalID": serviceRecord.ExternalID, "Activities": [{"ActivityURI": k, "TZ": v} for k, v in activity_tz_cache.items()]}, upsert=True)
        return activities, exclusions
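The re.sub(r":-(\d\d)", r":\1", ...) call above undoes an ST.mobi quirk where a negative partial-hour offset such as -02:30 comes back as "-02:-30"; the substitution strips the stray minus from the minutes only. A quick demonstration on an illustrative timestamp string:

import re
import dateutil.parser

raw = "2013-07-01T08:15:00-02:-30"               # broken ST.mobi form (illustrative)
fixed = re.sub(r":-(\d\d)", r":\1", raw)
print(fixed)                                      # 2013-07-01T08:15:00-02:30
print(dateutil.parser.parse(fixed).utcoffset())   # -1 day, 21:30:00  (i.e. UTC-02:30)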
Example #43
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?limit=20&start=0
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        force_reauth = False
        while True:
            logger.debug("Req with " + str({
                "start": (page - 1) * pageSz,
                "limit": pageSz
            }))

            res = self._request_with_reauth(lambda session: session.get(
                "https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities",
                params={
                    "start": (page - 1) * pageSz,
                    "limit": pageSz
                }),
                                            serviceRecord,
                                            force_skip_cache=force_reauth)
            force_reauth = False

            try:
                res = res.json()
            except ValueError:
                res_txt = res.text  # So it can capture in the log message
                raise APIException("Parse failure in GC list resp: %s - %s" %
                                   (res.status_code, res_txt))
            for act in res:
                if "ROLE_SYSTEM" in act["userRoles"]:
                    # GC for some reason return test data set instead of 401 for unauthorized call
                    force_reauth = True
                    break

                activity = UploadedActivity()
                # stationary activities have movingDuration = None while non-gps static activities have 0.0
                activity.Stationary = act["movingDuration"] is None
                activity.GPS = act["hasPolyline"]

                activity.Private = act["privacy"]["typeKey"] == "private"

                activity_name = act["activityName"]
                logger.debug("Name " + activity_name if activity_name
                             is not None else "Untitled" + ":")
                if activity_name is not None and len(
                        activity_name.strip()
                ) and activity_name != "Untitled":  # This doesn't work for internationalized accounts, oh well.
                    activity.Name = activity_name

                activity_description = act["description"]
                if activity_description is not None and len(
                        activity_description.strip()):
                    activity.Notes = activity_description

                activity.StartTime = pytz.utc.localize(
                    datetime.strptime(act["startTimeGMT"],
                                      "%Y-%m-%d %H:%M:%S"))
                if act["elapsedDuration"] is not None:
                    activity.EndTime = activity.StartTime + timedelta(
                        0,
                        float(act["elapsedDuration"]) / 1000)
                else:
                    activity.EndTime = activity.StartTime + timedelta(
                        0, float(act["duration"]))

                logger.debug("Activity s/t " + str(activity.StartTime) +
                             " on page " + str(page))

                if "distance" in act and act["distance"] and float(
                        act["distance"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(
                        ActivityStatisticUnit.Meters,
                        value=float(act["distance"]))

                activity.Type = self._resolveActivityType(
                    act["activityType"]["typeKey"])

                activity.CalculateUID()

                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)

            if force_reauth:
                # Re-run activity listing
                continue

            logger.debug("Finished page " + str(page))
            if not exhaustive or len(res) == 0:
                break
            else:
                page += 1
        return activities, exclusions
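The ROLE_SYSTEM branch above breaks out of the per-activity loop, sets force_reauth, and continues the outer loop so the same page is fetched again with a fresh session. A stripped-down sketch of that redo-the-page control flow over fake responses:

# Two fake pages: the first mimics GC handing back its test data set (ROLE_SYSTEM),
# the second is a real listing; iter() stands in for repeated session.get() calls.
responses = iter([
    [{"userRoles": ["ROLE_SYSTEM"]}],
    [{"userRoles": [], "activityId": 1}],
])

def list_page():
    force_reauth = False
    while True:
        res = next(responses)        # the real code rebuilds the session when force_reauth is set
        force_reauth = False
        ids = []
        for act in res:
            if "ROLE_SYSTEM" in act["userRoles"]:
                force_reauth = True  # unauthorized -> redo this page with a new session
                break
            ids.append(act["activityId"])
        if force_reauth:
            continue                 # re-run the same page listing
        return ids

print(list_page())  # [1]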
Example #44
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        ns = self._tp_ns
        activities = []
        exclusions = []

        reqData = self._authData(svcRecord)

        limitDateFormat = "%d %B %Y"

        if exhaustive:
            listEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = datetime(day=1, month=1, year=1980) # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20) # Doesn't really matter

        lastActivityDay = None
        discoveredWorkoutIds = []
        while True:
            reqData.update({"startDate": listStart.strftime(limitDateFormat), "endDate": listEnd.strftime(limitDateFormat)})
            print("Requesting %s to %s" % (listStart, listEnd))
            resp = requests.post("https://www.trainingpeaks.com/tpwebservices/service.asmx/GetWorkoutsForAthlete", data=reqData)
            xresp = etree.XML(resp.content)
            for xworkout in xresp:
                activity = UploadedActivity()

                workoutId = xworkout.find("tpw:WorkoutId", namespaces=ns).text

                workoutDayEl = xworkout.find("tpw:WorkoutDay", namespaces=ns)
                startTimeEl = xworkout.find("tpw:StartTime", namespaces=ns)

                workoutDay = dateutil.parser.parse(workoutDayEl.text)
                startTime = dateutil.parser.parse(startTimeEl.text) if startTimeEl is not None and startTimeEl.text else None

                if lastActivityDay is None or workoutDay.replace(tzinfo=None) > lastActivityDay:
                    lastActivityDay = workoutDay.replace(tzinfo=None)

                if startTime is None:
                    continue # Planned but not executed yet.
                activity.StartTime = startTime

                endTimeEl = xworkout.find("tpw:TimeTotalInSeconds", namespaces=ns)
                if not endTimeEl.text:
                    exclusions.append(APIExcludeActivity("Activity has no duration", activity_id=workoutId, user_exception=UserException(UserExceptionType.Corrupt)))
                    continue

                activity.EndTime = activity.StartTime + timedelta(seconds=float(endTimeEl.text))

                distEl = xworkout.find("tpw:DistanceInMeters", namespaces=ns)
                if distEl.text:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=float(distEl.text))
                # PWX is damn near comprehensive, no need to fill in any of the other statistics here, really

                if workoutId in discoveredWorkoutIds:
                    continue # There's the possibility of query overlap, if there are multiple activities on a single day that fall across the query return limit
                discoveredWorkoutIds.append(workoutId)

                workoutTypeEl = xworkout.find("tpw:WorkoutTypeDescription", namespaces=ns)
                if workoutTypeEl.text:
                    if workoutTypeEl.text == "Day Off":
                        continue # TrainingPeaks has some weird activity types...
                    if workoutTypeEl.text not in self._workoutTypeMappings:
                        exclusions.append(APIExcludeActivity("Activity type %s unknown" % workoutTypeEl.text, activity_id=workoutId, user_exception=UserException(UserExceptionType.Corrupt)))
                        continue
                    activity.Type = self._workoutTypeMappings[workoutTypeEl.text]

                activity.ServiceData = {"WorkoutID": workoutId}
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive:
                break

            # Since TP only lets us query by date range, to get full activity history we need to query successively smaller ranges
            if len(xresp):
                if listStart == lastActivityDay:
                    break # This wouldn't work if you had more than #MaxQueryReturn activities on that day - but that number is probably 50+
                listStart = lastActivityDay
            else:
                break # We're done

        return activities, exclusions
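Because TrainingPeaks only exposes date-range queries, the exhaustive path above keeps re-querying from the newest day it has already seen, de-duplicating by workout id, and stops when a query makes no forward progress. The same walk, sketched over a fake date-keyed store:

from datetime import datetime

# Fake store mapping workout id -> day, standing in for GetWorkoutsForAthlete.
WORKOUTS = {"a": datetime(2014, 1, 3), "b": datetime(2014, 2, 10), "c": datetime(2014, 2, 10)}

def query(start, end):
    return [(wid, day) for wid, day in WORKOUTS.items() if start <= day <= end]

def list_exhaustive(list_start, list_end):
    seen, out, last_day = set(), [], None
    while True:
        rows = query(list_start, list_end)
        for wid, day in rows:
            if last_day is None or day > last_day:
                last_day = day
            if wid in seen:
                continue              # successive windows are expected to overlap
            seen.add(wid)
            out.append(wid)
        if not rows or list_start == last_day:
            break                     # no forward progress possible -> done
        list_start = last_day         # next query starts at the newest day seen so far
    return out

print(list_exhaustive(datetime(1980, 1, 1), datetime(2014, 12, 31)))  # ['a', 'b', 'c']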
Example #45
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        headers = self._getAuthHeaders(serviceRecord)
        activities = []
        exclusions = []
        pageUri = self.OpenFitEndpoint + "/fitnessActivities.json"

        activity_tz_cache_raw = cachedb.sporttracks_meta_cache.find_one(
            {"ExternalID": serviceRecord.ExternalID})
        activity_tz_cache_raw = activity_tz_cache_raw if activity_tz_cache_raw else {
            "Activities": []
        }
        activity_tz_cache = dict([(x["ActivityURI"], x["TZ"])
                                  for x in activity_tz_cache_raw["Activities"]
                                  ])

        while True:
            logger.debug("Req against " + pageUri)
            res = requests.get(pageUri, headers=headers)
            try:
                res = res.json()
            except ValueError:
                raise APIException(
                    "Could not decode activity list response %s %s" %
                    (res.status_code, res.text))
            for act in res["items"]:
                activity = UploadedActivity()
                activity.ServiceData = {"ActivityURI": act["uri"]}

                if len(act["name"].strip()):
                    activity.Name = act["name"]
                    # Longstanding ST.mobi bug causes it to return negative partial-hour timezones as "-2:-30" instead of "-2:30"
                fixed_start_time = re.sub(r":-(\d\d)", r":\1",
                                          act["start_time"])
                activity.StartTime = dateutil.parser.parse(fixed_start_time)
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc  # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(
                        activity.StartTime.tzinfo.utcoffset(
                            activity.StartTime).total_seconds() / 60
                    )  # Convert the dateutil lame timezones into pytz awesome timezones.

                activity.StartTime = activity.StartTime.replace(
                    tzinfo=activity.TZ)
                activity.EndTime = activity.StartTime + timedelta(
                    seconds=float(act["duration"]))
                activity.Stats.TimerTime = ActivityStatistic(
                    ActivityStatisticUnit.Seconds,
                    value=float(act["duration"]
                                ))  # OpenFit says this excludes paused times.

                # Sometimes activities get returned with a UTC timezone even when they are clearly not in UTC.
                if activity.TZ == pytz.utc:
                    if act["uri"] in activity_tz_cache:
                        activity.TZ = pytz.FixedOffset(
                            activity_tz_cache[act["uri"]])
                    else:
                        # So, we get the first location in the activity and calculate the TZ from that.
                        try:
                            firstLocation = self._downloadActivity(
                                serviceRecord,
                                activity,
                                returnFirstLocation=True)
                        except APIExcludeActivity:
                            pass
                        else:
                            try:
                                activity.CalculateTZ(firstLocation,
                                                     recalculate=True)
                            except:
                                # We tried!
                                pass
                            else:
                                activity.AdjustTZ()
                            finally:
                                activity_tz_cache[
                                    act["uri"]] = activity.StartTime.utcoffset(
                                    ).total_seconds() / 60

                logger.debug("Activity s/t " + str(activity.StartTime))
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Meters,
                    value=float(act["total_distance"]))

                types = [x.strip().lower() for x in act["type"].split(":")]
                types.reverse()  # The incoming format is like "walking: hiking" and we want the most specific first
                activity.Type = None
                for type_key in types:
                    if type_key in self._activityMappings:
                        activity.Type = self._activityMappings[type_key]
                        break
                if not activity.Type:
                    exclusions.append(
                        APIExcludeActivity("Unknown activity type %s" %
                                           act["type"],
                                           activity_id=act["uri"],
                                           user_exception=UserException(
                                               UserExceptionType.Other)))
                    continue

                activity.CalculateUID()
                activities.append(activity)
            if not exhaustive or "next" not in res or not len(res["next"]):
                break
            else:
                pageUri = res["next"]
        logger.debug("Writing back meta cache")
        cachedb.sporttracks_meta_cache.update(
            {"ExternalID": serviceRecord.ExternalID}, {
                "ExternalID":
                serviceRecord.ExternalID,
                "Activities": [{
                    "ActivityURI": k,
                    "TZ": v
                } for k, v in activity_tz_cache.items()]
            },
            upsert=True)
        return activities, exclusions
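The re.sub call above works around the ST.mobi offset bug described in the comment. A minimal standalone sketch of that normalization, using a hypothetical malformed timestamp:

import re
import dateutil.parser

def normalize_st_offset(timestamp_str):
    # Rewrite "-HH:-MM" offsets into the "-HH:MM" form dateutil expects.
    return re.sub(r":-(\d\d)", r":\1", timestamp_str)

raw = "2015-06-01T14:05:00-02:-30"  # hypothetical malformed ST.mobi value
print(dateutil.parser.parse(normalize_st_offset(raw)))
# 2015-06-01 14:05:00-02:30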
Example #46
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        #http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?&start=0&limit=50
        session = self._get_session(record=serviceRecord)
        page = 1
        pageSz = 100
        activities = []
        exclusions = []
        while True:
            logger.debug("Req with " + str({"start": (page - 1) * pageSz, "limit": pageSz}))
            self._rate_limit()

            retried_auth = False
            while True:
                res = session.get("http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities", params={"start": (page - 1) * pageSz, "limit": pageSz})
                # It's 10 PM and I have no clue why it's throwing these errors, maybe we just need to log in again?
                if res.status_code == 403 and not retried_auth:
                    retried_auth = True
                    session = self._get_session(serviceRecord, skip_cache=True)
                else:
                    break
            try:
                res = res.json()["results"]
            except ValueError:
                res_txt = res.text  # So it can be captured in the log message
                raise APIException("Parse failure in GC list resp: %s" % res.status_code)
            if "activities" not in res:
                break  # No activities on this page - empty account.
            for act in res["activities"]:
                act = act["activity"]
                activity = UploadedActivity()

                # Don't really know why sumSampleCountTimestamp doesn't appear in swim activities - they're definitely timestamped...
                activity.Stationary = "sumSampleCountSpeed" not in act and "sumSampleCountTimestamp" not in act
                activity.GPS = "endLatitude" in act

                activity.Private = act["privacy"]["key"] == "private"

                try:
                    activity.TZ = pytz.timezone(act["activityTimeZone"]["key"])
                except pytz.exceptions.UnknownTimeZoneError:
                    activity.TZ = pytz.FixedOffset(float(act["activityTimeZone"]["offset"]) * 60)

                logger.debug("Name " + act["activityName"]["value"] + ":")
                if len(act["activityName"]["value"].strip()) and act["activityName"]["value"] != "Untitled": # This doesn't work for internationalized accounts, oh well.
                    activity.Name = act["activityName"]["value"]

                if len(act["activityDescription"]["value"].strip()):
                    activity.Notes = act["activityDescription"]["value"]

                # beginTimestamp/endTimestamp is in UTC
                activity.StartTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["beginTimestamp"]["millis"])/1000))
                if "sumElapsedDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(0, round(float(act["sumElapsedDuration"]["value"])))
                elif "sumDuration" in act:
                    activity.EndTime = activity.StartTime + timedelta(minutes=float(act["sumDuration"]["minutesSeconds"].split(":")[0]), seconds=float(act["sumDuration"]["minutesSeconds"].split(":")[1]))
                else:
                    activity.EndTime = pytz.utc.localize(datetime.utcfromtimestamp(float(act["endTimestamp"]["millis"])/1000))
                logger.debug("Activity s/t " + str(activity.StartTime) + " on page " + str(page))
                activity.AdjustTZ()

                if "sumDistance" in act and float(act["sumDistance"]["value"]) != 0:
                    activity.Stats.Distance = ActivityStatistic(self._unitMap[act["sumDistance"]["uom"]], value=float(act["sumDistance"]["value"]))

                activity.Type = self._resolveActivityType(act["activityType"]["key"])

                activity.CalculateUID()
                
                activity.ServiceData = {"ActivityID": int(act["activityId"])}

                activities.append(activity)
            logger.debug("Finished page " + str(page) + " of " + str(res["search"]["totalPages"]))
            if not exhaustive or int(res["search"]["totalPages"]) == page:
                break
            else:
                page += 1
        return activities, exclusions
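The 403 handling above (re-authenticate once, then give up) is generic enough to sketch on its own. Below, get_session stands in for a session factory like _get_session that accepts a skip_cache keyword, and the httpbin URL in the usage line is only a placeholder:

import requests

def get_with_reauth(get_session, url, **kwargs):
    # Issue a GET; if it comes back 403, re-authenticate once and retry.
    res = get_session().get(url, **kwargs)
    if res.status_code == 403:
        res = get_session(skip_cache=True).get(url, **kwargs)
    return res

if __name__ == "__main__":
    factory = lambda skip_cache=False: requests.Session()  # stand-in factory
    print(get_with_reauth(factory, "https://httpbin.org/get").status_code)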
Example #47
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        ns = {
            "tpw": "http://www.trainingpeaks.com/TPWebServices/",
            "xsi": "http://www.w3.org/2001/XMLSchema-instance"
        }
        activities = []
        exclusions = []

        reqData = self._authData(svcRecord)

        limitDateFormat = "%d %B %Y"

        if exhaustive:
            listEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            listStart = datetime(day=1, month=1,
                                 year=1980)  # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20)  # Doesn't really matter

        lastActivityDay = None
        discoveredWorkoutIds = []
        while True:
            reqData.update({
                "startDate": listStart.strftime(limitDateFormat),
                "endDate": listEnd.strftime(limitDateFormat)
            })
            print("Requesting %s to %s" % (listStart, listEnd))
            resp = requests.post(
                "https://www.trainingpeaks.com/tpwebservices/service.asmx/GetWorkoutsForAthlete",
                data=reqData)
            xresp = etree.XML(resp.content)
            for xworkout in xresp:
                activity = UploadedActivity()

                workoutId = xworkout.find("tpw:WorkoutId", namespaces=ns).text

                workoutDayEl = xworkout.find("tpw:WorkoutDay", namespaces=ns)
                startTimeEl = xworkout.find("tpw:StartTime", namespaces=ns)

                workoutDay = dateutil.parser.parse(workoutDayEl.text)
                startTime = dateutil.parser.parse(
                    startTimeEl.text
                ) if startTimeEl is not None and startTimeEl.text else None

                if lastActivityDay is None or workoutDay.replace(
                        tzinfo=None) > lastActivityDay:
                    lastActivityDay = workoutDay.replace(tzinfo=None)

                if startTime is None:
                    continue  # Planned but not executed yet.
                activity.StartTime = startTime

                endTimeEl = xworkout.find("tpw:TimeTotalInSeconds",
                                          namespaces=ns)
                if not endTimeEl.text:
                    exclusions.append(
                        APIExcludeActivity("Activity has no duration",
                                           activityId=workoutId,
                                           userException=UserException(
                                               UserExceptionType.Corrupt)))
                    continue

                activity.EndTime = activity.StartTime + timedelta(
                    seconds=float(endTimeEl.text))

                distEl = xworkout.find("tpw:DistanceInMeters", namespaces=ns)
                if distEl.text:
                    activity.Stats.Distance = ActivityStatistic(
                        ActivityStatisticUnit.Meters, value=float(distEl.text))
                # PWX is damn near comprehensive, no need to fill in any of the other statistics here, really

                if workoutId in discoveredWorkoutIds:
                    continue  # There's the possibility of query overlap, if there are multiple activities on a single day that fall across the query return limit
                discoveredWorkoutIds.append(workoutId)

                workoutTypeEl = xworkout.find("tpw:WorkoutTypeDescription",
                                              namespaces=ns)
                if workoutTypeEl.text:
                    if workoutTypeEl.text == "Day Off":
                        continue  # TrainingPeaks has some weird activity types...
                    if workoutTypeEl.text not in self._workoutTypeMappings:
                        exclusions.append(
                            APIExcludeActivity("Activity type %s unknown" %
                                               workoutTypeEl.text,
                                               activityId=workoutId,
                                               userException=UserException(
                                                   UserExceptionType.Corrupt)))
                        continue
                    activity.Type = self._workoutTypeMappings[
                        workoutTypeEl.text]

                activity.ServiceData = {"WorkoutID": workoutId}
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive:
                break

            # Since TP only lets us query by date range, to get full activity history we need to query successively smaller ranges
            if len(xresp):
                if listStart == lastActivityDay:
                    break  # This wouldn't work if you had more than #MaxQueryReturn activities on that day - but that number is probably 50+
                listStart = lastActivityDay
            else:
                break  # We're done

        return activities, exclusions
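Every find() above carries the namespaces map because the workout fields live in the tpw namespace. A self-contained sketch of the same lookups against a made-up response snippet (stdlib ElementTree here; the example itself presumably uses lxml's etree):

from xml.etree import ElementTree as etree

ns = {"tpw": "http://www.trainingpeaks.com/TPWebServices/"}

# Made-up stand-in for a GetWorkoutsForAthlete response.
sample = b"""<Workouts xmlns="http://www.trainingpeaks.com/TPWebServices/">
  <Workout>
    <WorkoutId>12345</WorkoutId>
    <WorkoutDay>2015-06-01T00:00:00</WorkoutDay>
    <StartTime>2015-06-01T07:30:00</StartTime>
  </Workout>
</Workouts>"""

root = etree.XML(sample)
for workout in root:
    workout_id = workout.find("tpw:WorkoutId", namespaces=ns).text
    start_el = workout.find("tpw:StartTime", namespaces=ns)
    print(workout_id, start_el.text if start_el is not None else None)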
Example #48
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        oauthSession = self._oauthSession(serviceRecord)

        activities = []
        exclusions = []

        page_url = "https://api.endomondo.com/api/1/workouts"

        while True:
            resp = oauthSession.get(page_url)
            try:
                respList = resp.json()["data"]
            except ValueError:
                raise APIException("Error decoding activity list resp %s %s" % (resp.status_code, resp.text))
            for actInfo in respList:
                activity = UploadedActivity()
                activity.StartTime = self._parseDate(actInfo["start_time"])
                print("Activity s/t %s" % activity.StartTime)
                if "is_tracking" in actInfo and actInfo["is_tracking"]:
                    exclusions.append(APIExcludeActivity("Not complete", activityId=actInfo["id"], permanent=False, userException=UserException(UserExceptionType.LiveTracking)))
                    continue

                if "end_time" in actInfo:
                    activity.EndTime = self._parseDate(actInfo["end_time"])

                if actInfo["sport"] in self._activityMappings:
                    activity.Type = self._activityMappings[actInfo["sport"]]

                # "duration" is timer time
                if "duration_total" in actInfo:
                    activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(actInfo["duration_total"]))

                if "distance_total" in actInfo:
                    activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance_total"]))

                if "calories_total" in actInfo:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(actInfo["calories_total"]))

                activity.Stats.Elevation = ActivityStatistic(ActivityStatisticUnit.Meters)

                if "altitude_max" in actInfo:
                    activity.Stats.Elevation.Max = float(actInfo["altitude_max"])

                if "altitude_min" in actInfo:
                    activity.Stats.Elevation.Min = float(actInfo["altitude_min"])

                if "total_ascent" in actInfo:
                    activity.Stats.Elevation.Gain = float(actInfo["total_ascent"])

                if "total_descent" in actInfo:
                    activity.Stats.Elevation.Loss = float(actInfo["total_descent"])

                activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour)
                if "speed_max" in actInfo:
                    activity.Stats.Speed.Max = float(actInfo["speed_max"])

                if "heart_rate_avg" in actInfo:
                    activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(actInfo["heart_rate_avg"]))

                if "heart_rate_max" in actInfo:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=float(actInfo["heart_rate_max"])))

                if "cadence_avg" in actInfo:
                    activity.Stats.Cadence = ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=int(actInfo["cadence_avg"]))

                if "cadence_max" in actInfo:
                    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, max=int(actInfo["cadence_max"])))

                if "title" in actInfo:
                    activity.Name = actInfo["title"]

                activity.ServiceData = {"WorkoutID": int(actInfo["id"])}

                activity.CalculateUID()
                activities.append(activity)

            paging = resp.json()["paging"]
            if "next" not in paging or not paging["next"] or not exhaustive:
                break
            else:
                page_url = paging["next"]

        return activities, exclusions
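The paging here is a plain cursor walk: follow paging["next"] until it runs out. A generic sketch of that loop as a generator, assuming an already-authenticated requests/OAuth session object:

def iter_pages(session, first_url):
    # Yield each page's "data" list, following the "next" cursor until it ends.
    url = first_url
    while url:
        body = session.get(url).json()
        yield body.get("data", [])
        url = body.get("paging", {}).get("next")

# Hypothetical usage:
# for page in iter_pages(oauthSession, "https://api.endomondo.com/api/1/workouts"):
#     for workout in page:
#         ...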
Example #49
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):

        def mapStatTriple(act, stats_obj, key, units):
            if "%s_max" % key in act and act["%s_max" % key]:
                stats_obj.update(ActivityStatistic(units, max=float(act["%s_max" % key])))
            if "%s_min" % key in act and act["%s_min" % key]:
                stats_obj.update(ActivityStatistic(units, min=float(act["%s_min" % key])))
            if "%s_avg" % key in act and act["%s_avg" % key]:
                stats_obj.update(ActivityStatistic(units, avg=float(act["%s_avg" % key])))


        # http://ridewithgps.com/users/1/trips.json?limit=200&order_by=created_at&order_dir=asc
        # offset also supported
        activities = []
        exclusions = []
        # They don't actually support paging right now, for whatever reason
        params = self._add_auth_params({}, record=serviceRecord)

        res = requests.get("http://ridewithgps.com/users/{}/trips.json".format(serviceRecord.ExternalID), params=params)
        res = res.json()

        # Apparently some API users are seeing this new result format - I'm not
        if type(res) is dict:
            res = res.get("results", [])

        if res == []:
            return [], [] # No activities
        for act in res:
            if "distance" not in act:
                exclusions.append(APIExcludeActivity("No distance", activity_id=act["id"], user_exception=UserException(UserExceptionType.Corrupt)))
                continue
            if "duration" not in act or not act["duration"]:
                exclusions.append(APIExcludeActivity("No duration", activity_id=act["id"], user_exception=UserException(UserExceptionType.Corrupt)))
                continue
            activity = UploadedActivity()

            logger.debug("Name " + act["name"] + ":")
            if len(act["name"].strip()):
                activity.Name = act["name"]

            if len(act["description"].strip()):
                activity.Notes = act["description"]

            activity.GPS = act["is_gps"]
            activity.Stationary = not activity.GPS # I think

            # 0 = public, 1 = private, 2 = friends
            activity.Private = act["visibility"] == 1

            activity.StartTime = dateutil.parser.parse(act["departed_at"])

            try:
                activity.TZ = pytz.timezone(act["time_zone"])
            except pytz.exceptions.UnknownTimeZoneError:
                # Sometimes the time_zone returned isn't quite what we'd like it
                # So, just pull the offset from the datetime
                if isinstance(activity.StartTime.tzinfo, tzutc):
                    activity.TZ = pytz.utc # The dateutil tzutc doesn't have an _offset value.
                else:
                    activity.TZ = pytz.FixedOffset(activity.StartTime.tzinfo.utcoffset(activity.StartTime).total_seconds() / 60)

            activity.StartTime = activity.StartTime.replace(tzinfo=activity.TZ) # Overwrite dateutil's sillyness

            activity.EndTime = activity.StartTime + timedelta(seconds=self._duration_to_seconds(act["duration"]))
            logger.debug("Activity s/t " + str(activity.StartTime))
            activity.AdjustTZ()

            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, float(act["distance"]))

            mapStatTriple(act, activity.Stats.Power, "watts", ActivityStatisticUnit.Watts)
            mapStatTriple(act, activity.Stats.Speed, "speed", ActivityStatisticUnit.KilometersPerHour)
            mapStatTriple(act, activity.Stats.Cadence, "cad", ActivityStatisticUnit.RevolutionsPerMinute)
            mapStatTriple(act, activity.Stats.HR, "hr", ActivityStatisticUnit.BeatsPerMinute)

            if "elevation_gain" in act and act["elevation_gain"]:
                activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, gain=float(act["elevation_gain"])))

            if "elevation_loss" in act and act["elevation_loss"]:
                activity.Stats.Elevation.update(ActivityStatistic(ActivityStatisticUnit.Meters, loss=float(act["elevation_loss"])))

            # Activity type is not implemented yet in RWGPS results; we will assume cycling, though perhaps "OTHER" would be correct
            activity.Type = ActivityType.Cycling

            activity.CalculateUID()
            activity.ServiceData = {"ActivityID": act["id"]}
            activities.append(activity)
        return activities, exclusions
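mapStatTriple above leans on RideWithGPS exposing each statistic as a *_min/*_max/*_avg key triple. A small dict-based illustration of that convention, with an invented trip payload in place of the real response and a plain dict in place of ActivityStatistic:

def stat_triple(act, key):
    # Collect whichever of key_min / key_max / key_avg are present and truthy.
    return {suffix: float(act["%s_%s" % (key, suffix)])
            for suffix in ("min", "max", "avg")
            if act.get("%s_%s" % (key, suffix))}

trip = {"hr_avg": 142, "hr_max": 171, "hr_min": None}  # hypothetical payload
print(stat_triple(trip, "hr"))  # {'max': 171.0, 'avg': 142.0}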
Example #50
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        """
        GET List of Activities as JSON File

        URL: http://app.velohero.com/export/workouts/json
        Parameters:
        user      = username
        pass      = password
        date_from = YYYY-MM-DD
        date_to   = YYYY-MM-DD
        """
        activities           = []
        exclusions           = []
        discoveredWorkoutIds = []

        params = self._add_auth_params({}, record=serviceRecord)

        limitDateFormat = "%Y-%m-%d"

        if exhaustive:
            listEnd   = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = datetime(day=1, month=1, year=1980) # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(days=1.5) # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20) # Doesn't really matter

        params.update({"date_from": listStart.strftime(limitDateFormat), "date_to": listEnd.strftime(limitDateFormat)})
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        res = requests.get(self._urlRoot + "/export/workouts/json", params=params)

        if res.status_code != 200:
            if res.status_code == 403:
                raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
            raise APIException("Unable to retrieve activity list")

        res.raise_for_status()
        try:
            res = res.json()
        except ValueError:
            raise APIException("Could not decode activity list")
        if "workouts" not in res:
            raise APIException("No activities")
        for workout in res["workouts"]:
            workoutId = int(workout["id"])
            if workoutId in discoveredWorkoutIds:
                continue  # There's the possibility of query overlap
            discoveredWorkoutIds.append(workoutId)
            if workout["file"] != "1":
                logger.debug("Skip workout with ID: " + str(workoutId) + " (no file)")
                continue  # Skip activity without samples (no PWX export)

            activity = UploadedActivity()

            logger.debug("Workout ID: " + str(workoutId))
            # Duration (dur_time)
            duration = self._durationToSeconds(workout["dur_time"])
            activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=duration)
            # Start time (date_ymd, start_time)
            startTimeStr = workout["date_ymd"] + " " + workout["start_time"]
            activity.StartTime = self._parseDateTime(startTimeStr)
            # End time (date_ymd, start_time) + dur_time
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(seconds=duration)
            # Sport (sport_id)
            if workout["sport_id"] in self._reverseActivityMappings:
                activity.Type = self._reverseActivityMappings[workout["sport_id"]]
            else:
                activity.Type = ActivityType.Other
            # Distance (dist_km)
            activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(workout["dist_km"]))
            # Workout is hidden
            activity.Private = workout["hide"] == "1"

            activity.ServiceData = {"workoutId": workoutId}
            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions
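The docstring spells out the export endpoint and its query parameters, so the same call can be sketched without the service wrapper. The credentials below are placeholders:

import requests
from datetime import datetime, timedelta

params = {
    "user": "example-user",        # placeholder credentials
    "pass": "example-password",
    "date_from": (datetime.now() - timedelta(days=20)).strftime("%Y-%m-%d"),
    "date_to": (datetime.now() + timedelta(days=1.5)).strftime("%Y-%m-%d"),
}
res = requests.get("http://app.velohero.com/export/workouts/json", params=params)
res.raise_for_status()
print(len(res.json().get("workouts", [])), "workouts")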
Example #51
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        logger.debug("Checking motivato premium state")
        self._applyPaymentState(serviceRecord)

        logger.debug("Motivato DownloadActivityList")
        session = self._get_session(record=serviceRecord)
        activities = []
        exclusions = []

        self._rate_limit()

        retried_auth = False
        #headers = {'X-App-With-Tracks': "true"}
        headers = {}
        while True:
            res = session.post(self._urlRoot + "/api/workouts/sync",
                               headers=headers)
            if res.status_code == 403 and not retried_auth:
                # Re-authenticate once and reissue the request.
                retried_auth = True
                session = self._get_session(serviceRecord, skip_cache=True)
            else:
                break

        try:
            respList = res.json()
        except ValueError:
            res_txt = res.text  # So it can be captured in the log message
            raise APIException("Parse failure in Motivato list resp: %s" %
                               res.status_code)

        for actInfo in respList:
            if "duration" in actInfo:
                duration = self._durationToSeconds(actInfo["duration"])
            else:
                continue

            activity = UploadedActivity()
            if "time_start" in actInfo["metas"]:
                startTimeStr = actInfo["training_at"] + " " + actInfo["metas"][
                    "time_start"]
            else:
                startTimeStr = actInfo["training_at"] + " 00:00:00"

            activity.StartTime = self._parseDateTime(startTimeStr)
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(
                seconds=duration)
            activity.Type = self._reverseActivityMappings[
                actInfo["discipline_id"]]
            activity.Stats.TimerTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds, value=duration)
            if "distance" in actInfo:
                activity.Stats.Distance = ActivityStatistic(
                    ActivityStatisticUnit.Kilometers,
                    value=float(actInfo["distance"]))
            #activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerSecond, value=1.0/float(actInfo["metas"]["pace"]))

            activity.ServiceData = {"WorkoutID": int(actInfo["id"])}

            activity.CalculateUID()
            logger.debug("Generated UID %s" % activity.UID)
            activities.append(activity)

        return activities, exclusions
Example #52
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        while True:
            if before is not None and before < 0:
                break # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
            logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
            resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
            if resp.status_code == 401:
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

            earliestDate = None

            try:
                reqdata = resp.json()
            except ValueError:
                raise APIException("Failed parsing strava list response %s - %s" % (resp.status_code, resp.text))

            if not len(reqdata):
                break  # No more activities to see

            for ride in reqdata:
                activity = UploadedActivity()
                activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
                activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
                logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ride["name"]))
                if not earliestDate or activity.StartTime < earliestDate:
                    earliestDate = activity.StartTime
                    before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())

                activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
                activity.ServiceData = {"ActivityID": ride["id"], "Manual": ride["manual"]}

                if ride["type"] not in self._reverseActivityTypeMappings:
                    exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activity_id=ride["id"], user_exception=UserException(UserExceptionType.Other)))
                    logger.debug("\t\tUnknown activity")
                    continue

                activity.Type = self._reverseActivityTypeMappings[ride["type"]]
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["distance"])
                if "max_speed" in ride or "average_speed" in ride:
                    activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["average_speed"] if "average_speed" in ride else None, max=ride["max_speed"] if "max_speed" in ride else None)
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=ride["moving_time"] if "moving_time" in ride and ride["moving_time"] > 0 else None)  # They don't let you manually enter this, and I think it returns 0 for those activities.
                # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?
                if "average_watts" in ride:
                    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=ride["average_watts"])
                if "average_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ride["average_heartrate"]))
                if "max_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"]))
                if "average_cadence" in ride:
                    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["average_cadence"]))
                if "average_temp" in ride:
                    activity.Stats.Temperature.update(ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))
                if "calories" in ride:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"])
                activity.Name = ride["name"]
                activity.Private = ride["private"]
                activity.Stationary = ride["manual"]
                activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive or not earliestDate:
                break

        return activities, exclusions
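Paging against the Strava list endpoint is driven entirely by the `before` epoch derived from the earliest start time seen so far. A compact sketch of that walk as a generator; headers is assumed to carry the bearer token that _apiHeaders builds:

import calendar
from datetime import datetime

import pytz
import requests

def iter_strava_activities(athlete_id, headers):
    # Walk backwards through the athlete's activities via the `before` cursor.
    before = None
    while True:
        resp = requests.get(
            "https://www.strava.com/api/v3/athletes/%s/activities" % athlete_id,
            headers=headers, params={"before": before})
        page = resp.json()
        if not page:
            return  # No more activities to see
        for ride in page:
            start = pytz.utc.localize(
                datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
            epoch = calendar.timegm(start.timetuple())
            before = epoch if before is None else min(before, epoch)
            yield ride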
Example #53
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):

        activities = []
        exclusions = []
        earliestDate = None
        earliestFirstPageDate = None
        paged = False

        while True:
            before = "" if earliestDate is None else earliestDate.astimezone(
                pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
            params = {
                "authToken": serviceRecord.Authorization["AuthToken"],
                "maxResults": 45,
                "before": before
            }
            logger.debug("Req with " + str(params))
            response = requests.get(
                "http://api.mobile.endomondo.com/mobile/api/workout/list",
                params=params)

            if response.status_code != 200:
                if response.status_code == 401 or response.status_code == 403:
                    raise APIException(
                        "No authorization to retrieve activity list",
                        block=True,
                        user_exception=UserException(
                            UserExceptionType.Authorization,
                            intervention_required=True))
                raise APIException("Unable to retrieve activity list " +
                                   str(response))
            data = response.json()

            if "error" in data and data["error"]["type"] == "AUTH_FAILED":
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))

            track_ids = []
            this_page_activities = []

            for act in data["data"]:
                startTime = pytz.utc.localize(
                    datetime.strptime(act["start_time"],
                                      "%Y-%m-%d %H:%M:%S UTC"))
                if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                    earliestDate = startTime
                logger.debug("activity pre")
                if not act["has_points"]:
                    logger.warning("\t no pts")
                    exclusions.append(
                        APIExcludeActivity("No points", activityId=act["id"]))
                    continue  # it'll break strava, which needs waypoints to find TZ. Meh
                if "tracking" in act and act["tracking"]:
                    logger.warning("\t tracking")
                    exclusions.append(
                        APIExcludeActivity("In progress",
                                           activityId=act["id"],
                                           permanent=False))
                    continue  # come back once they've completed the activity
                track_ids.append(act["id"])
                activity = UploadedActivity()
                activity.StartTime = startTime
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(act["duration_sec"]))
                logger.debug("\tActivity s/t " + str(activity.StartTime))

                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]
                activity.UploadedTo = [{
                    "Connection": serviceRecord,
                    "ActivityID": act["id"]
                }]

                this_page_activities.append(activity)

            cached_track_tzs = cachedb.endomondo_activity_cache.find(
                {"TrackID": {
                    "$in": track_ids
                }})
            cached_track_tzs = dict([(x["TrackID"], x)
                                     for x in cached_track_tzs])
            logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" +
                         str(len(track_ids)) + " cached TZ records")

            for activity in this_page_activities:
                # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
                cachedTrackData = None
                track_id = activity.UploadedTo[0]["ActivityID"]
                if track_id not in cached_track_tzs:
                    logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                    cachedTrackData = self._downloadRawTrackRecord(
                        serviceRecord, track_id)
                    try:
                        self._populateActivityFromTrackData(
                            activity, cachedTrackData, minimumWaypoints=True)
                    except APIExcludeActivity as e:
                        e.ExternalActivityID = track_id
                        logger.info("Encountered APIExcludeActivity %s" %
                                    str(e))
                        exclusions.append(e)
                        continue

                    if not activity.TZ:
                        logger.info("Couldn't determine TZ")
                        exclusions.append(
                            APIExcludeActivity("Couldn't determine TZ",
                                               activityId=track_id))
                        continue
                    cachedTrackRecord = {
                        "Owner": serviceRecord.ExternalID,
                        "TrackID": track_id,
                        "TZ": pickle.dumps(activity.TZ),
                        "StartTime": activity.StartTime
                    }
                    cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
                else:
                    activity.TZ = pickle.loads(
                        cached_track_tzs[track_id]["TZ"])
                    activity.AdjustTZ()  # Everything returned is in UTC
                activity.UploadedTo[0]["ActivityData"] = cachedTrackData
                activity.Waypoints = []
                activity.CalculateUID()
                activities.append(activity)

            if not paged:
                earliestFirstPageDate = earliestDate
            if not exhaustive or ("more" in data and data["more"] is False):
                break
            else:
                paged = True
        return activities, exclusions
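The cache above stores each activity's timezone by pickling the pytz object straight into the MongoDB record. A tiny round-trip check of that approach, with an arbitrary zone standing in for a resolved activity.TZ:

import pickle
import pytz

tz = pytz.timezone("Europe/Copenhagen")  # arbitrary stand-in for activity.TZ
blob = pickle.dumps(tz)                  # what would land in the cache record
restored = pickle.loads(blob)
assert restored.zone == tz.zone
print(restored)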
Example #54
0
    def DownloadActivityList(self, svcRecord, exhaustive=False):
        activities = []
        exclusions = []
        before = earliestDate = None

        while True:
            if before is not None and before < 0:
                break # Caused by activities that "happened" before the epoch. We generally don't care about those activities...
            logger.debug("Req with before=" + str(before) + "/" + str(earliestDate))
            self._globalRateLimit()
            resp = requests.get("https://www.strava.com/api/v3/athletes/" + str(svcRecord.ExternalID) + "/activities", headers=self._apiHeaders(svcRecord), params={"before": before})
            if resp.status_code == 401:
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

            earliestDate = None

            reqdata = resp.json()

            if not len(reqdata):
                break  # No more activities to see

            for ride in reqdata:
                activity = UploadedActivity()
                activity.TZ = pytz.timezone(re.sub(r"^\([^\)]+\)\s*", "", ride["timezone"]))  # Comes back as "(GMT -13:37) The Stuff/We Want"
                activity.StartTime = pytz.utc.localize(datetime.strptime(ride["start_date"], "%Y-%m-%dT%H:%M:%SZ"))
                logger.debug("\tActivity s/t %s: %s" % (activity.StartTime, ride["name"]))
                if not earliestDate or activity.StartTime < earliestDate:
                    earliestDate = activity.StartTime
                    before = calendar.timegm(activity.StartTime.astimezone(pytz.utc).timetuple())

                activity.EndTime = activity.StartTime + timedelta(0, ride["elapsed_time"])
                activity.ServiceData = {"ActivityID": ride["id"], "Manual": ride["manual"]}

                if ride["type"] not in self._reverseActivityTypeMappings:
                    exclusions.append(APIExcludeActivity("Unsupported activity type %s" % ride["type"], activityId=ride["id"], userException=UserException(UserExceptionType.Other)))
                    logger.debug("\t\tUnknown activity")
                    continue

                activity.Type = self._reverseActivityTypeMappings[ride["type"]]
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Meters, value=ride["distance"])
                if "max_speed" in ride or "average_speed" in ride:
                    activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.MetersPerSecond, avg=ride["average_speed"] if "average_speed" in ride else None, max=ride["max_speed"] if "max_speed" in ride else None)
                activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=ride["moving_time"] if "moving_time" in ride and ride["moving_time"] > 0 else None)  # They don't let you manually enter this, and I think it returns 0 for those activities.
                # Strava doesn't handle "timer time" to the best of my knowledge - although they say they do look at the FIT total_timer_time field, so...?
                if "average_watts" in ride:
                    activity.Stats.Power = ActivityStatistic(ActivityStatisticUnit.Watts, avg=ride["average_watts"])
                if "average_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=ride["average_heartrate"]))
                if "max_heartrate" in ride:
                    activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=ride["max_heartrate"]))
                if "average_cadence" in ride:
                    activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=ride["average_cadence"]))
                if "average_temp" in ride:
                    activity.Stats.Temperature.update(ActivityStatistic(ActivityStatisticUnit.DegreesCelcius, avg=ride["average_temp"]))
                if "calories" in ride:
                    activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=ride["calories"])
                activity.Name = ride["name"]
                activity.Private = ride["private"]
                activity.Stationary = ride["manual"]
                activity.GPS = ("start_latlng" in ride) and (ride["start_latlng"] is not None)
                activity.AdjustTZ()
                activity.CalculateUID()
                activities.append(activity)

            if not exhaustive or not earliestDate:
                break

        return activities, exclusions
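Before the timezone string can be handed to pytz it has to lose the "(GMT -13:37) " prefix quoted in the comment above. A standalone check of that cleanup on a hypothetical value:

import re
import pytz

raw = "(GMT-05:00) America/New_York"  # hypothetical value in the quoted format
zone_name = re.sub(r"^\([^\)]+\)\s*", "", raw)
print(pytz.timezone(zone_name))  # America/New_York timezone object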
Example #55
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        """
        GET List of Activities as JSON File

        URL: http://app.velohero.com/export/workouts/json
        Parameters:
        user      = username
        pass      = password
        date_from = YYYY-MM-DD
        date_to   = YYYY-MM-DD
        """
        activities = []
        exclusions = []
        discoveredWorkoutIds = []

        params = self._add_auth_params({}, record=serviceRecord)

        limitDateFormat = "%Y-%m-%d"

        if exhaustive:
            listEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            listStart = datetime(day=1, month=1,
                                 year=1980)  # The beginning of time
        else:
            listEnd = datetime.now() + timedelta(
                days=1.5)  # Who knows which TZ it's in
            listStart = listEnd - timedelta(days=20)  # Doesn't really matter

        params.update({
            "date_from": listStart.strftime(limitDateFormat),
            "date_to": listEnd.strftime(limitDateFormat)
        })
        logger.debug("Requesting %s to %s" % (listStart, listEnd))
        res = requests.get(self._urlRoot + "/export/workouts/json",
                           params=params)

        if res.status_code != 200:
            if res.status_code == 403:
                raise APIException("Invalid login",
                                   block=True,
                                   user_exception=UserException(
                                       UserExceptionType.Authorization,
                                       intervention_required=True))
            raise APIException("Unable to retrieve activity list")

        res.raise_for_status()
        try:
            res = res.json()
        except ValueError:
            raise APIException("Could not decode activity list")
        if "workouts" not in res:
            raise APIException("No activities")
        for workout in res["workouts"]:
            workoutId = int(workout["id"])
            if workoutId in discoveredWorkoutIds:
                continue  # There's the possibility of query overlap
            discoveredWorkoutIds.append(workoutId)
            if workout["file"] is not "1":
                logger.debug("Skip workout with ID: " + str(workoutId) +
                             " (no file)")
                continue  # Skip activity without samples (no PWX export)

            activity = UploadedActivity()

            logger.debug("Workout ID: " + str(workoutId))
            # Duration (dur_time)
            duration = self._durationToSeconds(workout["dur_time"])
            activity.Stats.TimerTime = ActivityStatistic(
                ActivityStatisticUnit.Seconds, value=duration)
            # Start time (date_ymd, start_time)
            startTimeStr = workout["date_ymd"] + " " + workout["start_time"]
            activity.StartTime = self._parseDateTime(startTimeStr)
            # End time (date_ymd, start_time) + dur_time
            activity.EndTime = self._parseDateTime(startTimeStr) + timedelta(
                seconds=duration)
            # Sport (sport_id)
            if workout["sport_id"] is not "0":
                activity.Type = self._reverseActivityMappings[int(
                    workout["sport_id"])]
            else:
                activity.Type = ActivityType.Cycling
            # Distance (dist_km)
            activity.Stats.Distance = ActivityStatistic(
                ActivityStatisticUnit.Kilometers,
                value=float(workout["dist_km"]))
            # Workout is hidden
            activity.Private = workout["hide"] == "1"

            activity.ServiceData = {"workoutId": workoutId}
            activity.CalculateUID()
            activities.append(activity)

        return activities, exclusions