Example #1
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        """Enumerate activity files in the user's Dropbox.

        Refreshes the locally cached folder structure (via _folderRecurse),
        parses any file whose Dropbox rev differs from the cached metadata
        via _getActivity, and returns a tuple (activities, exclusions).
        Parsed-file metadata is cached in cachedb.dropbox_cache, keyed by
        the service record's ExternalID, so unchanged files are not
        re-downloaded. `exhaustive` is accepted for interface parity but has
        no effect here - the folder walk is always complete.
        """
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        # Renamed loop variables from `dir`/`file` to avoid shadowing builtins.
        for folder in cache["Structure"]:
            for file_rec in folder["Files"]:
                path = file_rec["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                existing = [(k, x) for k, x in cache["Activities"].items() if x["Path"] == relPath]  # path is relative to syncroot to reduce churn if they relocate it
                existing = existing[0] if existing else None
                if existing is not None:
                    existUID, existing = existing
                if existing and existing["Rev"] == file_rec["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existUID
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except ValueError:
                        # Some cached records were written without a UTC offset;
                        # fall back to the offset-less format (matches the later
                        # revisions of this method).
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y")
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(e)
                        continue
                    del act.Laps
                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][act.UID] = {"Rev": rev, "Path": relPath, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        cachedb.dropbox_cache.update({"ExternalID": svcRec.ExternalID}, cache, upsert=True)
        return activities, exclusions
Example #2
0
File: dropbox.py  Project: hegge/tapiriik
    def DownloadActivityList(self, svcRec, exhaustive=False):
        """Enumerate activity files in the user's Dropbox.

        Refreshes the locally cached folder structure, then parses any file
        whose Dropbox rev differs from the cached metadata. Returns a tuple
        (activities, exclusions). Parsed-file metadata is cached in
        cachedb.dropbox_cache, keyed by a hash of the sync-root-relative
        path; legacy UID-keyed records are migrated on the fly.
        `exhaustive` is accepted for interface parity but unused here.
        """
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        # Renamed loop variables from `dir`/`file` to avoid shadowing builtins.
        for folder in cache["Structure"]:
            for file_rec in folder["Files"]:
                path = file_rec["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if not existing:
                    # Continue to use the old records keyed by UID where possible
                    existing = [
                        (k, x) for k, x in cache["Activities"].items() if "Path" in x and x["Path"] == relPath
                    ]  # path is relative to syncroot to reduce churn if they relocate it
                    existing = existing[0] if existing else None
                    if existing is not None:
                        existUID, existing = existing
                        existing["UID"] = existUID

                if existing and existing["Rev"] == file_rec["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except ValueError:  # narrowed from bare except: only a parse failure is expected here
                        act.StartTime = datetime.strptime(
                            existing["StartTime"], "%H:%M:%S %d %m %Y"
                        )  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug(
                        "Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache")
                    )
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except Exception:  # best-effort; don't swallow SystemExit/KeyboardInterrupt
                        pass  # We tried.

                    if hasattr(act, "OriginatedFromTapiriik") and not act.CountTotalWaypoints():
                        # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                        # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                        # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                        if ".tcx.summary-data" in path:
                            logger.info("...summary file already moved")
                        else:
                            logger.info("...moving summary-only file")
                            dbcl.file_move(path, path.replace(".tcx", ".tcx.summary-data"))
                        continue  # DON'T include in listing - it'll be regenerated
                    del act.Laps
                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z"),
                    }
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        if "_id" in cache:
            cachedb.dropbox_cache.save(cache)
        else:
            cachedb.dropbox_cache.insert(cache)
        return activities, exclusions
Example #3
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        """Enumerate activity files (.gpx/.tcx) via the Dropbox v2 API.

        Recursively lists the sync root, re-parses any file whose rev differs
        from the cached metadata, incrementally writes the metadata cache
        back to cachedb.dropbox_cache after each parse, and finally prunes
        cache entries for files no longer present. Returns a tuple
        (activities, exclusions). `exhaustive` is accepted for interface
        parity but unused - the listing is always complete.
        """
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        # Dropbox API v2 doesn't like / as root.
        if syncRoot == "/":
            syncRoot = ""
        # New Dropbox API prefers path_lower, it would seem.
        syncRoot = syncRoot.lower()

        # There used to be a massive affair going on here to cache the folder structure locally.
        # Dropbox API 2.0 doesn't support the hashes I need for that.
        # Oh well. Throw that data out now. Well, don't load it at all.
        cache = cachedb.dropbox_cache.find_one(
            {"ExternalID": svcRec.ExternalID},
            {"ExternalID": True, "Activities": True})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Activities": {}}

        try:
            list_result = dbcl.files_list_folder(syncRoot, recursive=True)
        except dropbox.exceptions.DropboxException as e:
            self._raiseDbException(e)

        def cache_writeback():
            # Upsert the metadata cache; remember the inserted _id so later
            # writebacks update the same document instead of re-inserting.
            if "_id" in cache:
                cachedb.dropbox_cache.save(cache)
            else:
                insert_result = cachedb.dropbox_cache.insert(cache)
                cache["_id"] = insert_result.inserted_id

        activities = []
        exclusions = []
        discovered_activity_cache_keys = set()

        while True:
            for entry in list_result.entries:
                if not hasattr(entry, "rev"):
                    # Not a file -> we don't care.
                    continue
                path = entry.path_lower

                if not path.endswith(".gpx") and not path.endswith(".tcx"):
                    # Not an activity file -> we don't care.
                    continue

                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                discovered_activity_cache_keys.add(hashedRelPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if existing and existing["Rev"] == entry.rev:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except ValueError:  # narrowed from bare except: only a parse failure is expected here
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y")  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except Exception:  # best-effort; don't swallow SystemExit/KeyboardInterrupt
                        pass  # We tried.

                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")
                    }
                    # Incrementally update the cache db.
                    # Otherwise, if we crash later on in listing
                    # (due to OOM or similar), we'll never make progress on this account.
                    cache_writeback()
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

            # Perform pagination.
            if list_result.has_more:
                list_result = dbcl.files_list_folder_continue(list_result.cursor)
            else:
                break

        # Drop deleted activities' records from cache.
        all_activity_cache_keys = set(cache["Activities"].keys())
        for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys:
            del cache["Activities"][deleted_key]

        cache_writeback()
        return activities, exclusions
Example #4
0
File: dropbox.py  Project: cpfair/tapiriik
    def DownloadActivityList(self, svcRec, exhaustive=False):
        """Enumerate activity files (.gpx/.tcx) via the Dropbox v2 API.

        Recursively lists the sync root, re-parses any file whose rev differs
        from the cached metadata, incrementally writes the metadata cache
        back to cachedb.dropbox_cache after each parse, and finally prunes
        cache entries for files no longer present. Returns a tuple
        (activities, exclusions). `exhaustive` is accepted for interface
        parity but unused - the listing is always complete.
        """
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        # Dropbox API v2 doesn't like / as root.
        if syncRoot == "/":
            syncRoot = ""
        # New Dropbox API prefers path_lower, it would seem.
        syncRoot = syncRoot.lower()

        # There used to be a massive affair going on here to cache the folder structure locally.
        # Dropbox API 2.0 doesn't support the hashes I need for that.
        # Oh well. Throw that data out now. Well, don't load it at all.
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID}, {"ExternalID": True, "Activities": True})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Activities": {}}

        try:
            list_result = dbcl.files_list_folder(syncRoot, recursive=True)
        except dropbox.exceptions.DropboxException as e:
            self._raiseDbException(e)

        def cache_writeback():
            # Upsert the metadata cache; remember the inserted _id so later
            # writebacks update the same document instead of re-inserting.
            if "_id" in cache:
                cachedb.dropbox_cache.save(cache)
            else:
                insert_result = cachedb.dropbox_cache.insert(cache)
                cache["_id"] = insert_result.inserted_id

        activities = []
        exclusions = []
        discovered_activity_cache_keys = set()

        while True:
            for entry in list_result.entries:
                if not hasattr(entry, "rev"):
                    # Not a file -> we don't care.
                    continue
                path = entry.path_lower

                if not path.endswith(".gpx") and not path.endswith(".tcx"):
                    # Not an activity file -> we don't care.
                    continue

                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                discovered_activity_cache_keys.add(hashedRelPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if existing and existing["Rev"] == entry.rev:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except ValueError:  # narrowed from bare except: only a parse failure is expected here
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y") # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except Exception:  # best-effort; don't swallow SystemExit/KeyboardInterrupt
                        pass # We tried.

                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {"Rev": rev, "UID": act.UID, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
                    # Incrementally update the cache db.
                    # Otherwise, if we crash later on in listing
                    # (due to OOM or similar), we'll never make progress on this account.
                    cache_writeback()
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

            # Perform pagination.
            if list_result.has_more:
                list_result = dbcl.files_list_folder_continue(list_result.cursor)
            else:
                break

        # Drop deleted activities' records from cache.
        all_activity_cache_keys = set(cache["Activities"].keys())
        for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys:
            del cache["Activities"][deleted_key]

        cache_writeback()
        return activities, exclusions
Example #5
0
    def DownloadActivityList(self, svcRec, exhaustive=False):
        """Enumerate activity files in the user's Dropbox (v1 API).

        Refreshes the cached folder structure, re-parses files whose rev
        changed, migrates legacy UID-keyed cache records to path-hash keys,
        and relocates broken summary-only TCX exports out of the sync path.
        Returns a tuple (activities, exclusions). `exhaustive` is accepted
        for interface parity but unused here.
        """
        dbcl = self._getClient(svcRec)
        if not svcRec.Authorization["Full"]:
            syncRoot = "/"
        else:
            syncRoot = svcRec.Config["SyncRoot"]
        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID})
        if cache is None:
            cache = {"ExternalID": svcRec.ExternalID, "Structure": [], "Activities": {}}
        if "Structure" not in cache:
            cache["Structure"] = []
        self._folderRecurse(cache["Structure"], dbcl, syncRoot)

        activities = []
        exclusions = []

        # Renamed loop variables from `dir`/`file` to avoid shadowing builtins.
        for folder in cache["Structure"]:
            for file_rec in folder["Files"]:
                path = file_rec["Path"]
                if svcRec.Authorization["Full"]:
                    relPath = path.replace(syncRoot, "", 1)
                else:
                    relPath = path.replace("/Apps/tapiriik/", "", 1)  # dropbox api is meh api

                hashedRelPath = self._hash_path(relPath)
                if hashedRelPath in cache["Activities"]:
                    existing = cache["Activities"][hashedRelPath]
                else:
                    existing = None

                if not existing:
                    # Continue to use the old records keyed by UID where possible
                    existing = [
                        (k, x) for k, x in cache["Activities"].items()
                        if "Path" in x and x["Path"] == relPath
                    ]  # path is relative to syncroot to reduce churn if they relocate it
                    existing = existing[0] if existing else None
                    if existing is not None:
                        existUID, existing = existing
                        existing["UID"] = existUID

                if existing and existing["Rev"] == file_rec["Rev"]:
                    # don't need entire activity loaded here, just UID
                    act = UploadedActivity()
                    act.UID = existing["UID"]
                    try:
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
                    except ValueError:  # narrowed from bare except: only a parse failure is expected here
                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y")  # Exactly one user has managed to break %z :S
                    if "EndTime" in existing:  # some cached activities may not have this, it is not essential
                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
                else:
                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
                    # get the full activity
                    try:
                        act, rev = self._getActivity(svcRec, dbcl, path)
                    except APIExcludeActivity as e:
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(strip_context(e))
                        continue

                    try:
                        act.EnsureTZ()
                    except Exception:  # best-effort; don't swallow SystemExit/KeyboardInterrupt
                        pass  # We tried.

                    if hasattr(act, "OriginatedFromTapiriik") and not act.CountTotalWaypoints():
                        # This is one of the files created when TCX export was hopelessly broken for non-GPS activities.
                        # Right now, no activities in dropbox from tapiriik should be devoid of waypoints - since dropbox doesn't receive stationary activities
                        # In the future when this changes, will obviously have to modify this code to also look at modification dates or similar.
                        if ".tcx.summary-data" in path:
                            logger.info("...summary file already moved")
                        else:
                            logger.info("...moving summary-only file")
                            dbcl.file_move(path, path.replace(".tcx", ".tcx.summary-data"))
                        continue  # DON'T include in listing - it'll be regenerated
                    del act.Laps
                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
                    cache["Activities"][hashedRelPath] = {
                        "Rev": rev,
                        "UID": act.UID,
                        "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"),
                        "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")
                    }
                tagRes = self._tagActivity(relPath)
                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}

                act.Type = tagRes if tagRes is not None else ActivityType.Other

                logger.debug("Activity s/t %s" % act.StartTime)

                activities.append(act)

        if "_id" in cache:
            cachedb.dropbox_cache.save(cache)
        else:
            cachedb.dropbox_cache.insert(cache)
        return activities, exclusions
Example #6
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        """List workouts from the Endomondo mobile API.

        Pages backwards through the workout list (45 per request), stopping
        after one page unless `exhaustive` is set or the API reports more
        results. Timezones are resolved from cachedb.endomondo_activity_cache,
        falling back to downloading the raw track record. Returns a tuple
        (activities, exclusions). Raises APIException on auth failure or any
        non-200 response.
        """
        activities = []
        exclusions = []
        earliestDate = None
        earliestFirstPageDate = None
        paged = False

        while True:
            before = "" if earliestDate is None else earliestDate.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
            params = {"authToken": serviceRecord.Authorization["AuthToken"], "maxResults": 45, "before": before}
            logger.debug("Req with " + str(params))
            response = requests.get("http://api.mobile.endomondo.com/mobile/api/workout/list", params=params)

            if response.status_code != 200:
                if response.status_code == 401 or response.status_code == 403:
                    raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))
                raise APIException("Unable to retrieve activity list " + str(response))
            data = response.json()

            if "error" in data and data["error"]["type"] == "AUTH_FAILED":
                raise APIException("No authorization to retrieve activity list", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True))

            track_ids = []
            this_page_activities = []
            for act in data["data"]:
                startTime = pytz.utc.localize(datetime.strptime(act["start_time"], "%Y-%m-%d %H:%M:%S UTC"))
                if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                    earliestDate = startTime
                logger.debug("activity pre")
                if "tracking" in act and act["tracking"]:
                    logger.warning("\t tracking")
                    exclusions.append(APIExcludeActivity("In progress", activityId=act["id"], permanent=False))
                    continue  # come back once they've completed the activity
                track_ids.append(act["id"])
                activity = UploadedActivity()
                activity.StartTime = startTime
                activity.EndTime = activity.StartTime + timedelta(0, round(act["duration_sec"]))
                logger.debug("\tActivity s/t " + str(activity.StartTime))

                activity.Stationary = not act["has_points"]

                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]
                activity.ServiceData = {"ActivityID": act["id"]}

                this_page_activities.append(activity)
            cached_track_tzs = cachedb.endomondo_activity_cache.find({"TrackID": {"$in": track_ids}})
            cached_track_tzs = dict([(x["TrackID"], x) for x in cached_track_tzs])
            logger.debug("Have" + str(len(cached_track_tzs.keys())) + "/" + str(len(track_ids)) + " cached TZ records")

            for activity in this_page_activities:
                # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
                cachedTrackData = None
                track_id = activity.ServiceData["ActivityID"]

                if track_id not in cached_track_tzs:
                    logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                    cachedTrackData = self._downloadRawTrackRecord(serviceRecord, track_id)
                    try:
                        self._populateActivityFromTrackData(activity, cachedTrackData, minimumWaypoints=True)
                    except APIExcludeActivity as e:
                        e.ExternalActivityID = track_id
                        logger.info("Encountered APIExcludeActivity %s" % str(e))
                        exclusions.append(e)
                        continue

                    if not activity.TZ and not activity.Stationary:
                        logger.info("Couldn't determine TZ")
                        exclusions.append(APIExcludeActivity("Couldn't determine TZ", activityId=track_id))
                        continue
                    cachedTrackRecord = {"Owner": serviceRecord.ExternalID, "TrackID": track_id, "TZ": pickle.dumps(activity.TZ), "StartTime": activity.StartTime}
                    cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
                elif not activity.Stationary:
                    activity.TZ = pickle.loads(cached_track_tzs[track_id]["TZ"])
                    activity.AdjustTZ()  # Everything returned is in UTC

                activity.Laps = []
                # BUGFIX: the original read `act["id"]`/`act["sport"]` here, but `act`
                # is the stale loop variable left over from the listing loop above -
                # every activity on the page got the ActivityID (and a re-assigned
                # Type) of the *last* workout listed. Type was already set correctly
                # per-activity in the first loop; use the per-activity track_id here.
                activity.ServiceData = {"ActivityID": track_id, "ActivityData": cachedTrackData}
                activity.CalculateUID()
                activities.append(activity)

            if not paged:
                earliestFirstPageDate = earliestDate
            if not exhaustive or ("more" in data and data["more"] is False):
                break
            else:
                paged = True
        return activities, exclusions
Example #7
0
    def _populate_sbr_activity(self, api_sbr_activity, usersettings):
        """Build an UploadedActivity from one basic SBR (swim/bike/run) feed entry.

        api_sbr_activity: one decoded JSON object from BT's activity feed (example below).
        usersettings: dict holding at least "Privacy" and "TimeZone" for this user.
        Returns the populated UploadedActivity. GPS/sensor data is not present in the
        basic feed; DownloadActivity fills it in later if a device file is attached.
        """
        # Example JSON feed (unimportant fields have been removed)
        # [{
        #    "EventId": 63128401,                   #  Internal ID
        #    "EventType": 3,                        #  Swim (3), bike (1), or run (2)
        #    "EventDate": "4/22/2016",
        #    "EventTime": "7:44 AM",                #  User's time, time zone not specified
        #    "Planned": false,                      #  Training plan or actual data
        #    "TotalMinutes": 34.97,
        #    "TotalKilometers": 1.55448,
        #    "AverageHeartRate": 125,
        #    "MinimumHeartRate": 100,
        #    "MaximumHeartRate": 150,
        #    "MemberId": 999999,
        #    "MemberUsername": "******",
        #    "HasDeviceUpload": true,
        #    "DeviceUploadFile": "http://beginnertriathlete.com/discussion/storage/workouts/555555/abcd-123.fit",
        #    "RouteName": "",                       #  Might contain a description of the event
        #    "Comments": "",                        #  Same as above. Not overly often used.
        # }, ... ]

        activity = UploadedActivity()
        workout_id = api_sbr_activity["EventId"]
        eventType = api_sbr_activity["EventType"]
        eventDate = api_sbr_activity["EventDate"]
        eventTime = api_sbr_activity["EventTime"]
        totalMinutes = api_sbr_activity["TotalMinutes"]
        totalKms = api_sbr_activity["TotalKilometers"]
        averageHr = api_sbr_activity["AverageHeartRate"]
        minimumHr = api_sbr_activity["MinimumHeartRate"]
        maximumHr = api_sbr_activity["MaximumHeartRate"]
        deviceUploadFile = api_sbr_activity["DeviceUploadFile"]

        # Basic SBR data does not include GPS or sensor data. If this event originated from a device upload,
        # DownloadActivity will find it.
        activity.Stationary = True

        # Same as above- The data might be there, but it's not supplied in the basic activity feed.
        activity.GPS = False

        activity.Private = usersettings["Privacy"]
        activity.Type = self._workoutTypeMappings[str(eventType)]

        # Get the user's timezone from their profile, falling back to the site default
        # when the stored name is unknown to pytz. If DownloadActivity later finds
        # device data it may overwrite this with a more accurate value.
        try:
            activity.TZ = pytz.timezone(usersettings["TimeZone"])
        except pytz.exceptions.UnknownTimeZoneError:
            activity.TZ = pytz.timezone(self._serverDefaultTimezone)

        # BUG FIX: pytz zones must be attached with localize(); datetime.replace(tzinfo=...)
        # silently uses the zone's LMT offset for most zones (per pytz documentation).
        activity.StartTime = activity.TZ.localize(
            dateutil.parser.parse(eventDate + " " + eventTime, dayfirst=False))
        activity.EndTime = activity.StartTime + timedelta(minutes=totalMinutes)

        # Derive the statistics the basic feed gives us. Without laps or samples,
        # moving time and timer time are both the reported total duration.
        activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers,
                                                    value=totalKms)
        activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute,
                                              avg=float(averageHr),
                                              min=float(minimumHr),
                                              max=float(maximumHr))
        activity.Stats.MovingTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                      value=float(totalMinutes * 60))
        activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds,
                                                     value=float(totalMinutes * 60))
        # While BT does support laps, the current API doesn't report on them - a limitation that may need to be
        # corrected in a future update. For now, treat manual entries as a single lap. As more and more people upload
        # workouts using devices anyway, this probably matters much less than it once did.
        lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)
        activity.Laps = [lap]

        # UID is the cross-service duplicate-detection key.
        activity.CalculateUID()

        # If a device file is attached, we'll get more details about this event in DownloadActivity
        activity.ServiceData = {
            "ID": int(workout_id),
            "DeviceUploadFile": deviceUploadFile
        }

        return activity
예제 #8
0
    def DownloadActivityList(self, serviceRecord, exhaustive=False):
        """Fetch the user's workout list from the Endomondo mobile API.

        Pages backwards through time, 45 workouts per request, stopping after one
        page unless `exhaustive` is set (or the API reports no more data).
        Returns (activities, exclusions).
        Raises APIException (blocking, with a user exception) on authorization
        failure, or a plain APIException on other HTTP errors.
        """
        activities = []
        exclusions = []
        earliestDate = None
        paged = False

        while True:
            # Page cursor: everything strictly before the earliest start time seen so far.
            before = "" if earliestDate is None else earliestDate.astimezone(
                pytz.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
            params = {
                "authToken": serviceRecord.Authorization["AuthToken"],
                "maxResults": 45,
                "before": before
            }
            logger.debug("Req with %s", params)
            response = requests.get(
                "http://api.mobile.endomondo.com/mobile/api/workout/list",
                params=params)

            if response.status_code != 200:
                if response.status_code in (401, 403):
                    raise APIException(
                        "No authorization to retrieve activity list",
                        block=True,
                        user_exception=UserException(
                            UserExceptionType.Authorization,
                            intervention_required=True))
                raise APIException("Unable to retrieve activity list " +
                                   str(response))
            data = response.json()

            # The API can also report auth failure in-band with an HTTP 200.
            if "error" in data and data["error"]["type"] == "AUTH_FAILED":
                raise APIException(
                    "No authorization to retrieve activity list",
                    block=True,
                    user_exception=UserException(
                        UserExceptionType.Authorization,
                        intervention_required=True))

            track_ids = []
            this_page_activities = []
            for act in data["data"]:
                startTime = pytz.utc.localize(
                    datetime.strptime(act["start_time"],
                                      "%Y-%m-%d %H:%M:%S UTC"))
                if earliestDate is None or startTime < earliestDate:  # probably redundant, I would assume it works out the TZes...
                    earliestDate = startTime
                logger.debug("activity pre")
                if "tracking" in act and act["tracking"]:
                    # Workout still in progress - come back once they've completed it.
                    logger.warning("\t tracking")
                    exclusions.append(
                        APIExcludeActivity("In progress",
                                           activityId=act["id"],
                                           permanent=False))
                    continue
                track_ids.append(act["id"])
                activity = UploadedActivity()
                activity.StartTime = startTime
                activity.EndTime = activity.StartTime + timedelta(
                    0, round(act["duration_sec"]))
                logger.debug("\tActivity s/t %s", activity.StartTime)

                activity.Stationary = not act["has_points"]

                if int(act["sport"]) in self._activityMappings:
                    activity.Type = self._activityMappings[int(act["sport"])]
                activity.ServiceData = {"ActivityID": act["id"]}

                this_page_activities.append(activity)

            # Bulk-fetch cached timezone records so we only hit the track endpoint
            # for activities we haven't resolved before.
            cached_track_tzs = cachedb.endomondo_activity_cache.find(
                {"TrackID": {
                    "$in": track_ids
                }})
            cached_track_tzs = dict([(x["TrackID"], x)
                                     for x in cached_track_tzs])
            logger.debug("Have %d/%d cached TZ records",
                         len(cached_track_tzs), len(track_ids))

            for activity in this_page_activities:
                # attn service makers: why #(*%$ can't you all agree to use naive local time. So much simpler.
                cachedTrackData = None
                track_id = activity.ServiceData["ActivityID"]

                if track_id not in cached_track_tzs:
                    logger.debug("\t Resolving TZ for %s" % activity.StartTime)
                    cachedTrackData = self._downloadRawTrackRecord(
                        serviceRecord, track_id)
                    try:
                        self._populateActivityFromTrackData(
                            activity, cachedTrackData, minimumWaypoints=True)
                    except APIExcludeActivity as e:
                        e.ExternalActivityID = track_id
                        logger.info("Encountered APIExcludeActivity %s" %
                                    str(e))
                        exclusions.append(e)
                        continue

                    if not activity.TZ and not activity.Stationary:
                        logger.info("Couldn't determine TZ")
                        exclusions.append(
                            APIExcludeActivity("Couldn't determine TZ",
                                               activityId=track_id))
                        continue
                    cachedTrackRecord = {
                        "Owner": serviceRecord.ExternalID,
                        "TrackID": track_id,
                        "TZ": pickle.dumps(activity.TZ),
                        "StartTime": activity.StartTime
                    }
                    cachedb.endomondo_activity_cache.insert(cachedTrackRecord)
                elif not activity.Stationary:
                    activity.TZ = pickle.loads(
                        cached_track_tzs[track_id]["TZ"])
                    activity.AdjustTZ()  # Everything returned is in UTC

                activity.Laps = []
                # BUG FIX: the original referenced the leaked loop variable `act` here
                # (stuck on the *last* workout of the page), clobbering every activity's
                # ServiceData ID and Type with the last workout's values. Use this
                # activity's own ID; Type was already set correctly in the first loop.
                activity.ServiceData = {
                    "ActivityID": track_id,
                    "ActivityData": cachedTrackData
                }
                activity.CalculateUID()
                activities.append(activity)

            # Stop after the first page unless exhaustive; also stop when the API
            # explicitly reports there is no more data.
            if not exhaustive or ("more" in data and data["more"] is False):
                break
            else:
                paged = True
        return activities, exclusions