def auth_persist_extended_auth_ajax(req, service):
    """AJAX endpoint: persist the extended auth details for the user's connection to `service`.

    Returns 404 if the user has no connection to this service.
    """
    svc = Service.FromID(service)
    matching_ids = [conn["ID"] for conn in req.user["ConnectedServices"] if conn["Service"] == svc.ID]
    if not matching_ids:
        return HttpResponse(status=404)
    record = Service.GetServiceRecordByID(matching_ids[0])
    # Only persist when there are actually extended details to store.
    if record.HasExtendedAuthorizationDetails():
        Service.PersistExtendedAuthDetails(record)
    return HttpResponse()
def auth_disconnect_do(req, service):
    """Delete the user's record for `service` and unlink it from the account.

    Silently returns None when the user has no connection to the service.
    """
    svc = Service.FromID(service)
    matching_ids = [conn["ID"] for conn in req.user["ConnectedServices"] if conn["Service"] == svc.ID]
    if not matching_ids:
        return
    record = Service.GetServiceRecordByID(matching_ids[0])
    Service.DeleteServiceRecord(record)
    User.DisconnectService(record)
    return True
def RefreshPaymentStateForExternalIDs(self, external_ids):
    """Re-query the Motivato payment state for the given external IDs and apply it per user."""
    from tapiriik.services import Service, ServiceRecord
    external_ids = [str(ext_id) for ext_id in external_ids]
    connections = [
        ServiceRecord(rec)
        for rec in db.connections.find({"Service": "motivato", "ExternalID": {"$in": external_ids}})
    ]
    affected_users = db.users.find(
        {"ConnectedServices.ID": {"$in": [conn._id for conn in connections]}})
    for user in affected_users:
        connected_ids = [svc["ID"] for svc in user["ConnectedServices"]]
        my_connection = [conn for conn in connections if conn._id in connected_ids][0]
        # Defer to the actual service module, where all the session stuff is set up
        state = Service.FromID("motivato")._getPaymentState(my_connection)
        self.ApplyPaymentState(user, state, my_connection.ExternalID, duration=None)
def sync_trigger_partial_sync_callback(req, service):
    """Webhook endpoint for partial-sync triggers from `service`.

    POST resolves the notification to users and schedules an immediate sync
    for each of them; GET is delegated to the service's verification handler.
    """
    svc = Service.FromID(service)
    if req.method == "GET":
        return svc.PartialSyncTriggerGET(req)
    if req.method != "POST":
        return HttpResponse(status=400)
    # Map the external IDs in the notification to our own user records.
    external_ids = svc.ExternalIDsForPartialSyncTrigger(req)
    _sync = Sync()
    users_to_sync = _sync.getUsersIDFromExternalId(external_ids, service)
    if not users_to_sync:
        return HttpResponse(status=401)
    for user in users_to_sync:
        last_sync = user.get("LastSynchronization")
        # Reject the trigger if a user synchronized too recently.
        if last_sync is not None and datetime.utcnow() - last_sync < _sync.MinimumSyncInterval:
            return HttpResponse(status=403)
        # Users idle for a long time get a full (exhaustive) sync instead.
        exhaustive = None
        if last_sync is not None and datetime.utcnow() - last_sync > _sync.MaximumIntervalBeforeExhaustiveSync:
            exhaustive = True
        # Force immediate sync.
        _sync.ScheduleImmediateSync(user, exhaustive)
    return HttpResponse(status=204)
def privacy(request):
    """Render the privacy-policy matrix; the user's connected services sort first."""
    OPTIN = "<span class=\"optin policy\">Opt-in</span>"
    NO = "<span class=\"no policy\">No</span>"
    YES = "<span class=\"yes policy\">Yes</span>"
    CACHED = "<span class=\"cached policy\">Cached</span>"
    SEEBELOW = "See below"

    services = {svc.ID: {"DisplayName": svc.DisplayName, "ID": svc.ID}
                for svc in Service.List() if svc.ID not in WITHDRAWN_SERVICES}

    # Per-service data-retention policy flags merged into the display dicts.
    policy_table = {
        "garminconnect": {"email": OPTIN, "password": OPTIN, "tokens": NO, "metadata": YES, "data": NO},
        "strava": {"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": NO},
        "sporttracks": {"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": NO},
        "dropbox": {"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": CACHED},
        "runkeeper": {"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": NO},
        "rwgps": {"email": OPTIN, "password": OPTIN, "tokens": NO, "metadata": YES, "data": NO},
        "trainingpeaks": {"email": OPTIN, "password": OPTIN, "tokens": NO, "metadata": YES, "data": NO},
        "endomondo": {"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": NO},
        "motivato": {"email": OPTIN, "password": OPTIN, "tokens": NO, "metadata": YES, "data": NO},
    }
    for svc_id, policy in policy_table.items():
        services[svc_id].update(policy)

    # Soft-launched services aren't listed on the policy page yet.
    for svc_id in SOFT_LAUNCH_SERVICES:
        services.pop(svc_id, None)

    def user_services_sort(service):
        # Connected services sort first (0); everything else after (1).
        if not request.user:
            return 0
        return 0 if User.IsServiceConnected(request.user, service["ID"]) else 1

    services_list = sorted(services.values(), key=user_services_sort)
    return render(request, "privacy.html", {"services": services_list})
def auth_do(req, service):
    """Authorize the user against `service` with the POSTed credentials.

    Returns True on success, False on failure, or a dict describing a
    user-facing exception raised during authorization.
    """
    svc = Service.FromID(service)
    from tapiriik.services.api import APIException
    extendedAuthData = None
    try:
        if svc.RequiresExtendedAuthorizationDetails:
            uid, authData, extendedAuthData = svc.Authorize(req.POST["username"], req.POST["password"])
        else:
            uid, authData = svc.Authorize(req.POST["username"], req.POST["password"])
    except APIException as e:
        if e.UserException is not None:
            return {"type": e.UserException.Type, "extra": e.UserException.Extra}
        return False
    if authData is None:
        return False
    serviceRecord = Service.EnsureServiceRecordWithAuth(
        svc, uid, authData,
        extendedAuthDetails=extendedAuthData if svc.RequiresExtendedAuthorizationDetails else None,
        persistExtendedAuthDetails=bool(req.POST.get("persist", None)))
    # Try to authenticate by this service connection.
    existingUser = User.AuthByService(serviceRecord)
    # Only log in as that different user if we don't already have an account.
    if existingUser is not None and req.user is None:
        User.Login(existingUser, req)
    else:
        User.Ensure(req)
    # Link the service to the user account; a merge may happen behind the
    # scenes (but doesn't affect the active user).
    User.ConnectService(req.user, serviceRecord)
    # TODO do other way. May be possible to achieve during LocalService auth?
    # Restrict sync to the primary server to ensure data stays accessible to
    # the web server when a local exporter is being connected.
    if PRIMARY_HOST_NAME:
        db.users.update(
            {"ConnectedServices.Service": service},
            {"$set": {"SynchronizationHostRestriction": PRIMARY_HOST_NAME}})
    return True
def privacy(request):
    """Render the privacy-policy matrix for all registered services."""
    OPTIN = "<span class=\"optin policy\">Opt-in</span>"
    NO = "<span class=\"no policy\">No</span>"
    YES = "<span class=\"yes policy\">Yes</span>"
    CACHED = "<span class=\"cached policy\">Cached</span>"

    # Dict comprehension instead of dict([[k, v], ...]).
    services = {x.ID: {"DisplayName": x.DisplayName, "ID": x.ID} for x in Service.List()}

    services["garminconnect"].update({"email": OPTIN, "password": OPTIN, "tokens": NO, "metadata": YES, "data": NO})
    services["strava"].update({"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": NO})
    services["sporttracks"].update({"email": OPTIN, "password": OPTIN, "tokens": NO, "metadata": YES, "data": NO})
    services["endomondo"].update({"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": NO})
    services["dropbox"].update({"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": CACHED})
    services["runkeeper"].update({"email": NO, "password": NO, "tokens": YES, "metadata": YES, "data": NO})

    # Only the values are used - no need to iterate .items() and discard keys.
    services_list = list(services.values())
    return render(request, "privacy.html", {"services": services_list})
def authreturn(req, service, level=None):
    """OAuth return endpoint (RunnersConnect flavor).

    Ensures a user keyed by the RC token exists (with its pseudo-service
    record attached), then exchanges the OAuth callback for `service` and
    links it. Responds with a JSON summary of the connected services.
    """
    rc_token = req.GET.get('rc_token')
    if rc_token is None:
        return redirect("https://app.runnersconnect.net")
    rc_user = User.EnsureWithRcToken(req, rc_token)
    # The RC pseudo-service record stores the token as extended auth, persisted.
    rc_serviceRecord = Service.EnsureServiceRecordWithAuth(
        RunnersConnectService, rc_token, {}, {"token": rc_token}, True)
    User.ConnectService(rc_user, rc_serviceRecord)
    logger.info("Auto logged user %s " % (req.user['rc_token']))

    if "error" in req.GET or "not_approved" in req.GET:
        success = False
    else:
        svc = Service.FromID(service)
        try:
            uid, authData = svc.RetrieveAuthorizationToken(req, level)
        except Exception as e:
            logger.info("Errrrr %s " % (str(e)))
            return render(req, "oauth-failure.html", {"service": svc, "error": str(e)})
        serviceRecord = Service.EnsureServiceRecordWithAuth(svc, uid, authData)
        # The user was already created and logged in via the RC token above,
        # so the usual auth-by-service-record path is skipped here.
        # Link the service; a merge may happen behind the scenes (doesn't
        # affect the active user).
        User.ConnectService(req.user, serviceRecord)
        success = True

    connectedServices = [s["Service"] for s in req.user['ConnectedServices']]
    logger.info("connected services %s " % (connectedServices))
    payload = {
        "success": success is True,
        "user": req.user["rc_token"],
        "connectedServices": connectedServices,
    }
    return HttpResponse(json.dumps(payload), content_type='application/json')
def auth_login(req, service):
    """Show the login form for `service`; on POSTed credentials, attempt auth."""
    if "password" in req.POST:
        if auth_do(req, service):
            return redirect("dashboard")
    # Either no credentials yet or auth failed - (re)render the form.
    return render(req, "auth/login.html",
                  {"serviceid": service, "service": Service.FromID(service)})
def sync_trigger_partial_sync_callback(req, service):
    """Queue a remote partial-sync trigger for the external IDs named in `req`."""
    from sync_remote_triggers import trigger_remote
    svc = Service.FromID(service)
    external_ids = svc.ExternalIDsForPartialSyncTrigger(req)
    # Hand the heavy lifting off to the task queue.
    trigger_remote.apply_async(args=[service, external_ids])
    return HttpResponse(status=204)
def test_svc_level_dupe(self):
    ''' check that service-level duplicate activities are caught (no DB involvement) '''
    svcA, svcB = TestTools.create_mock_services()
    first = Activity()
    first.StartTime = datetime(1, 2, 3, 4, 5, 6, 7)
    first.UploadedTo = [TestTools.create_mock_upload_record(svcA)]
    second = Activity()
    second.StartTime = first.StartTime
    second.UploadedTo = [TestTools.create_mock_upload_record(svcB)]
    for act in (first, second):
        act.CalculateUID()
    merged = []
    Sync._accumulateActivities(Service.FromID("mockA"), [first], merged)
    Sync._accumulateActivities(Service.FromID("mockB"), [second], merged)
    # Identical start times on two services must merge to a single activity.
    self.assertEqual(len(merged), 1)
def js_bridge(req):
    """Build the per-service info blob handed to the frontend JS as JSON.

    Returns a dict suitable for a template context:
    {"js_bridge_serviceinfo": <JSON string keyed by service ID>}.
    Withdrawn services are omitted; flow exceptions for the current user are
    folded into each entry's "BlockFlowTo" list.
    """
    serviceInfo = {}
    for svc in Service.List():
        if svc.ID in WITHDRAWN_SERVICES:
            continue
        if req.user is not None:
            svcRec = User.GetConnectionRecord(
                req.user, svc.ID)  # maybe make the auth handler do this only once?
        else:
            svcRec = None
        # Static, connection-independent properties of the service.
        info = {
            "DisplayName": svc.DisplayName,
            "DisplayAbbreviation": svc.DisplayAbbreviation,
            "AuthenticationType": svc.AuthenticationType,
            "UsesExtendedAuth": svc.RequiresExtendedAuthorizationDetails,
            "AuthorizationURL": svc.UserAuthorizationURL,
            "NoFrame": svc.AuthenticationNoFrame,
            "ReceivesActivities": svc.ReceivesActivities,
            "Configurable": svc.Configurable,
            "RequiresConfiguration": False  # by default
        }
        if svcRec:
            if svc.Configurable:
                if svc.ID == "dropbox":  # dirty hack alert, but better than dumping the auth details in their entirety
                    info["AccessLevel"] = "full" if svcRec.Authorization[
                        "Full"] else "normal"
                info["RequiresConfiguration"] = svc.RequiresConfiguration(
                    svcRec)
                info["Config"] = svcRec.GetConfiguration()
            info["HasExtendedAuth"] = svcRec.HasExtendedAuthorizationDetails()
            info[
                "PersistedExtendedAuth"] = svcRec.HasExtendedAuthorizationDetails(
                    persisted_only=True)
            info["ExternalID"] = svcRec.ExternalID
        info["BlockFlowTo"] = []
        info["Connected"] = svcRec is not None
        serviceInfo[svc.ID] = info
    if req.user is not None:
        # Flow exceptions block activity propagation between specific connections.
        flowExc = User.GetFlowExceptions(req.user)
        for exc in flowExc:
            if exc["Source"]["Service"] not in serviceInfo or exc["Target"][
                    "Service"] not in serviceInfo:
                continue  # Withdrawn services
            if "ExternalID" in serviceInfo[exc["Source"][
                    "Service"]] and exc["Source"]["ExternalID"] != serviceInfo[
                        exc["Source"]["Service"]]["ExternalID"]:
                continue  # this is an old exception for a different connection
            if "ExternalID" in serviceInfo[exc["Target"][
                    "Service"]] and exc["Target"]["ExternalID"] != serviceInfo[
                        exc["Target"]["Service"]]["ExternalID"]:
                continue  # same as above
            serviceInfo[exc["Source"]["Service"]]["BlockFlowTo"].append(
                exc["Target"]["Service"])
    return {"js_bridge_serviceinfo": json.dumps(serviceInfo)}
def test_svc_level_dupe_tz_nonuniform(self):
    ''' check that service-level duplicate activities with non-uniform TZs are caught '''
    svcA, svcB = TestTools.create_mock_services()
    naive = Activity()
    naive.StartTime = datetime(1, 2, 3, 4, 5, 6, 7)
    naive.UploadedTo = [TestTools.create_mock_upload_record(svcA)]
    aware = Activity()
    aware.StartTime = pytz.timezone("America/Denver").localize(naive.StartTime)
    aware.UploadedTo = [TestTools.create_mock_upload_record(svcB)]
    naive.CalculateUID()
    aware.CalculateUID()
    merged = []
    Sync._accumulateActivities(Service.FromID("mockA"), [naive], merged)
    Sync._accumulateActivities(Service.FromID("mockB"), [aware], merged)
    # Naive and tz-aware versions of the same instant should still merge.
    self.assertEqual(len(merged), 1)
def trigger_poll(service_id, index):
    """Poll `service_id` at poll-index `index` and schedule syncs for affected users."""
    from tapiriik.auth import User
    print("Polling %s-%d" % (service_id, index))
    svc = Service.FromID(service_id)
    external_ids = svc.PollPartialSyncTrigger(index)
    print("Triggering %d connections via %s-%d" % (len(external_ids), service_id, index))
    # MONGO_FULL_WRITE_CONCERN because there was a race where users would get
    # picked for synchronization before their service record was updated on
    # the correct secondary - so it'd think the service wasn't triggered.
    db.connections.update_many(
        {"Service": service_id, "ExternalID": {"$in": external_ids}},
        {"$set": {"TriggerPartialSync": True,
                  "TriggerPartialSyncTimestamp": datetime.utcnow()}},
        w=MONGO_FULL_WRITE_CONCERN)
    connection_ids = [
        rec["_id"]
        for rec in db.connections.find(
            {"Service": svc.ID, "ExternalID": {"$in": external_ids}},
            {"_id": 1})
    ]
    trigger_users_query = User.PaidUserMongoQuery()
    trigger_users_query.update({"ConnectedServices.ID": {"$in": connection_ids}})
    trigger_users_query.update({"Config.suppress_auto_sync": {"$ne": True}})
    # It would be nicer to use the Sync.Schedule... method, but this is done
    # in bulk here for cleanliness.
    db.users.update_many(
        trigger_users_query,
        {"$set": {"NextSynchronization": datetime.utcnow()}})
    db.poll_stats.insert_one({
        "Service": service_id,
        "Index": index,
        "Timestamp": datetime.utcnow(),
        "TriggerCount": len(external_ids)
    })
def authreturn(req, service, level=None):
    """OAuth return endpoint: exchange the callback for a token and link the service."""
    if "error" in req.GET or "not_approved" in req.GET:
        success = False
    else:
        svc = Service.FromID(service)
        uid, authData = svc.RetrieveAuthorizationToken(req, level)
        serviceRecord = Service.EnsureServiceRecordWithAuth(svc, uid, authData)
        # Auth by this service connection; only log in as that different
        # user if we don't already have an account.
        existingUser = User.AuthByService(serviceRecord)
        if req.user is None and existingUser is not None:
            User.Login(existingUser, req)
        else:
            User.Ensure(req)
        # Link the service; a possible merge happens behind the scenes
        # (but doesn't affect the active user).
        User.ConnectService(req.user, serviceRecord)
        success = True
    return render(req, "oauth-return.html", {"success": 1 if success else 0})
def test_svc_level_dupe_tz_irregular(self):
    ''' check that service-level duplicate activities with irregular TZs are caught '''
    svcA, svcB = TestTools.create_mock_services()
    original = Activity()
    original.StartTime = pytz.timezone("America/Edmonton").localize(
        datetime(1, 2, 3, 4, 5, 6, 7))
    original.UploadedTo = [TestTools.create_mock_upload_record(svcA)]
    shifted = Activity()
    # Same instant expressed in a different zone.
    shifted.StartTime = original.StartTime.astimezone(pytz.timezone("America/Iqaluit"))
    shifted.UploadedTo = [TestTools.create_mock_upload_record(svcB)]
    original.CalculateUID()
    shifted.CalculateUID()
    merged = []
    Sync._accumulateActivities(Service.FromID("mockA"), [original], merged)
    Sync._accumulateActivities(Service.FromID("mockB"), [shifted], merged)
    self.assertEqual(len(merged), 1)
def sync_trigger_partial_sync_callback(req, service):
    """Webhook endpoint: POST queues remote triggers, GET handles verification."""
    svc = Service.FromID(service)
    if req.method == "GET":
        return svc.PartialSyncTriggerGET(req)
    if req.method != "POST":
        return HttpResponse(status=400)
    from sync_remote_triggers import trigger_remote
    external_ids = svc.ExternalIDsForPartialSyncTrigger(req)
    # Defer the actual trigger work to the task queue.
    trigger_remote.apply_async(args=[service, external_ids])
    return HttpResponse(status=204)
def auth_disconnect(req, service):
    """Confirm-and-disconnect page for `service`; requires a logged-in user."""
    if not req.user:
        return redirect("dashboard")
    if req.POST.get("action") == "disconnect":
        auth_disconnect_do(req, service)
        return redirect("dashboard")
    return render(req, "auth/disconnect.html",
                  {"serviceid": service, "service": Service.FromID(service)})
def sync_status_rc(req):
    """RC-token-authenticated variant of sync_status."""
    token = req.GET.get('token')
    if token is None:
        return HttpResponse(status=403)
    user = User.EnsureWithRcToken(req, token)
    # The RC pseudo-service record stores the token as (persisted) extended auth.
    serviceRecord = Service.EnsureServiceRecordWithAuth(
        RunnersConnectService, token, {}, {"token": token}, True)
    User.ConnectService(user, serviceRecord)
    return sync_status(req)
def auth_rc(req):
    """Log a user in via a RunnersConnect token and show the dashboard."""
    token = req.GET.get('token')
    if token is None:
        return redirect("https://app.runnersconnect.net")
    user = User.EnsureWithRcToken(req, token)
    # The RC pseudo-service record stores the token as (persisted) extended auth.
    serviceRecord = Service.EnsureServiceRecordWithAuth(
        RunnersConnectService, token, {}, {"token": token}, True)
    User.ConnectService(user, serviceRecord)
    return render(req, "dashboard.html")
def test_svc_level_dupe_time_leeway(self):
    ''' check that service-level duplicate activities within the defined time leeway are caught '''
    svcA, svcB = TestTools.create_mock_services()
    shared_type = set(svcA.SupportedActivities).intersection(
        set(svcB.SupportedActivities)).pop()
    actA = Activity()
    actA.StartTime = datetime(1, 2, 3, 4, 5, 6, 7)
    actA.UploadedTo = [TestTools.create_mock_upload_record(svcA)]
    actA.Type = shared_type
    actB = Activity()
    actB.StartTime = datetime(1, 2, 3, 4, 6, 6, 7)  # one minute later - within the leeway
    actB.UploadedTo = [TestTools.create_mock_upload_record(svcB)]
    actB.Type = actA.Type
    actA.CalculateUID()
    actB.CalculateUID()
    activities = []
    Sync._accumulateActivities(Service.FromID("mockA"), [actA], activities)
    Sync._accumulateActivities(Service.FromID("mockB"), [actB], activities)
    # After merging, both UIDs must be attached to both activities.
    for act in (actA, actB):
        self.assertIn(actA.UID, act.UIDs)
        self.assertIn(actB.UID, act.UIDs)
    # We need to fake up the service records to avoid having to call the
    # actual sync method where these values are normally preset.
    recA = TestTools.create_mock_svc_record(svcA)
    recB = TestTools.create_mock_svc_record(svcB)
    recA.SynchronizedActivities = [actA.UID]
    recB.SynchronizedActivities = [actB.UID]
    self.assertEqual(len(Sync._determineRecipientServices(actA, [recA, recB])), 0)
    self.assertEqual(len(Sync._determineRecipientServices(actB, [recA, recB])), 0)
    self.assertEqual(len(activities), 1)
def json(self):
    """Serialize this object's state to a JSON string.

    Before dumping, augments the instance with per-upload activity URLs
    (services that don't implement UserUploadedActivityURL are skipped) and
    the total pending-deletion count. Dates are emitted as ISO-8601; any
    other non-serializable value falls back to str().
    """
    # Augment with the requisite URLs
    self.ActivityURLs = {svc: {} for svc in self.PendingDeletions.keys()}
    for svc_id, urls in self.ActivityURLs.items():
        svc = Service.FromID(svc_id)
        for upload in self.PendingDeletions[svc_id]:
            try:
                urls[upload] = svc.UserUploadedActivityURL(upload)
            except NotImplementedError:
                # Not every service exposes per-upload URLs.
                pass
    # Only the values are needed - no reason to unpack .items().
    self.PendingDeletionCount = sum(len(uploads) for uploads in self.PendingDeletions.values())
    # One isinstance call with a type tuple instead of two chained checks.
    dthandler = lambda obj: obj.isoformat() if isinstance(obj, (datetime.datetime, datetime.date)) else str(obj)
    return json.dumps(self.__dict__, default=dthandler)
def test_activity_deduplicate_normaltz(self):
    ''' ensure that we can't deduplicate activities with non-pytz timezones '''
    svcA, svcB = TestTools.create_mock_services()
    actA = TestTools.create_random_activity(svcA, tz=UTC())
    actB = Activity()
    # Strip the tz and shift slightly so these would otherwise be merge candidates.
    actB.StartTime = actA.StartTime.replace(tzinfo=None) + timedelta(seconds=10)
    actB.EndTime = actA.EndTime.replace(tzinfo=None)
    actB.UploadedTo = [TestTools.create_mock_upload_record(svcB)]
    actA.Name = "Not this"
    actB.Name = "Heya"
    actB.Type = ActivityType.Walking
    actA.CalculateUID()
    actB.CalculateUID()
    activities = []
    Sync._accumulateActivities(
        Service.FromID("mockB"), [copy.deepcopy(actB)], activities)
    # The raw (non-pytz) UTC() tzinfo on actA must be rejected with ValueError.
    self.assertRaises(ValueError, Sync._accumulateActivities,
                      Service.FromID("mockA"), [copy.deepcopy(actA)], activities)
def test_constant_representation(self):
    ''' ensures that all services' API clients are consistent through a simulated download->upload cycle '''
    # RunKeeper: round-trip an activity through upload-data creation and re-population.
    rkSvc = Service.FromID("runkeeper")
    act = TestTools.create_random_activity(rkSvc, rkSvc.SupportedActivities[0])
    record = rkSvc._createUploadData(act)
    returnedAct = rkSvc._populateActivity(record)
    act.Name = None  # RK doesn't have a "name" field, so it's fudged into the notes, but not really
    rkSvc._populateActivityWaypoints(record, returnedAct)
    self.assertActivitiesEqual(returnedAct, act)
    # Strava can't be tested well this way - its upload and download formats
    # are entirely different.
    # Endomondo: only waypoints round-trip at this point; the activity
    # metadata is somewhat out-of-band.
    eSvc = Service.FromID("endomondo")
    act = TestTools.create_random_activity(eSvc, eSvc.SupportedActivities[0])
    oldWaypoints = act.Waypoints
    self.assertEqual(oldWaypoints[0].Calories, None)
    record = eSvc._createUploadData(act)
    eSvc._populateActivityFromTrackData(act, record)
    self.assertEqual(oldWaypoints, act.Waypoints)
def browse(req, path="/"):
    """List the sub-folders of `path` in the user's Dropbox as a JSON array."""
    if req.user is None:
        return HttpResponse(status=403)
    svcRec = User.GetConnectionRecord(req.user, "dropbox")
    client = Service.FromID("dropbox")._getClient(svcRec)
    listing = client.metadata(path)
    # Keep only directory entries.
    folders = [entry["path"] for entry in listing["contents"] if entry["is_dir"] is not False]
    return HttpResponse(json.dumps(folders), content_type='application/json')
def auth_do(req, service):
    """Authorize the user against `service` using the POSTed credentials.

    Returns True on success, False on failure, or a dict describing a
    user-facing exception raised during authorization.
    """
    svc = Service.FromID(service)
    from tapiriik.services.api import APIException
    extendedAuthData = None
    try:
        if svc.RequiresExtendedAuthorizationDetails:
            uid, authData, extendedAuthData = svc.Authorize(req.POST["username"], req.POST["password"])
        else:
            uid, authData = svc.Authorize(req.POST["username"], req.POST["password"])
    except APIException as e:
        if e.UserException is not None:
            return {"type": e.UserException.Type, "extra": e.UserException.Extra}
        return False
    if authData is None:
        return False
    serviceRecord = Service.EnsureServiceRecordWithAuth(
        svc, uid, authData,
        extendedAuthDetails=extendedAuthData if svc.RequiresExtendedAuthorizationDetails else None,
        persistExtendedAuthDetails=bool(req.POST.get("persist", None)))
    # Try to authenticate by this service connection.
    existingUser = User.AuthByService(serviceRecord)
    # Only log in as that different user if we don't already have an account.
    if existingUser is not None and req.user is None:
        User.Login(existingUser, req)
    else:
        User.Ensure(req)
    # Link the service to the user account; a merge may happen behind the
    # scenes (but doesn't affect the active user).
    User.ConnectService(req.user, serviceRecord)
    return True
def trigger_remote(service_id, affected_connection_external_ids_with_payloads):
    """Flag the given connections as partial-sync-triggered and queue their users.

    Each item is either a bare external ID or a [external_id, payload] pair.
    Payloads are appended to the connection's TriggerPartialSyncPayloads list,
    capped at the most recent 90 entries. Matching paid users (without
    suppress_auto_sync) are scheduled for immediate synchronization.
    """
    from tapiriik.auth import User
    from tapiriik.services import Service
    svc = Service.FromID(service_id)
    affected_connection_ids = []
    for item in affected_connection_external_ids_with_payloads:
        if isinstance(item, list):
            external_id, payload = item
        else:
            external_id = item
            payload = None
        update_connection_query = {
            "$set": {
                "TriggerPartialSync": True,
                "TriggerPartialSyncTimestamp": datetime.utcnow()
            }
        }
        if payload is not None:
            # BUGFIX: $slice is a modifier of $each inside the pushed field,
            # not a sibling key of the field under $push - the original form
            # ({"TriggerPartialSyncPayloads": payload, "$slice": -90}) is
            # rejected by MongoDB (field names cannot start with "$").
            update_connection_query["$push"] = {
                "TriggerPartialSyncPayloads": {
                    "$each": [payload],
                    "$slice": -90  # retain only the newest 90 payloads
                }
            }
        record = db.connections.find_and_modify(
            {"Service": svc.ID, "ExternalID": external_id},
            update_connection_query,
            w=MONGO_FULL_WRITE_CONCERN)
        if record is not None:
            affected_connection_ids.append(record["_id"])
    # It would be nicer to use the Sync.Schedule... method, but I want to
    # cleanly do this in bulk.
    trigger_users_query = User.PaidUserMongoQuery()
    trigger_users_query.update({"ConnectedServices.ID": {"$in": affected_connection_ids}})
    trigger_users_query.update({"Config.suppress_auto_sync": {"$ne": True}})
    db.users.update(
        trigger_users_query,
        {"$set": {"NextSynchronization": datetime.utcnow()}},
        multi=True)
def test_constant_representation_rk(self):
    ''' ensures that all services' API clients are consistent through a simulated download->upload cycle '''
    rkSvc = Service.FromID("runkeeper")
    act = TestTools.create_random_activity(rkSvc, rkSvc.SupportedActivities[0], withLaps=False)
    record = rkSvc._createUploadData(act)
    # RK helpfully adds a "has_path" entry if we have waypoints.
    record["has_path"] = act.GPS
    returnedAct = rkSvc._populateActivity(record)
    act.Name = None  # RK doesn't have a "name" field, so it's fudged into the notes, but not really
    rkSvc._populateActivityWaypoints(record, returnedAct)
    # RK deliberately doesn't set timezone..
    returnedAct.EnsureTZ()
    self.assertActivitiesEqual(returnedAct, act)
def schedule_trigger_poll():
    """Enqueue trigger_poll tasks for every polled service index that is due."""
    schedule_data = list(db.trigger_poll_scheduling.find())
    print("Scheduler run at %s" % datetime.now())
    for svc in Service.List():
        if not svc.PartialSyncTriggerRequiresPolling:
            continue
        print("Checking %s's %d poll indexes" % (svc.ID, svc.PartialSyncTriggerPollMultiple))
        for idx in range(svc.PartialSyncTriggerPollMultiple):
            matches = [entry for entry in schedule_data
                       if entry["Service"] == svc.ID and entry["Index"] == idx]
            if matches:
                svc_schedule = matches[0]
            else:
                # Never scheduled before - datetime.min forces an immediate poll.
                svc_schedule = {"Service": svc.ID, "Index": idx, "LastScheduled": datetime.min}
            if datetime.utcnow() - svc_schedule["LastScheduled"] <= svc.PartialSyncTriggerPollInterval:
                continue
            svc_schedule["LastScheduled"] = datetime.utcnow()
            # Expire/limit the task to one poll interval so stale polls don't pile up.
            trigger_poll.apply_async(
                args=[svc.ID, idx],
                expires=svc.PartialSyncTriggerPollInterval.total_seconds(),
                time_limit=svc.PartialSyncTriggerPollInterval.total_seconds())
            db.trigger_poll_scheduling.update(
                {"Service": svc.ID, "Index": idx}, svc_schedule, upsert=True)
def trigger_poll(service_id, index):
    """Poll a service's trigger endpoint and flag the affected connections."""
    svc = Service.FromID(service_id)
    connection_ids = svc.PollPartialSyncTrigger(index)
    print("Triggering %d connections via %s-%d" % (len(connection_ids), service_id, index))
    db.connections.update(
        {"_id": {"$in": connection_ids}},
        {"$set": {"TriggerPartialSync": True}},
        multi=True)
    # Record poll statistics for monitoring.
    db.poll_stats.insert({
        "Service": service_id,
        "Index": index,
        "Timestamp": datetime.utcnow(),
        "TriggerCount": len(connection_ids)
    })