def test_insertList():
    ## Tests to see if multiple Statements can be inserted
    tc = TinCan(USER_NAME, PASSWORD, ENDPOINT)
    statement1Id = str(uuid.uuid1())
    statement2Id = str(uuid.uuid1())
    email1 = 'mailto:[email protected]'
    email2 = 'mailto:[email protected]'
    email3 = 'mailto:[email protected]'
    x = {'mbox': [email1]}
    y = {'mbox': [email2, email3]}
    now = datetime.datetime.now()

    ## Builds statement list
    statementList = [{
        "id": statement1Id,
        'actor': {'name': ['List Test1'], 'mbox': [email1]},
        'verb': 'passed',
        'object': {'id': str(uuid.uuid1()),
                   'definition': {'name': {"en-US": 'you'},
                                  'description': {"en-US": 'Testing list insertions[1] of statements.'}}}
    }, {
        "id": statement2Id,
        'actor': {'name': ['List Test2'], 'mbox': [email2, email3]},
        'verb': 'failed',
        'object': {'id': str(uuid.uuid1()),
                   'definition': {'name': {"en-US": 'you'},
                                  'description': {"en-US": 'Testing list insertions[1] of statements.'}}}
    }]

    tc.submitStatementList(statementList)
    time.sleep(10)

    ## Fetches previously entered statements
    state1 = tc.getStatementbyID(statement1Id)
    state2 = tc.getStatementbyID(statement2Id)

    ## Checks to see if the IDs are the same from inserted and retrieved
    assert state1['id'] == statement1Id
    assert state2['id'] == statement2Id
def testComplexFilter():
    ## Ensures a complex filter returns correct data
    tc = TinCan(USER_NAME, PASSWORD, ENDPOINT)
    whoDid = "Complex Test"
    statement_ID = str(uuid.uuid1())
    whoDidEmail = "*****@*****.**"
    whoDidObject = 'Test Subject'
    didWhat = 'attempted'
    now = datetime.datetime.now()
    x = {'mbox': ['mailto:' + whoDidEmail]}

    ## Inserts object to filter for
    createdJsonObject = {
        'id': statement_ID,
        'actor': {'name': [whoDid], 'mbox': ['mailto:' + whoDidEmail]},
        'verb': didWhat,
        'object': {'id': str(uuid.uuid1()),
                   'definition': {'name': {"en-US": 'with ' + whoDidObject},
                                  'description': {"en-US": 'Inserting a description for a complex filter.'}}}
    }

    ## Inserts statement
    tc.submitStatement(createdJsonObject)
    time.sleep(8)

    results = tc.getFilteredStatements(_verb=didWhat, _actor=x, _limit=1,
                                       _sparse=True, _since=str(now))
    for result in results['statements']:
        if result['verb'] == didWhat:
            for mbox in result['actor']['mbox']:
                assert mbox == 'mailto:' + whoDidEmail.lower()
def test_filter_statements():
    ## Ensures a simple filter request returns the correct information
    tc = TinCan(USER_NAME, PASSWORD, ENDPOINT)
    test_verb = 'created'
    email = '*****@*****.**'
    x = {'mbox': ['mailto:' + email]}

    results = tc.getFilteredStatements(_actor=x, _limit=5, _sparse=True)
    for result in results['statements']:
        for emails in result['actor']['mbox']:
            if emails == 'mailto:' + email.lower():
                assert True
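## Note: test_filter_statements above only asserts when a matching mbox shows
## up, so an empty result set passes vacuously. The sketch below is a stricter
## variant, not part of the original suite; it assumes getFilteredStatements
## returns the same {'statements': [...]} shape used in the tests above.
def test_filter_statements_strict():
    tc = TinCan(USER_NAME, PASSWORD, ENDPOINT)
    email = '*****@*****.**'
    results = tc.getFilteredStatements(_actor={'mbox': ['mailto:' + email]},
                                       _limit=5, _sparse=True)
    ## Collect every returned statement whose actor owns the filtered mbox
    matched = [s for s in results['statements']
               if 'mailto:' + email.lower() in s['actor']['mbox']]
    ## Fail loudly if the filter returned nothing for this actor
    assert matched, 'No statements returned for the filtered actor'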
def get_for_video_and_user_data(video, user_data, insert_if_missing=False):
    if not user_data:
        return None

    key = UserVideo.get_key_name(video, user_data)
    user_video = UserVideo.get_by_key_name(key)

    if user_video:
        return user_video
    elif insert_if_missing:
        TinCan.create_media(user_data, "launched", video)
        return UserVideo.get_or_insert(
            key_name=key,
            user=user_data.user,
            video=video,
            duration=video.duration)
    else:
        return None
def test_submit_statement():
    ## Tests to see if a single statement can be inserted
    tc = TinCan(USER_NAME, PASSWORD, ENDPOINT)
    statement_ID = str(uuid.uuid1())
    whoDid = "I"
    whoDidEmail = "*****@*****.**"
    whoDidObject = 'you'
    didWhat = 'created'

    ## Build the Tin Can statement
    createdJsonObject = {
        "id": statement_ID,
        'actor': {'name': [whoDid], 'mbox': ['mailto:' + whoDidEmail]},
        'verb': didWhat,
        'object': {'id': str(uuid.uuid1()),
                   'definition': {'name': {"en-US": whoDidObject},
                                  'description': {"en-US": 'Testing single insertion of a statement.'}}}
    }

    tc.submitStatement(createdJsonObject)
    ## Sleeps to ensure the statement is inserted
    time.sleep(10)

    ## Gets the statement by the ID used when inserted
    testStatement = tc.getStatementbyID(statement_ID)
    ## Ensures the ID when inserted and the ID on retrieval are the same
    assert testStatement['id'] == statement_ID
def test_GetAllStatements():
    ## Ensures every statement returned by getAllStatements carries an id
    tc = TinCan(USER_NAME, PASSWORD, ENDPOINT)
    statementlist = tc.getAllStatements()
    print statementlist
    for x in statementlist['statements']:
        assert 'id' in x
def add_entry(user_data, video, seconds_watched, last_second_watched,
              detect_cheat=True):
    # TODO(csilvers): get rid of circular dependency here
    import badges.last_action_cache

    user_video = UserVideo.get_for_video_and_user_data(video, user_data,
                                                       insert_if_missing=True)

    # Cap seconds_watched at duration of video
    seconds_watched = max(0, min(seconds_watched, video.duration))
    video_points_previous = points.VideoPointCalculator(user_video)

    action_cache = badges.last_action_cache.LastActionCache.get_for_user_data(user_data)

    last_video_log = action_cache.get_last_video_log()

    # If the last video logged is not this video and the times being credited
    # overlap, don't give points for this video. Can only get points for one
    # video at a time.
    if (detect_cheat and
            last_video_log and
            last_video_log.key_for_video() != video.key()):
        dt_now = datetime.datetime.now()
        other_video_time = last_video_log.time_watched
        this_video_time = dt_now - datetime.timedelta(seconds=seconds_watched)
        if other_video_time > this_video_time:
            logging.warning("Detected overlapping video logs " +
                            "(user may be watching multiple videos?)")
            return (None, None, 0, False)

    video_log = VideoLog()
    video_log.user = user_data.user
    video_log.video = video
    video_log.video_title = video.title
    video_log.youtube_id = video.youtube_id
    video_log.seconds_watched = seconds_watched
    video_log.last_second_watched = last_second_watched

    if seconds_watched > 0:
        # TODO(csilvers): get rid of circular dependencies here
        import badges.util_badges
        import topic_models

        if user_video.seconds_watched == 0:
            gae_bingo.gae_bingo.bingo([
                "video_started_binary",  # Core metric
                "video_started_count",   # Core metric
            ])
            user_data.uservideocss_version += 1
            UserVideoCss.set_started(user_data, user_video.video,
                                     user_data.uservideocss_version)

        user_video.seconds_watched += seconds_watched
        user_data.total_seconds_watched += seconds_watched

        # Update seconds_watched of all associated topics
        video_topics = db.get(video.topic_string_keys)

        first_topic = True
        for topic in video_topics:
            user_topic = topic_models.UserTopic.get_for_topic_and_user_data(
                topic, user_data, insert_if_missing=True)
            user_topic.title = topic.standalone_title
            user_topic.seconds_watched += seconds_watched
            user_topic.last_watched = datetime.datetime.now()
            user_topic.put()

            video_log.playlist_titles.append(user_topic.title)

            if first_topic:
                action_cache.push_video_log(video_log)

            badges.util_badges.update_with_user_topic(
                user_data,
                user_topic,
                include_other_badges=first_topic,
                action_cache=action_cache)

            first_topic = False

    user_video.last_second_watched = last_second_watched
    user_video.last_watched = datetime.datetime.now()
    user_video.duration = video.duration

    user_data.record_activity(user_video.last_watched)

    video_points_total = points.VideoPointCalculator(user_video)
    video_points_received = video_points_total - video_points_previous

    just_finished_video = False
    if not user_video.completed and video_points_total >= consts.VIDEO_POINTS_BASE:
        just_finished_video = True
        user_video.completed = True
        user_data.videos_completed = -1
        TinCan.create_media(user_data, "completed", video, user_video)

        user_data.uservideocss_version += 1
        UserVideoCss.set_completed(user_data, user_video.video,
                                   user_data.uservideocss_version)

        gae_bingo.gae_bingo.bingo([
            'struggling_videos_finished',
            'video_completed_binary',  # Core metric
            'video_completed_count',   # Core metric
        ])

    video_log.is_video_completed = user_video.completed

    goals_updated = goals.models.GoalList.update_goals(
        user_data,
        lambda goal: goal.just_watched_video(user_data, user_video,
                                             just_finished_video))

    if video_points_received > 0:
        video_log.points_earned = video_points_received
        user_data.add_points(video_points_received)
        TinCan.create_media(user_data, "progressed", video, user_video)

    db.put([user_video, user_data])

    # Defer the put of VideoLog for now, as we think it might be causing hot
    # tablets and want to shift it off to an automatically-retrying task queue.
    # http://ikaisays.com/2011/01/25/app-engine-datastore-tip-monotonically-increasing-values-are-bad/
    deferred.defer(_commit_video_log, video_log,
                   _queue="video-log-queue",
                   _url="/_ah/queue/deferred_videolog")

    if user_data is not None and user_data.coaches:
        # Making a separate queue for the log summaries so we can clearly see
        # how much they are getting used
        deferred.defer(commit_log_summary_coaches, video_log, user_data.coaches,
                       _queue="log-summary-queue",
                       _url="/_ah/queue/deferred_log_summary")

    return (user_video, video_log, video_points_total, goals_updated)
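## _commit_video_log and commit_log_summary_coaches are only referenced via
## deferred.defer above and are not defined in this section. A minimal sketch
## of the former, assuming the deferred task only needs to persist the VideoLog
## entity from the retrying task queue, might look like this:
def _commit_video_log(video_log):
    ## Sketch only: the real task is defined elsewhere in the module.
    if video_log:
        video_log.put()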