def generate_fake_coachreport_logs(password="******"):
    """Generate fake coach-report activity logs for a (possibly new) teacher.

    Picks one random first name, reuses an existing FacilityUser with that
    username in the first facility (creating one with the given password if
    needed), then writes 20 paired "login"/"coachreport" UserLog rows with
    plausible timestamps over the past 10 days.

    Args:
        password: password assigned only when a new user must be created.

    Returns:
        list of (login_log, coachreport_log) tuples, one pair per iteration.
    """
    facility = Facility.objects.all()[0]
    # Choose the username ONCE so the lookup and the fallback create refer to
    # the same user.  (Previously each branch re-rolled random.choice(), so a
    # failed lookup for name A could create a user with a different name B.)
    username = random.choice(firstnames)
    try:
        t = FacilityUser.objects.get(facility=facility, username=username)
    except FacilityUser.DoesNotExist:
        t = FacilityUser(facility=facility, username=username)
        t.set_password(password)
        t.save()
    # TODO: create flags later
    num_logs = 20
    logs = []
    for _ in xrange(num_logs):
        # Login some time in the past 10 days; coach report viewed within 30
        # minutes of login; logout within 30 minutes of viewing the report.
        date_logged_in = datetime.datetime.now() - datetime.timedelta(days=random.randint(1, 10))
        date_viewed_coachreport = date_logged_in + datetime.timedelta(minutes=random.randint(0, 30))
        date_logged_out = date_viewed_coachreport + datetime.timedelta(minutes=random.randint(0, 30))
        login_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("login"),
            start_datetime=date_logged_in,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_logged_out,
        )
        logging.info("created login log for teacher %s" % t.username)
        coachreport_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("coachreport"),
            start_datetime=date_viewed_coachreport,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_viewed_coachreport,
        )
        logs.append((login_log, coachreport_log))
        logging.info("created coachreport log for teacher %s" % t.username)
    return logs
def generate_fake_coachreport_logs(password="******"):
    """Generate fake coach-report activity logs for a (possibly new) teacher.

    Picks one random first name, reuses an existing FacilityUser with that
    username in the first facility (creating one with the given password if
    needed), then writes 20 paired "login"/"coachreport" UserLog rows with
    plausible timestamps over the past 10 days.

    Args:
        password: password assigned only when a new user must be created.

    Returns:
        list of (login_log, coachreport_log) tuples, one pair per iteration.
    """
    facility = Facility.objects.all()[0]
    # Pick the username ONCE: previously random.choice() was called again in
    # the except-branch, so the created user could have a different name than
    # the one whose lookup just failed.
    username = random.choice(firstnames)
    try:
        t = FacilityUser.objects.get(
            facility=facility,
            username=username,
        )
    except FacilityUser.DoesNotExist:
        t = FacilityUser(
            facility=facility,
            username=username,
        )
        t.set_password(password)
        t.save()
    # TODO: create flags later
    num_logs = 20
    logs = []
    for _ in xrange(num_logs):
        # Login 1-10 days ago; report viewed and logout each follow within
        # 0-30 minutes of the previous event.
        date_logged_in = datetime.datetime.now() - datetime.timedelta(
            days=random.randint(1, 10))
        date_viewed_coachreport = date_logged_in + datetime.timedelta(
            minutes=random.randint(0, 30))
        date_logged_out = date_viewed_coachreport + datetime.timedelta(
            minutes=random.randint(0, 30))
        login_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("login"),
            start_datetime=date_logged_in,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_logged_out,
        )
        logging.info("created login log for teacher %s" % t.username)
        coachreport_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("coachreport"),
            start_datetime=date_viewed_coachreport,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_viewed_coachreport,
        )
        logs.append((login_log, coachreport_log))
        logging.info("created coachreport log for teacher %s" % t.username)
    return logs
def _get_user_usage_data(users, groups=None, period_start=None, period_end=None, group_id=None):
    """
    Returns facility user data, within the given date range.

    Builds two OrderedDicts keyed by primary key:
      * user_data: per-user totals (report views, logins, hours, videos,
        completed exercises, and pct_mastery = completed / total exercises).
      * group_data: the same totals aggregated per group, with a fake
        "Ungrouped" bucket keyed by pk None when appropriate.

    Args:
        users: iterable of FacilityUser objects to report on.
        groups: optional iterable of groups; defaults to the set of groups
            the given users belong to.
        period_start / period_end: optional datetime bounds on the logs.
        group_id: optional group filter; "Ungrouped" selects groupless users.

    Returns:
        (user_data, group_data) tuple of OrderedDicts.
    """
    groups = groups or set([user.group for user in users])

    # compute period start and end
    # Now compute stats, based on queried data
    # Guard against an empty exercise cache: `1. / num_exercises` below would
    # otherwise raise ZeroDivisionError.
    num_exercises = max(len(get_node_cache('Exercise')), 1)
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    video_logs = VideoLog.objects.filter(user__in=users)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    if period_start:
        exercise_logs = exercise_logs.filter(completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
        login_logs = login_logs.filter(start_datetime__gte=period_start)
    if period_end:
        exercise_logs = exercise_logs.filter(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
        login_logs = login_logs.filter(end_datetime__lte=period_end)

    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["id"] = user.pk
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group
        user_data[user.pk]["total_report_views"] = 0  # report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] = 0  # login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0  # login_stats["total_seconds__sum"] or 0)/3600.
        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.
        user_data[user.pk]["exercises_mastered"] = []
        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += 1. / num_exercises
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.
            user_data[llog["user__pk"]]["total_logins"] += 1

    # Compare the raw group_id against the sentinel.  Previously the id was
    # run through _() (gettext), which made the "Ungrouped" bucket depend on
    # the active locale; the sibling variants in this file compare directly.
    for group in list(groups) + [None] * (group_id is None or group_id == "Ungrouped"):  # None for ungrouped, if no group_id passed.
        group_pk = getattr(group, "pk", None)
        group_name = getattr(group, "name", _("Ungrouped"))
        group_data[group_pk] = {
            "id": group_pk,
            "name": group_name,
            "total_logins": 0,
            "total_hours": 0,
            "total_users": 0,
            "total_videos": 0,
            "total_exercises": 0,
            "pct_mastery": 0,
        }

    # Add group data.  Allow a fake group "Ungrouped"
    for user in users:
        group_pk = getattr(user.group, "pk", None)
        if group_pk not in group_data:
            # A user may reference a group that was filtered out or deleted;
            # skip it instead of raising KeyError (matches the guard used by
            # the sibling variants of this function in this file).
            logging.error("User %s still in nonexistent group %s!" % (user.id, group_pk))
            continue
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]
        # Incremental (running) mean over the group's members counted so far.
        total_mastery_so_far = (group_data[group_pk]["pct_mastery"] *
                                (group_data[group_pk]["total_users"] - 1) +
                                user_data[user.pk]["pct_mastery"])
        group_data[group_pk]["pct_mastery"] = total_mastery_so_far / group_data[group_pk]["total_users"]

    # Drop the fake ungrouped bucket if it is the only group and is empty.
    # (`in` rather than the deprecated dict.has_key.)
    if len(group_data) == 1 and None in group_data:
        if not group_data[None]["total_users"]:
            del group_data[None]

    return (user_data, group_data)
def _get_user_usage_data(users, groups=None, period_start=None, period_end=None, group_id=None):
    """
    Returns facility user data, within the given date range.

    Builds per-user totals (report views, logins, hours, videos, completed
    exercises, and pct_mastery = mean streak_progress over completed
    exercises), then aggregates the same totals per group, including a fake
    ungrouped bucket keyed by pk None.

    NOTE(review): when period_end is truthy it is passed to
    dateutil.parser.parse, so it is presumably a date *string* here --
    confirm against callers.

    Returns:
        (user_data, group_data) tuple of OrderedDicts keyed by pk.
    """
    groups = groups or set([user.group for user in users])

    # compute period start and end
    # Now compute stats, based on queried data
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    # Only count videos with actual watch time.
    video_logs = VideoLog.objects.filter(user__in=users, total_seconds_watched__gt=0)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    # Ignore login summaries with no accumulated time.
    login_logs = login_logs.filter(total_seconds__gt=0)
    if period_start:
        exercise_logs = exercise_logs.filter(completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
    if period_end:
        # MUST: Fix the midnight bug where period end covers up to the prior day only because
        # period end is datetime(year, month, day, hour=0, minute=0), meaning midnight of previous day.
        # Example:
        #   If period_end == '2014-12-01', we cannot include the records dated '2014-12-01 09:30'.
        #   So to fix this, we change it to '2014-12-01 23:59.999999'.
        period_end = dateutil.parser.parse(period_end)
        period_end = period_end + dateutil.relativedelta.relativedelta(days=+1, microseconds=-1)
        exercise_logs = exercise_logs.filter(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
    if period_start and period_end:
        # Re-apply both bounds as combined Q filters; videos additionally
        # require a non-null completion_timestamp, and login summaries must
        # fall entirely inside the [period_start, period_end] window.
        exercise_logs = exercise_logs.filter(
            Q(completion_timestamp__gte=period_start) &
            Q(completion_timestamp__lte=period_end)
        )
        q1 = (
            Q(completion_timestamp__isnull=False) &
            Q(completion_timestamp__gte=period_start) &
            Q(completion_timestamp__lte=period_end)
        )
        video_logs = video_logs.filter(q1)
        login_q1 = (
            Q(start_datetime__gte=period_start) &
            Q(start_datetime__lte=period_end) &
            Q(end_datetime__gte=period_start) &
            Q(end_datetime__lte=period_end)
        )
        login_logs = login_logs.filter(login_q1)

    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk", "streak_progress"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    # Initialize every user's row so the accumulation loops below can index
    # unconditionally.
    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["id"] = user.pk
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group
        user_data[user.pk]["total_report_views"] = 0  # report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] = 0  # login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0  # login_stats["total_seconds__sum"] or 0)/3600.
        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.0
        user_data[user.pk]["exercises_mastered"] = []
        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    # Accumulate raw streak_progress; it is normalized into a mean later,
    # in the per-user loop below.
    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += elog["streak_progress"]
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    # Only "login" summaries contribute hours/login counts; "coachreport"
    # summaries only bump the report-view counter.
    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.0
            user_data[llog["user__pk"]]["total_logins"] += 1

    # The `[None] * (bool)` trick appends one extra None entry (the fake
    # ungrouped group) when no specific group was requested, or when the
    # ungrouped bucket itself was requested.
    for group in list(groups) + [None] * (
        group_id == None or group_id == UNGROUPED
    ):  # None for ungrouped, if no group_id passed.
        group_pk = getattr(group, "pk", None)
        group_name = getattr(group, "name", _(UNGROUPED))
        group_title = getattr(group, "title", _(UNGROUPED))
        group_data[group_pk] = {
            "id": group_pk,
            "name": group_name,
            "title": group_title,
            "total_logins": 0,
            "total_hours": 0,
            "total_users": 0,
            "total_videos": 0,
            "total_exercises": 0,
            "pct_mastery": 0,
        }

    # Add group data.  Allow a fake group UNGROUPED
    for user in users:
        # Normalize accumulated streak_progress into a mean; `or 1` avoids
        # dividing by zero for users with no completed exercises.
        user_data[user.pk]["pct_mastery"] = user_data[user.pk]["pct_mastery"] / (
            user_data[user.pk]["total_exercises"] or 1
        )
        group_pk = getattr(user.group, "pk", None)
        if group_pk not in group_data:
            # User references a group that was filtered out or deleted; skip
            # rather than KeyError.
            logging.error("User %s still in nonexistent group %s!" % (user.id, group_pk))
            continue
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]
        # Incremental (running) mean over the group's members counted so far.
        total_mastery_so_far = (
            group_data[group_pk]["pct_mastery"] * (group_data[group_pk]["total_users"] - 1)
            + user_data[user.pk]["pct_mastery"]
        )
        group_data[group_pk]["pct_mastery"] = total_mastery_so_far / group_data[group_pk]["total_users"]

    # Drop the fake ungrouped bucket if it is the only group and is empty.
    if len(group_data) == 1 and None in group_data:
        if not group_data[None]["total_users"]:
            del group_data[None]

    return (user_data, group_data)
def _get_user_usage_data(users, groups=None, period_start=None, period_end=None, group_id=None):
    """
    Returns facility user data, within the given date range.

    Builds per-user totals (report views, logins, hours, videos, completed
    exercises, and pct_mastery = completed / total cached exercises), then
    aggregates the same totals per group, including a fake UNGROUPED bucket
    keyed by pk None.

    NOTE(review): when period_end is truthy it is passed to
    dateutil.parser.parse, so it is presumably a date *string* -- confirm
    against callers.

    Returns:
        (user_data, group_data) tuple of OrderedDicts keyed by pk.
    """
    groups = groups or set([user.group for user in users])

    # compute period start and end
    # Now compute stats, based on queried data
    # Guard against an empty exercise cache: the `1. / num_exercises` below
    # would otherwise raise ZeroDivisionError.
    num_exercises = max(len(get_exercise_cache()), 1)
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    # Only count videos with actual watch time.
    video_logs = VideoLog.objects.filter(user__in=users, total_seconds_watched__gt=0)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    # Ignore login summaries with no accumulated time.
    login_logs = login_logs.filter(total_seconds__gt=0)
    if period_start:
        exercise_logs = exercise_logs.filter(
            completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
    if period_end:
        # MUST: Fix the midnight bug where period end covers up to the prior day only because
        # period end is datetime(year, month, day, hour=0, minute=0), meaning midnight of previous day.
        # Example:
        #   If period_end == '2014-12-01', we cannot include the records dated '2014-12-01 09:30'.
        #   So to fix this, we change it to '2014-12-01 23:59.999999'.
        period_end = dateutil.parser.parse(period_end)
        period_end = period_end + dateutil.relativedelta.relativedelta(
            days=+1, microseconds=-1)
        exercise_logs = exercise_logs.filter(
            completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
    if period_start and period_end:
        # Re-apply both bounds as combined Q filters; videos additionally
        # require a non-null completion_timestamp, and login summaries must
        # fall entirely inside the window.
        exercise_logs = exercise_logs.filter(
            Q(completion_timestamp__gte=period_start) &
            Q(completion_timestamp__lte=period_end))
        q1 = Q(completion_timestamp__isnull=False) & \
            Q(completion_timestamp__gte=period_start) & \
            Q(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(q1)
        login_q1 = Q(start_datetime__gte=period_start) & Q(start_datetime__lte=period_end) & \
            Q(end_datetime__gte=period_start) & Q(end_datetime__lte=period_end)
        login_logs = login_logs.filter(login_q1)

    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    # Initialize every user's row so the accumulation loops below can index
    # unconditionally.
    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["id"] = user.pk
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group
        user_data[user.pk]["total_report_views"] = 0  # report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] = 0  # login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0  # login_stats["total_seconds__sum"] or 0)/3600.
        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.
        user_data[user.pk]["exercises_mastered"] = []
        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += 1. / num_exercises
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    # Only "login" summaries contribute hours/login counts.
    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.
            user_data[llog["user__pk"]]["total_logins"] += 1

    # `[None] * (bool)` appends one extra None entry (the fake ungrouped
    # group) when no specific group was requested, or when UNGROUPED itself
    # was requested.
    for group in list(groups) + [None] * (
            group_id is None or group_id == UNGROUPED):  # None for ungrouped, if no group_id passed.
        group_pk = getattr(group, "pk", None)
        group_name = getattr(group, "name", _(UNGROUPED))
        group_title = getattr(group, "title", _(UNGROUPED))
        group_data[group_pk] = {
            "id": group_pk,
            "name": group_name,
            "title": group_title,
            "total_logins": 0,
            "total_hours": 0,
            "total_users": 0,
            "total_videos": 0,
            "total_exercises": 0,
            "pct_mastery": 0,
        }

    # Add group data.  Allow a fake group UNGROUPED
    for user in users:
        group_pk = getattr(user.group, "pk", None)
        if group_pk not in group_data:
            # User references a group that was filtered out or deleted; skip
            # rather than KeyError.
            logging.error("User %s still in nonexistent group %s!" % (user.id, group_pk))
            continue
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]
        # Incremental (running) mean over the group's members counted so far.
        total_mastery_so_far = (group_data[group_pk]["pct_mastery"] *
                                (group_data[group_pk]["total_users"] - 1) +
                                user_data[user.pk]["pct_mastery"])
        group_data[group_pk]["pct_mastery"] = total_mastery_so_far / group_data[group_pk]["total_users"]

    # Drop the fake ungrouped bucket if it is the only group and is empty.
    # (`in` rather than the deprecated dict.has_key.)
    if len(group_data) == 1 and None in group_data:
        if not group_data[None]["total_users"]:
            del group_data[None]

    return (user_data, group_data)
def _get_user_usage_data(users, groups=None, period_start=None, period_end=None, group_id=None):
    """
    Returns facility user data, within the given date range.

    Builds per-user totals (report views, logins, hours, videos, completed
    exercises, and pct_mastery = completed / total cached exercises), then
    aggregates the same totals per group, including a fake "Ungrouped"
    bucket keyed by pk None.

    Args:
        users: iterable of FacilityUser objects to report on.
        groups: optional iterable of groups; defaults to the set of groups
            the given users belong to.
        period_start / period_end: optional datetime bounds on the logs.
        group_id: optional group filter; "Ungrouped" selects groupless users.

    Returns:
        (user_data, group_data) tuple of OrderedDicts keyed by pk.
    """
    groups = groups or set([user.group for user in users])

    # compute period start and end
    # Now compute stats, based on queried data
    # Guard against an empty exercise cache: the `1. / num_exercises` below
    # would otherwise raise ZeroDivisionError.
    num_exercises = max(len(get_node_cache('Exercise')), 1)
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    video_logs = VideoLog.objects.filter(user__in=users)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    if period_start:
        exercise_logs = exercise_logs.filter(completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
        login_logs = login_logs.filter(start_datetime__gte=period_start)
    if period_end:
        exercise_logs = exercise_logs.filter(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
        login_logs = login_logs.filter(end_datetime__lte=period_end)

    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    # Initialize every user's row so the accumulation loops below can index
    # unconditionally.
    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["id"] = user.pk
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group
        user_data[user.pk]["total_report_views"] = 0  # report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] = 0  # login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0  # login_stats["total_seconds__sum"] or 0)/3600.
        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.
        user_data[user.pk]["exercises_mastered"] = []
        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += 1. / num_exercises
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    # Only "login" summaries contribute hours/login counts.
    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.
            user_data[llog["user__pk"]]["total_logins"] += 1

    # `[None] * (bool)` appends one extra None entry (the fake ungrouped
    # group) when no specific group was requested, or when "Ungrouped"
    # itself was requested.
    for group in list(groups) + [None] * (group_id is None or group_id == "Ungrouped"):  # None for ungrouped, if no group_id passed.
        group_pk = getattr(group, "pk", None)
        group_name = getattr(group, "name", _("Ungrouped"))
        group_data[group_pk] = {
            "id": group_pk,
            "name": group_name,
            "total_logins": 0,
            "total_hours": 0,
            "total_users": 0,
            "total_videos": 0,
            "total_exercises": 0,
            "pct_mastery": 0,
        }

    # Add group data.  Allow a fake group "Ungrouped"
    for user in users:
        group_pk = getattr(user.group, "pk", None)
        if group_pk not in group_data:
            # User references a group that was filtered out or deleted; skip
            # rather than KeyError.
            logging.error("User %s still in nonexistent group %s!" % (user.id, group_pk))
            continue
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]
        # Incremental (running) mean over the group's members counted so far.
        total_mastery_so_far = (group_data[group_pk]["pct_mastery"] *
                                (group_data[group_pk]["total_users"] - 1) +
                                user_data[user.pk]["pct_mastery"])
        group_data[group_pk]["pct_mastery"] = total_mastery_so_far / group_data[group_pk]["total_users"]

    # Drop the fake ungrouped bucket if it is the only group and is empty.
    # (`in` rather than the deprecated dict.has_key.)
    if len(group_data) == 1 and None in group_data:
        if not group_data[None]["total_users"]:
            del group_data[None]

    return (user_data, group_data)
def generate_fake_exercise_logs(facility_user=None, topics=topics, start_date=None):
    """Add exercise logs for the given topics, for each of the given users.

    If no users are given, they are created.
    If no topics exist, they are taken from the list at the top of this file.

    By default, users start learning randomly between 6 months ago and now.

    Args:
        facility_user: a single FacilityUser, a list of them, or None (in
            which case fake users are generated first; the recursive case).
        topics: topic ids to generate exercise logs for.
        start_date: earliest datetime activity may be dated at; defaults to
            6 months before *now, at call time*.  (The old default was
            evaluated once at import time, so it drifted in long-running
            processes.)

    Returns:
        (exercise_logs, user_logs) lists (nested lists in the recursive case).
    """
    if start_date is None:
        start_date = datetime.datetime.now() - datetime.timedelta(days=30 * 6)
    date_diff = datetime.datetime.now() - start_date
    exercise_logs = []
    user_logs = []

    # It's not a user: probably a list.
    # Recursive case
    if not hasattr(facility_user, "username"):
        # It's NONE :-/ generate the users first!
        if not facility_user:
            (facility_user, _, _) = generate_fake_facility_users()

        for topic in topics:
            for user in facility_user:
                (elogs, ulogs) = generate_fake_exercise_logs(facility_user=user, topics=[topic], start_date=start_date)
                exercise_logs.append(elogs)
                user_logs.append(ulogs)

    # Actually generate!
    else:
        # Get (or create) user type
        try:
            user_settings = json.loads(facility_user.notes)
        except (TypeError, ValueError):
            # notes is empty/None or not valid JSON: seed fresh settings.
            # (Was a bare `except:`, which also swallowed unrelated errors
            # such as KeyboardInterrupt.)
            user_settings = sample_user_settings()
            facility_user.notes = json.dumps(user_settings)
            facility_user.save()

        # NOTE(review): computed but never used below -- kept for parity;
        # presumably meant to offset per-user start times.  Confirm.
        date_diff_started = datetime.timedelta(seconds=datediff(date_diff, units="seconds") * user_settings["time_in_program"])  # when this user started in the program, relative to NOW

        for topic in topics:
            # Get all exercises related to the topic
            exercises = get_topic_contents(topic_id=topic, kinds=["Exercise"])

            # Problem:
            #   Not realistic for students to have lots of unfinished exercises.
            #   If they start them, they tend to get stuck, right?
            #
            # So, need to make it more probable that they will finish an exercise,
            #   and less probable that they start one.
            #
            # What we need is P(streak|started), not P(streak)

            # Probability of doing any particular exercise
            p_exercise = probability_of(qty="exercise", user_settings=user_settings)
            logging.info("# exercises: %d; p(exercise)=%4.3f, user settings: %s\n" % (len(exercises), p_exercise, json.dumps(user_settings)))

            # of exercises is related to
            for j, exercise in enumerate(exercises):
                if random.random() > p_exercise:
                    continue

                # Probability of completing this exercise, and .. proportion of attempts
                p_attempts = probability_of(qty="attempts", user_settings=user_settings)
                attempts = int(random.random() * p_attempts * 30 + 10)  # always enough to have completed

                elog, created = ExerciseLog.objects.get_or_create(user=facility_user, exercise_id=exercise["id"])
                alogs = []
                for i in range(0, attempts):
                    # Spread attempt timestamps evenly between start_date and now.
                    alog = AttemptLog.objects.create(user=facility_user, exercise_id=exercise["id"], timestamp=start_date + date_diff * i / attempts)
                    alogs.append(alog)
                    if random.random() < user_settings["speed_of_learning"]:
                        alog.correct = True
                        alog.points = 10
                        alog.save()

                elog.attempts = attempts
                elog.latest_activity_timestamp = start_date + date_diff
                # Streak/points reflect only the 10 most recent attempts.
                elog.streak_progress = sum([log.correct for log in alogs][-10:]) * 10
                elog.points = sum([log.points for log in alogs][-10:])
                elog.save()

                exercise_logs.append(elog)

        # Generate a user log regarding exercises done
        duration = random.randint(10 * 60, 120 * 60)  # 10 - 120 minutes in seconds
        # Clamp so randint's upper bound cannot go negative when start_date
        # is more recent than the chosen session duration.
        latest_offset = max(0, int(date_diff.total_seconds() - duration))
        # Use the qualified name: the bare `timedelta` relied on a separate
        # `from datetime import timedelta`, while the rest of this function
        # consistently uses datetime.timedelta.
        exercise_start = start_date + datetime.timedelta(seconds=random.randint(0, latest_offset))
        exercise_end = exercise_start + datetime.timedelta(seconds=duration)
        ulog = UserLog(
            user=facility_user,
            activity_type=UserLog.get_activity_int("login"),
            start_datetime=exercise_start,
            end_datetime=exercise_end,
            last_active_datetime=exercise_end,
        )
        ulog.save()
        user_logs.append(ulog)

    return (exercise_logs, user_logs)