def generate_fake_coachreport_logs():
    """Create fake login/coachreport UserLog pairs for one random teacher.

    Picks (or creates) a teacher in the first facility with a fixed fake
    password, then generates 20 pairs of logs with randomized timestamps.

    Returns:
        list of (login_log, coachreport_log) tuples.
    """
    hashed_pw = make_password('hellothere')
    teacher, _ = FacilityUser.objects.get_or_create(
        facility=Facility.objects.all()[0],
        username=random.choice(firstnames),
        defaults={
            'password': hashed_pw,
            'is_teacher': True,
        }
    )
    # TODO: create flags later
    fake_logs = []
    for _ in xrange(20):
        # Log in some day in the past 10 days, view the coach report within
        # half an hour, and log out within another half hour.
        login_time = datetime.datetime.now() - datetime.timedelta(days=random.randint(1, 10))
        report_time = login_time + datetime.timedelta(minutes=random.randint(0, 30))
        logout_time = report_time + datetime.timedelta(minutes=random.randint(0, 30))
        login_entry = UserLog.objects.create(
            user=teacher,
            activity_type=UserLog.get_activity_int("login"),
            start_datetime=login_time,
            last_active_datetime=report_time,
            end_datetime=logout_time,
        )
        logging.info("created login log for teacher %s" % teacher.username)
        report_entry = UserLog.objects.create(
            user=teacher,
            activity_type=UserLog.get_activity_int("coachreport"),
            start_datetime=report_time,
            last_active_datetime=report_time,
            end_datetime=report_time,
        )
        fake_logs.append((login_entry, report_entry))
        logging.info("created coachreport log for teacher %s" % teacher.username)
    return fake_logs
def query_logs(users, items, logtype, logdict):
    """Get a specified subset of logs for a particular set of users.

    users: list of users to query against.
    items: list of either exercise ids or video ids to query.
    logtype: "exercise", "video", "activity", or "summaryactivity"
        (the latter two only when UserLog is enabled).
    logdict: user-keyed dictionary of lists (presumed to be pre-populated
        with empty lists by the caller); matching log dicts are appended
        to each user's list, ordered by completion/activity timestamp.

    Returns logdict (also mutated in place).
    Raises ValueError for an unrecognized logtype (a programming error).
    """
    if logtype == "exercise":
        all_logs = ExerciseLog.objects.filter(user__in=users, exercise_id__in=items).values(
            'user', 'complete', 'exercise_id', 'attempts', 'points', 'struggling',
            'completion_timestamp', 'streak_progress').order_by('completion_timestamp')
    elif logtype == "video":
        all_logs = VideoLog.objects.filter(user__in=users, video_id__in=items).values(
            'user', 'complete', 'video_id', 'total_seconds_watched',
            'completion_timestamp', 'points').order_by('completion_timestamp')
    elif logtype == "activity" and UserLog.is_enabled():
        all_logs = UserLog.objects.filter(user__in=users).values(
            'user', 'last_active_datetime', 'total_seconds').order_by('last_active_datetime')
    elif logtype == "summaryactivity" and UserLog.is_enabled():
        all_logs = UserLogSummary.objects.filter(user__in=users).values(
            'user', 'device', 'total_seconds').order_by('end_datetime')
    else:
        # Was `assert False`, which is silently skipped under `python -O`;
        # raise explicitly so the programming error is always reported.
        raise ValueError("Unknown log type: '%s'" % logtype)
    for log in all_logs:
        logdict[log['user']].append(log)
    return logdict
def login(request, facility):
    """Log in either a Django (admin) user or a facility user.

    GET renders the unbound login form; POST authenticates and redirects.
    `facility` may be None; when given, its id pre-selects the facility
    in the login form.
    """
    facility_id = facility and facility.id or None
    facilities = list(Facility.objects.all())

    # Fix for #1211: refresh cached facility info when it's free and relevant
    refresh_session_facility_info(request, facility_count=len(facilities))

    if request.method == 'POST':
        # log out any Django user or facility user
        logout(request)

        username = request.POST.get("username", "")
        password = request.POST.get("password", "")

        # first try logging in as a Django user
        user = authenticate(username=username, password=password)
        if user:
            auth_login(request, user)
            # NOTE(review): request.next is not a standard attribute --
            # presumably set by middleware; confirm before relying on it.
            return HttpResponseRedirect(request.next or reverse("easy_admin"))

        # try logging in as a facility user
        form = LoginForm(data=request.POST, request=request, initial={"facility": facility_id})
        if form.is_valid():
            user = form.get_user()

            try:
                UserLog.begin_user_activity(user, activity_type="login", language=request.language)  # Success! Log the event (ignoring validation failures)
            except ValidationError as e:
                logging.error("Failed to begin_user_activity upon login: %s" % e)

            request.session["facility_user"] = user
            messages.success(request, _("You've been logged in! We hope you enjoy your time with KA Lite ") + _("-- be sure to log out when you finish."))

            # Send them back from whence they came
            landing_page = form.cleaned_data["callback_url"]
            if not landing_page:
                # Just going back to the homepage? We can do better than that.
                landing_page = reverse("coach_reports") if form.get_user().is_teacher else None
                landing_page = landing_page or (reverse("account_management") if not settings.package_selected("RPi") else reverse("homepage"))

            return HttpResponseRedirect(form.non_field_errors() or request.next or landing_page)

        else:
            messages.error(
                request,
                _("There was an error logging you in. Please correct any errors listed below, and try again."),
            )

    else:  # render the unbound login form
        referer = urlparse.urlparse(request.META["HTTP_REFERER"]).path if request.META.get("HTTP_REFERER") else None
        # never use the homepage as the referer
        if referer in [reverse("homepage"), reverse("add_facility_student")]:
            referer = None

        form = LoginForm(initial={"facility": facility_id, "callback_url": referer})

    return {
        "form": form,
        "facilities": facilities,
    }
def generate_fake_coachreport_logs(password="******"):
    """Build fake login/coachreport UserLog pairs for a randomly named teacher.

    Returns a list of 20 (login_log, coachreport_log) tuples.

    NOTE(review): `set_password` is not followed by `save()`, so the password
    never reaches the database -- presumably acceptable for fake data, but
    worth confirming.
    """
    teacher, _ = FacilityUser.objects.get_or_create(
        facility=Facility.objects.all()[0],
        username=random.choice(firstnames)
    )
    teacher.set_password(password)
    # TODO: create flags later
    fake_logs = []
    for _ in xrange(20):
        # Randomized session: login in the past 10 days, coach-report view
        # and logout each within half an hour of the previous event.
        login_time = datetime.datetime.now() - datetime.timedelta(days=random.randint(1, 10))
        viewed_time = login_time + datetime.timedelta(minutes=random.randint(0, 30))
        logout_time = viewed_time + datetime.timedelta(minutes=random.randint(0, 30))
        login_entry = UserLog.objects.create(
            user=teacher,
            activity_type=UserLog.get_activity_int("login"),
            start_datetime=login_time,
            last_active_datetime=viewed_time,
            end_datetime=logout_time,
        )
        logging.info("created login log for teacher %s" % teacher.username)
        report_entry = UserLog.objects.create(
            user=teacher,
            activity_type=UserLog.get_activity_int("coachreport"),
            start_datetime=viewed_time,
            last_active_datetime=viewed_time,
            end_datetime=viewed_time,
        )
        fake_logs.append((login_entry, report_entry))
        logging.info("created coachreport log for teacher %s" % teacher.username)
    return fake_logs
def query_logs(users, items, logtype, logdict):
    """Get a specified subset of logs for a particular set of users.

    users: list of users to query against.
    items: list of either exercise ids or youtube ids to query.
    logtype: "exercise", "video", "activity", or "summaryactivity"
        (the latter two only when UserLog is enabled).
    logdict: user-keyed dictionary of lists (presumed to be pre-populated
        with empty lists by the caller); matching log dicts are appended
        to each user's list, ordered by completion/activity timestamp.

    Returns logdict (also mutated in place).
    Raises ValueError for an unrecognized logtype (a programming error).
    """
    if logtype == "exercise":
        all_logs = ExerciseLog.objects.filter(
            user__in=users, exercise_id__in=items).values(
                'user', 'complete', 'exercise_id', 'attempts', 'points',
                'struggling', 'completion_timestamp',
                'streak_progress').order_by('completion_timestamp')
    elif logtype == "video":
        all_logs = VideoLog.objects.filter(
            user__in=users, youtube_id__in=items).values(
                'user', 'complete', 'youtube_id', 'total_seconds_watched',
                'completion_timestamp', 'points').order_by('completion_timestamp')
    elif logtype == "activity" and UserLog.is_enabled():
        all_logs = UserLog.objects.filter(user__in=users).values(
            'user', 'last_active_datetime', 'total_seconds').order_by('last_active_datetime')
    elif logtype == "summaryactivity" and UserLog.is_enabled():
        all_logs = UserLogSummary.objects.filter(user__in=users).values(
            'user', 'device', 'total_seconds').order_by('end_datetime')
    else:
        # ValueError instead of bare Exception: more specific, and still a
        # subclass of Exception, so existing broad handlers keep working.
        raise ValueError("Unknown log type: '%s'" % logtype)
    for log in all_logs:
        logdict[log['user']].append(log)
    return logdict
def login(request, facility):
    """Log in either a Django (admin) user or a facility user.

    GET renders the unbound login form; POST authenticates and redirects
    (teachers to coach reports, others to account management, or the
    homepage on RPi builds).
    """
    facilities = Facility.objects.all()
    facility_id = facility and facility.id or None

    if request.method == 'POST':
        # log out any Django user or facility user
        logout(request)
        username = request.POST.get("username", "")
        password = request.POST.get("password", "")

        # first try logging in as a Django user
        user = authenticate(username=username, password=password)
        if user:
            auth_login(request, user)
            return HttpResponseRedirect(request.next or reverse("easy_admin"))

        # try logging in as a facility user
        form = LoginForm(data=request.POST, request=request, initial={"facility": facility_id})
        if form.is_valid():
            user = form.get_user()
            try:
                UserLog.begin_user_activity(
                    user, activity_type="login"
                )  # Success! Log the event (ignoring validation failures)
            except ValidationError as e:
                logging.error("Failed to begin_user_activity upon login: %s" % e)
            request.session["facility_user"] = user
            messages.success(
                request,
                _("You've been logged in! We hope you enjoy your time with KA Lite "
                  ) + _("-- be sure to log out when you finish."))
            # Choose the landing page: coach reports for teachers, otherwise
            # account management (homepage on the RPi package).
            landing_page = reverse(
                "coach_reports") if form.get_user().is_teacher else None
            landing_page = landing_page or (reverse("account_management")
                                            if settings.CONFIG_PACKAGE != "RPi"
                                            else reverse("homepage"))
            # Form errors / explicit ?next= take priority over the landing page.
            return HttpResponseRedirect(form.non_field_errors() or request.next or landing_page)
        else:
            messages.error(
                request,
                strip_tags(form.non_field_errors()) or
                _("There was an error logging you in. Please correct any errors listed below, and try again."
                  ))
    else:  # render the unbound login form
        form = LoginForm(initial={"facility": facility_id})

    return {"form": form, "facilities": facilities}
def logout(request):
    """End any facility-user activity log, clear the session, and redirect.

    Redirects to the `next` query parameter when it is a local path
    (starting with "/"), otherwise to the site root.
    """
    if "facility_user" in request.session:
        # Logout, ignore any errors.
        try:
            UserLog.end_user_activity(request.session["facility_user"], activity_type="login")
        except ValidationError as e:
            logging.error("Failed to end_user_activity upon logout: %s" % e)
        del request.session["facility_user"]
    auth_logout(request)
    next = request.GET.get("next", reverse("homepage"))
    # Only allow local redirects.  `startswith` also safely handles an empty
    # ?next= value (the original `next[0]` raised IndexError on "").
    if not next.startswith("/"):
        next = "/"
    return HttpResponseRedirect(next)
def login(request, facility):
    """Log in either a Django (admin) user or a facility user.

    GET renders the unbound login form; POST authenticates and redirects
    (teachers to coach reports, everyone else to the homepage).
    """
    facilities = Facility.objects.all()
    facility_id = facility and facility.id or None

    if request.method == 'POST':
        # log out any Django user or facility user
        logout(request)
        username = request.POST.get("username", "")
        password = request.POST.get("password", "")

        # first try logging in as a Django user
        user = authenticate(username=username, password=password)
        if user:
            auth_login(request, user)
            return HttpResponseRedirect(request.next or reverse("easy_admin"))

        # try logging in as a facility user
        form = LoginForm(data=request.POST, request=request, initial={"facility": facility_id})
        if form.is_valid():
            user = form.get_user()
            try:
                UserLog.begin_user_activity(user, activity_type="login")  # Success! Log the event (ignoring validation failures)
            except ValidationError as e:
                logging.debug("Failed to begin_user_activity upon login: %s" % e)
            request.session["facility_user"] = user
            messages.success(request, _("You've been logged in! We hope you enjoy your time with KA Lite ") + _("-- be sure to log out when you finish."))
            # BUG FIX: the conditional expression binds looser than `or`, so
            # the original expression resolved to just reverse("homepage") for
            # non-teachers, discarding form errors and the ?next= redirect.
            # Compute the fallback first, then apply the `or` chain.
            landing_page = reverse("coach_reports") if form.get_user().is_teacher else reverse("homepage")
            return HttpResponseRedirect(form.non_field_errors() or request.next or landing_page)
        else:
            messages.error(
                request,
                strip_tags(form.non_field_errors()) or _("There was an error logging you in. Please correct any errors listed below, and try again.")
            )
    else:  # render the unbound login form
        form = LoginForm(initial={"facility": facility_id})

    return {
        "form": form,
        "facilities": facilities
    }
def test_query_login_teacher(self):
    """Check the # of queries when logging in as a teacher."""
    password = self._gen_valid_password()
    teacher = FacilityUser(is_teacher=True, username="******", facility=self.facility)
    teacher.set_password(password)
    teacher.save()

    # UserLog-enabled builds issue 3 extra queries for activity tracking.
    query_budget = 39 + 3 * UserLog.is_enabled()
    with self.assertNumQueries(query_budget):
        self.browser_login_teacher("t1", password, self.facility)
def test_query_login_student(self):
    """Check the # of queries when logging in as a student."""
    password = self._gen_valid_password()
    student = FacilityUser(is_teacher=False, username="******", facility=self.facility)
    student.set_password(password)
    student.save()

    # UserLog-enabled builds issue 3 extra queries for activity tracking.
    query_budget = 39 + 3 * UserLog.is_enabled()
    with self.assertNumQueries(query_budget):
        self.browser_login_student("s1", password, self.facility)
def login():
    """User login endpoint.

    Expects a JSON body with keys: email, password.
    On success, stores name/email/user_id in the session and records a
    UserLog row with the client IP.  Returns a JSON errno/errmsg payload.
    """
    # Log the raw request payload for debugging
    current_app.logger.info("request_data: {}".format(request.get_data()))
    try:
        current_app.logger.info("request_json: {}".format(request.get_json()))
        req_dict = request.get_json()
    except Exception as e:
        current_app.logger.info(e)
        return jsonify(errno=RET.NOTJSON, errmsg="参数非Json格式")
    if req_dict is None:
        return jsonify(errno=RET.NOTJSON, errmsg="参数非Json格式")

    email = req_dict.get("email")
    password = req_dict.get("password")

    # Parameter validation: both fields must be present
    if not all([email, password]):
        return jsonify(errno=RET.PARAMERR, errmsg="参数不完整")

    # Validate the email format
    if not re.match(r'^([\w]+\.*)([\w]+)\@[\w]+\.\w{3}(\.\w{2}|)$', email):
        return jsonify(errno=RET.PARAMERR, errmsg="邮箱格式错误")

    user_ip = request.remote_addr  # client IP address

    # Look up the user record by email
    try:
        user = User.query.filter_by(email=email).first()
    except Exception as e:
        current_app.logger.error(e)
        return jsonify(errno=RET.DBERR, errmsg="获取用户信息失败")

    # Verify the submitted password against the stored hash
    if user is None or not user.check_password(password):
        # NOTE(review): this logs the raw password, leaking credentials into
        # the log files; consider removing or masking this line.
        current_app.logger.error(password)
        return jsonify(errno=RET.DATAERR, errmsg="用户名或密码错误")

    # Record the login event
    user_log = UserLog(user_id=user.id, ip=user_ip)
    try:
        db.session.add(user_log)
        db.session.commit()
    except Exception as e:
        current_app.logger.error(e)
        # Roll back so the session stays usable after the failed commit.
        db.session.rollback()
        # BUG FIX: was `errmag=`, so the error message key was wrong.
        return jsonify(errno=RET.DBERR, errmsg="数据库异常")

    # On success, persist the login state in the session
    session["name"] = user.name
    session["email"] = user.email
    session["user_id"] = user.id

    return jsonify(errno=RET.OK, errmsg="登录成功")
def test_query_login_student(self):
    """Check the # of queries when logging in as a student."""
    secret = self._gen_valid_password()
    student = FacilityUser(is_teacher=False, username="******", facility=self.facility)
    student.set_password(secret)
    student.save()

    # Three extra tracking queries when UserLog is enabled.
    expected = 39 + 3 * UserLog.is_enabled()
    with self.assertNumQueries(expected):
        self.browser_login_student("s1", secret, self.facility)
def test_query_login_teacher(self):
    """Check the # of queries when logging in as a teacher."""
    secret = self._gen_valid_password()
    teacher = FacilityUser(is_teacher=True, username="******", facility=self.facility)
    teacher.set_password(secret)
    teacher.save()

    # Three extra tracking queries when UserLog is enabled.
    expected = 39 + 3 * UserLog.is_enabled()
    with self.assertNumQueries(expected):
        self.browser_login_teacher("t1", secret, self.facility)
def account_management(request, org_id=None):
    """Render the student account view, recording a 'coachreport' visit.

    Only logs 'coachreport' activity for students (otherwise it's hard to
    compare teachers), and skips logging when the user just arrived from
    the login page.
    """
    session = request.session
    came_from_login = reverse("login") in request.META.get("HTTP_REFERER", "")
    is_student = "facility_user" in session and not session["facility_user"].is_teacher

    if is_student and not came_from_login:
        try:
            # Log a "begin" and end here
            user = session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update student userlog activity: %s" % e)

    return student_view_context(request)
def login(request, facility):
    """Log in either a Django (admin) user or a facility user.

    GET renders the unbound login form (pre-filling the callback URL from
    the referer); POST authenticates and redirects.
    """
    facility_id = facility and facility.id or None
    facilities = list(Facility.objects.all())

    # Fix for #1211: refresh cached facility info when it's free and relevant
    refresh_session_facility_info(request, facility_count=len(facilities))

    if request.method == 'POST':
        # log out any Django user or facility user
        logout(request)
        username = request.POST.get("username", "")
        password = request.POST.get("password", "")

        # first try logging in as a Django user
        user = authenticate(username=username, password=password)
        if user:
            auth_login(request, user)
            return HttpResponseRedirect(request.next or reverse("easy_admin"))

        # try logging in as a facility user
        form = LoginForm(data=request.POST, request=request, initial={"facility": facility_id})
        if form.is_valid():
            user = form.get_user()
            try:
                UserLog.begin_user_activity(
                    user, activity_type="login", language=request.language
                )  # Success! Log the event (ignoring validation failures)
            except ValidationError as e:
                logging.error("Failed to begin_user_activity upon login: %s" % e)
            request.session["facility_user"] = user
            messages.success(
                request,
                _("You've been logged in! We hope you enjoy your time with KA Lite "
                  ) + _("-- be sure to log out when you finish."))

            # Send them back from whence they came
            landing_page = form.cleaned_data["callback_url"]
            if not landing_page:
                # Just going back to the homepage? We can do better than that.
                landing_page = reverse(
                    "coach_reports") if form.get_user().is_teacher else None
                landing_page = landing_page or (
                    reverse("account_management")
                    if not settings.package_selected("RPi") else
                    reverse("homepage"))
            # Form errors / explicit ?next= take priority over the landing page.
            return HttpResponseRedirect(form.non_field_errors() or
                                        request.next or landing_page)
        else:
            messages.error(
                request,
                _("There was an error logging you in. Please correct any errors listed below, and try again."
                  ),
            )
    else:  # render the unbound login form
        referer = urlparse.urlparse(
            request.META["HTTP_REFERER"]).path if request.META.get(
                "HTTP_REFERER") else None
        # never use the homepage as the referer
        if referer in [reverse("homepage"), reverse("add_facility_student")]:
            referer = None
        form = LoginForm(initial={
            "facility": facility_id,
            "callback_url": referer
        })

    return {
        "form": form,
        "facilities": facilities,
        "sign_up_url": reverse("add_facility_student"),
    }
def facility_usage(request, facility_id, org_id=None, zone_id=None):
    """Summarize per-user and per-group usage stats for one facility.

    Returns a template context with org/zone/facility plus OrderedDicts of
    per-user and per-group aggregates (logins, hours, videos, exercises,
    mastery percentage).
    """
    # Basic data
    org = get_object_or_None(Organization, pk=org_id) if org_id else None
    zone = get_object_or_None(Zone, pk=zone_id) if zone_id else None
    facility = get_object_or_404(Facility, pk=facility_id)
    groups = FacilityGroup.objects.filter(facility=facility).order_by("name")
    users = FacilityUser.objects.filter(facility=facility).order_by("last_name")

    # Accumulating data
    len_all_exercises = len(topicdata.NODE_CACHE['Exercise'])
    group_data = collections.OrderedDict()
    user_data = collections.OrderedDict()
    for user in users:
        # NOTE(review): several queries per user -- O(n) query count.
        exercise_logs = ExerciseLog.objects.filter(user=user)
        # Sum("complete") presumably counts completed exercises -- confirm
        # `complete` is stored as a 0/1 value.
        exercise_stats = {"count": exercise_logs.count(), "total_mastery": exercise_logs.aggregate(Sum("complete"))["complete__sum"]}
        video_stats = {"count": VideoLog.objects.filter(user=user).count()}
        login_stats = UserLogSummary.objects \
            .filter(user=user, activity_type=UserLog.get_activity_int("login")) \
            .aggregate(Sum("count"), Sum("total_seconds"))

        user_data[user.pk] = {
            "first_name": user.first_name,
            "last_name": user.last_name,
            "name": user.get_name(),
            "group": user.group,
            "total_logins": login_stats["count__sum"] or 0,
            "total_hours": (login_stats["total_seconds__sum"] or 0)/3600.,
            "total_videos": video_stats["count"],
            "total_exercises": exercise_stats["count"],
            "pct_mastery": (exercise_stats["total_mastery"] or 0)/float(len_all_exercises),
        }

        group = user.group
        if group:
            if not group.pk in group_data:
                group_data[group.pk] = {
                    "name": group.name,
                    "total_logins": 0,
                    "total_hours": 0,
                    "total_users": 0,
                    "total_videos": 0,
                    "total_exercises": 0,
                    "pct_mastery": 0,
                }
            group_data[group.pk]["total_users"] += 1
            group_data[group.pk]["total_logins"] += user_data[user.pk]["total_logins"]
            group_data[group.pk]["total_hours"] += user_data[user.pk]["total_hours"]
            group_data[group.pk]["total_videos"] += user_data[user.pk]["total_videos"]
            group_data[group.pk]["total_exercises"] += user_data[user.pk]["total_exercises"]
            # Running mean: expand the previous mean over (n-1) users, add
            # this user's mastery, then divide by the new user count.
            total_mastery_so_far = (group_data[group.pk]["pct_mastery"] * (group_data[group.pk]["total_users"] - 1) + user_data[user.pk]["pct_mastery"])
            group_data[group.pk]["pct_mastery"] = total_mastery_so_far / group_data[group.pk]["total_users"]

    return {
        "org": org,
        "zone": zone,
        "facility": facility,
        "groups": group_data,
        "users": user_data,
    }
def test_query_login_admin(self):
    """Check the # of queries when logging in as an admin."""
    # Admin login performs no UserLog tracking queries.
    query_budget = 38 + 0 * UserLog.is_enabled()
    with self.assertNumQueries(query_budget):
        self.browser_login_admin()
def test_query_logout_teacher(self):
    """Check the # of queries when logging out as a teacher."""
    self.test_query_login_teacher()
    # Logout closes the activity log, hence the extra UserLog queries.
    query_budget = 16 + 11 * UserLog.is_enabled()
    with self.assertNumQueries(query_budget):
        self.browser_logout_user()
def _get_user_usage_data(users, period_start=None, period_end=None):
    """
    Returns facility user data, within the given date range.

    Builds two OrderedDicts:
      * user_data: per-user totals (report views, logins, hours, completed
        exercises/videos with their ids, mastery percentage);
      * group_data: the same totals aggregated per group, with a fake
        "Ungrouped" bucket (key None) for users without a group.
    Returns (user_data, group_data).
    """
    # compute period start and end
    # Now compute stats, based on queried data
    num_exercises = len(topicdata.NODE_CACHE['Exercise'])
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    video_logs = VideoLog.objects.filter(user__in=users)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    if period_start:
        exercise_logs = exercise_logs.filter(completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
        login_logs = login_logs.filter(start_datetime__gte=period_start)
    if period_end:
        exercise_logs = exercise_logs.filter(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
        login_logs = login_logs.filter(end_datetime__lte=period_end)

    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    # Seed a zeroed record per user; the log loops below fill these in.
    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group
        user_data[user.pk]["total_report_views"] = 0  #report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] = 0  # login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0  #login_stats["total_seconds__sum"] or 0)/3600.
        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.
        user_data[user.pk]["exercises_mastered"] = []
        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    # Each completed exercise contributes 1/num_exercises to mastery.
    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += 1. / num_exercises
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    # Activity summaries split into coach-report views vs. login time.
    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.
            user_data[llog["user__pk"]]["total_logins"] += 1

    # Add group data. Allow a fake group "Ungrouped"
    for user in users:
        group_pk = getattr(user.group, "pk", None)
        group_name = getattr(user.group, "name", _("Ungrouped"))
        if not group_pk in group_data:
            group_data[group_pk] = {
                "name": group_name,
                "total_logins": 0,
                "total_hours": 0,
                "total_users": 0,
                "total_videos": 0,
                "total_exercises": 0,
                "pct_mastery": 0,
            }
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]
        # Running mean: expand the previous mean over (n-1) users, add this
        # user's mastery, then divide by the new user count.
        total_mastery_so_far = (group_data[group_pk]["pct_mastery"] *
                                (group_data[group_pk]["total_users"] - 1) +
                                user_data[user.pk]["pct_mastery"])
        group_data[group_pk]["pct_mastery"] = total_mastery_so_far / group_data[group_pk]["total_users"]

    return (user_data, group_data)
def facility_usage(request, facility_id, org_id=None, zone_id=None):
    """Summarize per-user and per-group usage stats for one facility.

    Returns a template context with org/zone/facility plus OrderedDicts of
    per-user and per-group aggregates (logins, hours, videos, exercises,
    mastery percentage).
    """
    # Basic data
    org = get_object_or_None(Organization, pk=org_id) if org_id else None
    zone = get_object_or_None(Zone, pk=zone_id) if zone_id else None
    facility = get_object_or_404(Facility, pk=facility_id)
    groups = FacilityGroup.objects.filter(facility=facility).order_by("name")
    users = FacilityUser.objects.filter(
        facility=facility).order_by("last_name")

    # Accumulating data
    len_all_exercises = len(topicdata.NODE_CACHE['Exercise'])
    group_data = collections.OrderedDict()
    user_data = collections.OrderedDict()
    for user in users:
        # NOTE(review): several queries per user -- O(n) query count.
        exercise_logs = ExerciseLog.objects.filter(user=user)
        # Sum("complete") presumably counts completed exercises -- confirm
        # `complete` is stored as a 0/1 value.
        exercise_stats = {
            "count":
            exercise_logs.count(),
            "total_mastery":
            exercise_logs.aggregate(Sum("complete"))["complete__sum"]
        }
        video_stats = {"count": VideoLog.objects.filter(user=user).count()}
        login_stats = UserLogSummary.objects \
            .filter(user=user, activity_type=UserLog.get_activity_int("login")) \
            .aggregate(Sum("count"), Sum("total_seconds"))

        user_data[user.pk] = {
            "first_name": user.first_name,
            "last_name": user.last_name,
            "name": user.get_name(),
            "group": user.group,
            "total_logins": login_stats["count__sum"] or 0,
            "total_hours": (login_stats["total_seconds__sum"] or 0) / 3600.,
            "total_videos": video_stats["count"],
            "total_exercises": exercise_stats["count"],
            "pct_mastery": (exercise_stats["total_mastery"] or 0) / float(len_all_exercises),
        }

        group = user.group
        if group:
            if not group.pk in group_data:
                group_data[group.pk] = {
                    "name": group.name,
                    "total_logins": 0,
                    "total_hours": 0,
                    "total_users": 0,
                    "total_videos": 0,
                    "total_exercises": 0,
                    "pct_mastery": 0,
                }
            group_data[group.pk]["total_users"] += 1
            group_data[group.pk]["total_logins"] += user_data[user.pk]["total_logins"]
            group_data[group.pk]["total_hours"] += user_data[user.pk]["total_hours"]
            group_data[group.pk]["total_videos"] += user_data[user.pk]["total_videos"]
            group_data[group.pk]["total_exercises"] += user_data[user.pk]["total_exercises"]
            # Running mean: expand the previous mean over (n-1) users, add
            # this user's mastery, then divide by the new user count.
            total_mastery_so_far = (group_data[group.pk]["pct_mastery"] *
                                    (group_data[group.pk]["total_users"] - 1) +
                                    user_data[user.pk]["pct_mastery"])
            group_data[group.pk]["pct_mastery"] = total_mastery_so_far / group_data[group.pk]["total_users"]

    return {
        "org": org,
        "zone": zone,
        "facility": facility,
        "groups": group_data,
        "users": user_data,
    }
def test_query_logout_student(self):
    """Check the # of queries when logging out as a student."""
    self.test_query_login_student()
    # Logout closes the activity log, hence the extra UserLog queries.
    query_budget = 14 + 11 * UserLog.is_enabled()
    with self.assertNumQueries(query_budget):
        self.browser_logout_user()
def test_query_logout_admin(self):
    """Check the # of queries when logging out as an admin."""
    self.test_query_login_admin()
    # Admin logout performs no UserLog tracking queries.
    query_budget = 17 + 0 * UserLog.is_enabled()
    with self.assertNumQueries(query_budget):
        self.browser_logout_user()
"key": "user:total_seconds", "name": _("Active Time Per Login"), "type": "number", "description": "Duration of each login session.", "noscatter": True, "timeline": True }, { "key": "user:last_active_datetime", "name": _("Time Session Completed"), "type": "datetime", "description": _("Day/time the login session finished.") }, ] if UserLog.is_enabled(): stats_dict.extend(user_log_stats_dict) def get_data_form(request, *args, **kwargs): """Get the basic data form, by combining information from keyword arguments and the request.REQUEST object. Along the way, check permissions to make sure whatever's being requested is OK. Request objects get priority over keyword args. """ assert not args, "all non-request args should be keyword args" # Pull the form parameters out of the request or data = dict() # Default to empty string, as it makes template handling cleaner later.
def test_query_logout_student(self):
    """Check the # of queries when logging out as a student."""
    self.test_query_login_student()
    # 11 extra queries when UserLog activity tracking is enabled.
    expected = 14 + 11 * UserLog.is_enabled()
    with self.assertNumQueries(expected):
        self.browser_logout_user()
def tabular_view(request, facility, report_type="exercise"):
    """Tabular view also gets data server-side.

    Renders a per-student table of exercise or video logs for one
    knowledge-map topic, narrowed by group or facility where requested.
    Raises Http404 for an unknown report_type.
    """
    # Get a list of topics (sorted) and groups
    topics = get_knowledgemap_topics()
    (groups, facilities) = get_accessible_objects_from_logged_in_user(request)
    context = plotting_metadata_context(request, facility=facility)
    context.update({
        "report_types": ("exercise", "video"),
        "request_report_type": report_type,
        "topics": topics,
    })

    # get querystring info
    topic_id = request.GET.get("topic", "")
    # No valid data; just show generic
    if not topic_id or not re.match("^[\w\-]+$", topic_id):
        return context

    group_id = request.GET.get("group", "")
    if group_id:
        # Narrow by group
        users = FacilityUser.objects.filter(
            group=group_id, is_teacher=False).order_by("last_name", "first_name")
    elif facility:
        # Narrow by facility (renamed loop var: was `dict`, shadowing the builtin)
        search_groups = [gdict["groups"] for gdict in groups if gdict["facility"] == facility.id]
        assert len(search_groups) <= 1, "should only have one or zero matches."

        # Return groups and ungrouped
        search_groups = search_groups[0]  # make sure to include ungrouped students
        users = FacilityUser.objects.filter(
            Q(group__in=search_groups) | Q(group=None, facility=facility),
            is_teacher=False).order_by("last_name", "first_name")
    else:
        # Show all (including ungrouped)
        # BUG FIX: `search_groups` was used before assignment in this branch,
        # raising NameError; initialize it before accumulating.
        search_groups = []
        for groups_dict in groups:
            search_groups += groups_dict["groups"]
        users = FacilityUser.objects.filter(
            Q(group__in=search_groups) | Q(group=None),
            is_teacher=False).order_by("last_name", "first_name")

    # We have enough data to render over a group of students
    # Get type-specific information
    if report_type == "exercise":
        # Fill in exercises
        exercises = get_topic_exercises(topic_id=topic_id)
        exercises = sorted(exercises, key=lambda e: (e["h_position"], e["v_position"]))
        context["exercises"] = exercises

        # More code, but much faster
        exercise_names = [ex["name"] for ex in context["exercises"]]
        # Get students
        context["students"] = []
        exlogs = ExerciseLog.objects \
            .filter(user__in=users, exercise_id__in=exercise_names) \
            .order_by("user__last_name", "user__first_name")\
            .values("user__id", "struggling", "complete", "exercise_id")
        exlogs = list(exlogs)  # force the query to be evaluated

        # Both `users` and `exlogs` are sorted by user name, so one linear
        # index walk pairs each user with their block of logs.
        exlog_idx = 0
        for user in users:
            log_table = {}
            while exlog_idx < len(exlogs) and exlogs[exlog_idx]["user__id"] == user.id:
                log_table[exlogs[exlog_idx]["exercise_id"]] = exlogs[exlog_idx]
                exlog_idx += 1
            context["students"].append({  # this could be DRYer
                "first_name": user.first_name,
                "last_name": user.last_name,
                "username": user.username,
                "name": user.get_name(),
                "id": user.id,
                "exercise_logs": log_table,
            })

    elif report_type == "video":
        # Fill in videos
        context["videos"] = get_topic_videos(topic_id=topic_id)

        # More code, but much faster
        video_ids = [vid["youtube_id"] for vid in context["videos"]]
        # Get students
        context["students"] = []
        vidlogs = VideoLog.objects \
            .filter(user__in=users, youtube_id__in=video_ids) \
            .order_by("user__last_name", "user__first_name")\
            .values("user__id", "complete", "youtube_id", "total_seconds_watched", "points")
        vidlogs = list(vidlogs)  # force the query to be executed now

        # Same merge-by-sorted-order walk as the exercise branch.
        vidlog_idx = 0
        for user in users:
            log_table = {}
            while vidlog_idx < len(vidlogs) and vidlogs[vidlog_idx]["user__id"] == user.id:
                log_table[vidlogs[vidlog_idx]["youtube_id"]] = vidlogs[vidlog_idx]
                vidlog_idx += 1
            context["students"].append({  # this could be DRYer
                "first_name": user.first_name,
                "last_name": user.last_name,
                "username": user.username,
                "name": user.get_name(),
                "id": user.id,
                "video_logs": log_table,
            })

    else:
        raise Http404("Unknown report_type: %s" % report_type)

    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update Teacher userlog activity login: %s" % e)

    return context
def tabular_view(request, facility, report_type="exercise"):
    """Tabular coach-report view; all report data is assembled server-side.

    Args:
        request: the Django request; reads GET params "topic" and "group".
        facility: the Facility to report on (may be falsy to show all).
        report_type: "exercise" or "video"; anything else raises Http404.

    Returns:
        A context dict for the template (the view decorator presumably
        renders it -- returning early with a partial context shows the
        generic page).
    """
    # Get a list of topics (sorted) and groups
    topics = get_knowledgemap_topics()
    (groups, facilities) = get_accessible_objects_from_logged_in_user(request)
    context = plotting_metadata_context(request, facility=facility)
    context.update({
        "report_types": ("exercise", "video"),
        "request_report_type": report_type,
        "topics": topics,
    })

    # get querystring info
    topic_id = request.GET.get("topic", "")
    # No valid topic selected; just show the generic page.
    if not topic_id or not re.match(r"^[\w\-]+$", topic_id):
        return context

    group_id = request.GET.get("group", "")
    if group_id:
        # Narrow by group
        users = FacilityUser.objects.filter(
            group=group_id, is_teacher=False).order_by("last_name", "first_name")

    elif facility:
        # Narrow by facility.
        # (renamed loop var: the original shadowed the builtin `dict`)
        search_groups = [gdict["groups"] for gdict in groups if gdict["facility"] == facility.id]
        assert len(search_groups) <= 1, "should only have one or zero matches."

        # Return groups and ungrouped
        search_groups = search_groups[0]  # make sure to include ungrouped students
        users = FacilityUser.objects.filter(
            Q(group__in=search_groups) | Q(group=None, facility=facility),
            is_teacher=False).order_by("last_name", "first_name")

    else:
        # Show all (including ungrouped).
        # BUG FIX: search_groups was used with += here without ever being
        # initialized in this branch, raising UnboundLocalError.
        search_groups = []
        for groups_dict in groups:
            search_groups += groups_dict["groups"]
        users = FacilityUser.objects.filter(
            Q(group__in=search_groups) | Q(group=None),
            is_teacher=False).order_by("last_name", "first_name")

    # We have enough data to render over a group of students.
    # Get type-specific information.
    if report_type == "exercise":
        # Fill in exercises, sorted by knowledge-map position.
        exercises = get_topic_exercises(topic_id=topic_id)
        exercises = sorted(exercises, key=lambda e: (e["h_position"], e["v_position"]))
        context["exercises"] = exercises

        # More code, but much faster: one values() query for all students.
        exercise_names = [ex["name"] for ex in context["exercises"]]
        context["students"] = []
        exlogs = ExerciseLog.objects \
            .filter(user__in=users, exercise_id__in=exercise_names) \
            .order_by("user__last_name", "user__first_name") \
            .values("user__id", "struggling", "complete", "exercise_id")
        exlogs = list(exlogs)  # force the query to be evaluated

        # Merge-join: logs and users share the same (last_name, first_name)
        # ordering, so a single forward scan over exlogs suffices.
        exlog_idx = 0
        for user in users:
            log_table = {}
            while exlog_idx < len(exlogs) and exlogs[exlog_idx]["user__id"] == user.id:
                log_table[exlogs[exlog_idx]["exercise_id"]] = exlogs[exlog_idx]
                exlog_idx += 1

            context["students"].append({  # this could be DRYer
                "first_name": user.first_name,
                "last_name": user.last_name,
                "username": user.username,
                "name": user.get_name(),
                "id": user.id,
                "exercise_logs": log_table,
            })

    elif report_type == "video":
        # Fill in videos
        context["videos"] = get_topic_videos(topic_id=topic_id)

        # More code, but much faster: one values() query for all students.
        video_ids = [vid["youtube_id"] for vid in context["videos"]]
        context["students"] = []
        vidlogs = VideoLog.objects \
            .filter(user__in=users, youtube_id__in=video_ids) \
            .order_by("user__last_name", "user__first_name") \
            .values("user__id", "complete", "youtube_id", "total_seconds_watched", "points")
        vidlogs = list(vidlogs)  # force the query to be executed now

        # Same merge-join pattern as the exercise branch above.
        vidlog_idx = 0
        for user in users:
            log_table = {}
            while vidlog_idx < len(vidlogs) and vidlogs[vidlog_idx]["user__id"] == user.id:
                log_table[vidlogs[vidlog_idx]["youtube_id"]] = vidlogs[vidlog_idx]
                vidlog_idx += 1

            context["students"].append({  # this could be DRYer
                "first_name": user.first_name,
                "last_name": user.last_name,
                "username": user.username,
                "name": user.get_name(),
                "id": user.id,
                "video_logs": log_table,
            })

    else:
        raise Http404("Unknown report_type: %s" % report_type)

    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here, to track teacher report usage.
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update Teacher userlog activity login: %s" % e)

    return context
def compute_data(data_types, who, where):
    """
    Compute the data in "data_types" for each user in "who", for the topics
    selected by "where".

    Args:
        data_types: a string or list of strings; recognized values include
            "pct_mastery", "effort", "ex:<ExerciseLog field>",
            "vid:<VideoLog field>", "user:<UserLog field>",
            "usersum:<UserLogSummary field>".
        who: list of users.
        where: topic_path (iterable of path components).

    Returns:
        dict with keys "data" (per-user OrderedDict of requested stats),
        "topics", "exercises", and "videos".

    Raises:
        Exception: for an unrecognized data_type.
    """

    # None indicates that the data hasn't been queried yet.
    #   We'll query it on demand, for efficiency.
    topics = None
    exercises = None
    videos = None

    # Initialize an empty dictionary of data, video logs, exercise logs, for each user
    data = OrderedDict(zip([w.id for w in who], [dict() for i in range(len(who))]))  # maintain the order of the users
    vid_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    ex_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    if UserLog.is_enabled():
        activity_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))

    # Set up queries (but don't run them), so we have really easy aliases.
    #   Only do them if they haven't been done yet (tell this by passing in a value to the lambda function)
    # Topics: topics.
    # Exercises: names (ids for ExerciseLog objects)
    # Videos: video_id (ids for VideoLog objects)

    # This lambda partial creates a function to return all items with a particular path from the NODE_CACHE.
    search_fun_single_path = partial(lambda t, p: t["path"].startswith(p), p=tuple(where))
    # This lambda partial creates a function to return all items with paths matching a list of paths from NODE_CACHE.
    search_fun_multi_path = partial(lambda ts, p: any([t["path"].startswith(p) for t in ts]), p=tuple(where))
    # Functions that use the functions defined above to return topics, exercises, and videos based on paths.
    query_topics = partial(lambda t, sf: t if t is not None else [t[0]["id"] for t in filter(sf, get_node_cache('Topic').values())], sf=search_fun_single_path)
    query_exercises = partial(lambda e, sf: e if e is not None else [ex[0]["id"] for ex in filter(sf, get_node_cache('Exercise').values())], sf=search_fun_multi_path)
    query_videos = partial(lambda v, sf: v if v is not None else [vid[0]["id"] for vid in filter(sf, get_node_cache('Video').values())], sf=search_fun_multi_path)

    # No users, don't bother.
    if len(who) > 0:
        # Query out all exercises, videos, exercise logs, and video logs before looping to limit requests.
        # This means we could pull data for n-dimensional coach report displays with the same number of requests!
        # Note: User activity is polled inside the loop, to prevent possible slowdown for exercise and video reports.
        exercises = query_exercises(exercises)
        videos = query_videos(videos)
        if exercises:
            ex_logs = query_logs(data.keys(), exercises, "exercise", ex_logs)
        if videos:
            vid_logs = query_logs(data.keys(), videos, "video", vid_logs)

        for data_type in (data_types if not hasattr(data_types, "lower") else [data_types]):  # convert list from string, if necessary
            if data_type in data[data.keys()[0]]:  # if the first user has it, then all do; no need to calc again.
                continue

            #
            # These are summary stats: you only get one per user
            #
            if data_type == "pct_mastery":
                # Efficient query out, spread out to dict
                for user in data.keys():
                    data[user][data_type] = 0 if not ex_logs[user] else 100. * sum([el['complete'] for el in ex_logs[user]]) / float(len(exercises))

            elif data_type == "effort":
                if "ex:attempts" in data[data.keys()[0]] and "vid:total_seconds_watched" in data[data.keys()[0]]:
                    # exercises and videos would be initialized already
                    for user in data.keys():
                        avg_attempts = 0 if len(exercises) == 0 else sum(data[user]["ex:attempts"].values()) / float(len(exercises))
                        avg_video_points = 0 if len(videos) == 0 else sum(data[user]["vid:total_seconds_watched"].values()) / float(len(videos))
                        data[user][data_type] = 100. * (0.5 * avg_attempts / 10. + 0.5 * avg_video_points / 750.)
                else:
                    # Queue up the dependencies, then retry "effort" later in the loop.
                    data_types += ["ex:attempts", "vid:total_seconds_watched", "effort"]

            #
            # These are detail stats: you get many per user
            #

            # Just querying out data directly: Video
            elif data_type.startswith("vid:") and data_type[4:] in [f.name for f in VideoLog._meta.fields]:
                for user in data.keys():
                    data[user][data_type] = OrderedDict([(v['video_id'], v[data_type[4:]]) for v in vid_logs[user]])

            # Just querying out data directly: Exercise
            elif data_type.startswith("ex:") and data_type[3:] in [f.name for f in ExerciseLog._meta.fields]:
                for user in data.keys():
                    data[user][data_type] = OrderedDict([(el['exercise_id'], el[data_type[3:]]) for el in ex_logs[user]])

            # User Log Queries
            # NOTE(review): this branch was garbled in the source
            # ('startswith("user:"******""'); reconstructed to mirror the
            # intact "usersum:" branch below -- confirm against history.
            elif data_type.startswith("user:") and data_type[5:] in [f.name for f in UserLog._meta.fields] and UserLog.is_enabled():
                activity_logs = query_logs(data.keys(), "", "activity", activity_logs)
                for user in data.keys():
                    data[user][data_type] = [log[data_type[5:]] for log in activity_logs[user]]

            # User Summary Queries
            elif data_type.startswith("usersum:") and data_type[8:] in [f.name for f in UserLogSummary._meta.fields] and UserLog.is_enabled():
                activity_logs = query_logs(data.keys(), "", "summaryactivity", activity_logs)
                for user in data.keys():
                    data[user][data_type] = sum([log[data_type[8:]] for log in activity_logs[user]])

            # Unknown requested quantity
            else:
                raise Exception("Unknown type: '%s' not in %s" % (data_type, str([f.name for f in ExerciseLog._meta.fields])))

    # Returning empty list instead of None allows javascript on client
    # side to read 'length' property without error.
    exercises = exercises or []
    videos = videos or []

    return {
        "data": data,
        "topics": topics,
        "exercises": exercises,
        "videos": videos,
    }
def _get_user_usage_data(users, period_start=None, period_end=None):
    """
    Returns facility user data, within the given date range.

    Args:
        users: iterable of FacilityUser objects to report on.
        period_start: optional datetime; logs before this are excluded.
        period_end: optional datetime; logs after this are excluded.

    Returns:
        (user_data, group_data) tuple of OrderedDicts, keyed by user pk
        and group pk respectively (None pk = "Ungrouped").
    """

    # compute period start and end
    # Now compute stats, based on queried data
    len_all_exercises = len(topicdata.NODE_CACHE['Exercise'])
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently: build lazy querysets first, filter below,
    # then evaluate each exactly once.
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    video_logs = VideoLog.objects.filter(user__in=users)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results by the requested period, if any
    if period_start:
        exercise_logs = exercise_logs.filter(completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
        login_logs = login_logs.filter(start_datetime__gte=period_start)
    if period_end:
        exercise_logs = exercise_logs.filter(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
        login_logs = login_logs.filter(end_datetime__lte=period_end)

    # Force results in a single query (list() evaluates each queryset now)
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk"))
    video_logs = list(video_logs.values("youtube_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    # Initialize per-user accumulators, so the log loops below can assume
    # every user key exists.
    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group

        # NOTE(review): the original aggregate-based stats are commented
        # out; these are recomputed from login_logs in the loop below.
        user_data[user.pk]["total_report_views"] = 0#report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] =0# login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0#login_stats["total_seconds__sum"] or 0)/3600.

        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.
        user_data[user.pk]["exercises_mastered"] = []
        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    # Each completed exercise contributes 1/len_all_exercises to mastery.
    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += 1. / len_all_exercises
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["youtube_id"])

    # Split UserLogSummary rows by activity type: coach-report views vs logins.
    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.
            user_data[llog["user__pk"]]["total_logins"] += 1

    # Add group data.  Allow a fake group "Ungrouped" (pk None) for users
    # with no group assigned.
    for user in users:
        group_pk = getattr(user.group, "pk", None)
        group_name = getattr(user.group, "name", "Ungrouped")
        if not group_pk in group_data:
            group_data[group_pk] = {
                "name": group_name,
                "total_logins": 0,
                "total_hours": 0,
                "total_users": 0,
                "total_videos": 0,
                "total_exercises": 0,
                "pct_mastery": 0,
            }
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]

        # Incremental (running) mean of pct_mastery across the group's users.
        total_mastery_so_far = (group_data[group_pk]["pct_mastery"] * (group_data[group_pk]["total_users"] - 1) + user_data[user.pk]["pct_mastery"])
        group_data[group_pk]["pct_mastery"] = total_mastery_so_far / group_data[group_pk]["total_users"]

    return (user_data, group_data)
def api_data(request, xaxis="", yaxis=""):
    """Request contains information about what data are requested (who, what, and how).

    Response should be a JSON object
    * data contains the data, structred by user and then datatype
    * the rest of the data is metadata, useful for displaying detailed info about data.
    """

    # Get the request form
    form = get_data_form(request, xaxis=xaxis, yaxis=yaxis)  # (data=request.REQUEST)

    # Query out the data: who?
    # Precedence: a single user beats a group beats a whole facility.
    if form.data.get("user"):
        facility = []
        groups = []
        users = [get_object_or_404(FacilityUser, id=form.data.get("user"))]
    elif form.data.get("group"):
        facility = []
        groups = [get_object_or_404(FacilityGroup, id=form.data.get("group"))]
        users = FacilityUser.objects.filter(group=form.data.get("group"), is_teacher=False).order_by("last_name", "first_name")
    elif form.data.get("facility"):
        facility = get_object_or_404(Facility, id=form.data.get("facility"))
        groups = FacilityGroup.objects.filter(facility__in=[form.data.get("facility")])
        users = FacilityUser.objects.filter(facility__in=[form.data.get("facility")], is_teacher=False).order_by("last_name", "first_name")
    else:
        return HttpResponseNotFound(_("Did not specify facility, group, nor user."))

    # Query out the data: where?
    if not form.data.get("topic_path"):
        return HttpResponseNotFound(_("Must specify a topic path"))

    # Query out the data: what?
    computed_data = compute_data(data_types=[form.data.get("xaxis"), form.data.get("yaxis")], who=users, where=form.data.get("topic_path"))

    json_data = {
        "data": computed_data["data"],
        "exercises": computed_data["exercises"],
        "videos": computed_data["videos"],
        "users": dict(zip([u.id for u in users],
                          ["%s, %s" % (u.last_name, u.first_name) for u in users]
                          )),
        # NOTE(review): this zips group ids against a dict (i.e. its keys
        # "id"/"name"), which looks unintended -- presumably it should map
        # each group id to {"id": ..., "name": ...}; verify against the
        # client-side consumer before changing the JSON shape.
        "groups": dict(zip([g.id for g in groups],
                           dict(zip(["id", "name"], [(g.id, g.name) for g in groups])),
                           )),
        "facility": None if not facility else {
            "name": facility.name,
            "id": facility.id,
        }
    }

    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update Teacher userlog activity login: %s" % e)

    # Now we have data, stream it back with a handler for date-times
    return JsonResponse(json_data)
def test_query_login_admin(self):
    """Admin login should execute a fixed number of DB queries."""
    # UserLog adds no queries to the admin login path (0 * is_enabled()).
    expected = 38 + 0 * UserLog.is_enabled()
    with self.assertNumQueries(expected):
        self.browser_login_admin()
def generate_fake_exercise_logs(facility_user=None, topics=topics, start_date=datetime.datetime.now() - datetime.timedelta(days=30 * 6)):
    """Add exercise logs for the given topics, for each of the given users.
    If no users are given, they are created.
    If no topics exist, they are taken from the list at the top of this file.

    By default, users start learning randomly between 6 months ago and now.

    Args:
        facility_user: a single FacilityUser, a list of them, or None
            (None triggers user creation; a list triggers the recursive case).
        topics: topic ids to generate logs for (defaults to the module-level list).
        start_date: earliest possible program start (evaluated once at import
            time, since it is a default argument).

    Returns:
        (exercise_logs, user_logs) tuple; in the recursive case these are
        lists of per-call result lists.
    """
    own_device = Device.get_own_device()
    date_diff = datetime.datetime.now() - start_date
    exercise_logs = []
    user_logs = []

    # It's not a user: probably a list.
    # Recursive case
    if not hasattr(facility_user, "username"):
        # It's NONE :-/ generate the users first!
        if not facility_user:
            (facility_user, _, _) = generate_fake_facility_users()

        for topic in topics:
            for user in facility_user:
                (elogs, ulogs) = generate_fake_exercise_logs(facility_user=user, topics=[topic], start_date=start_date)
                exercise_logs.append(elogs)
                user_logs.append(ulogs)

    # Actually generate!
    else:
        # Get (or create) user type
        # NOTE(review): bare except -- treats any failure to parse notes as
        # "no settings yet" and regenerates them.
        try:
            user_settings = json.loads(facility_user.notes)
        except:
            user_settings = sample_user_settings()
            facility_user.notes = json.dumps(user_settings)
            facility_user.save()
        date_diff_started = datetime.timedelta(seconds=datediff(date_diff, units="seconds") * user_settings["time_in_program"])  # when this user started in the program, relative to NOW

        for topic in topics:
            # Get all exercises related to the topic
            exercises = get_topic_exercises(topic_id=topic)

            # Problem:
            #   Not realistic for students to have lots of unfinished exercises.
            #   If they start them, they tend to get stuck, right?
            #
            # So, need to make it more probable that they will finish an exercise,
            #   and less probable that they start one.
            #
            # What we need is P(streak|started), not P(streak)

            # Probability of doing any particular exercise
            p_exercise = probability_of(qty="exercise", user_settings=user_settings)
            logging.debug("# exercises: %d; p(exercise)=%4.3f, user settings: %s\n" % (len(exercises), p_exercise, json.dumps(user_settings)))

            # of exercises is related to
            for j, exercise in enumerate(exercises):
                # Skip this exercise with probability (1 - p_exercise).
                if random.random() > p_exercise:
                    continue

                # Probability of completing this exercise, and .. proportion of attempts
                p_completed = probability_of(qty="completed", user_settings=user_settings)
                p_attempts = probability_of(qty="attempts", user_settings=user_settings)

                attempts = int(random.random() * p_attempts * 30 + 10)  # always enough to have completed
                completed = (random.random() < p_completed)
                if completed:
                    streak_progress = 100
                else:
                    # Partial progress, clamped to [0, 90] and rounded down
                    # to a multiple of 10.
                    streak_progress = max(0, min(90, random.gauss(100 * user_settings["speed_of_learning"], 20)))
                    streak_progress = int(floor(streak_progress / 10.)) * 10
                points = streak_progress / 10 * 12 if completed else 0  # only get points when you master.

                # Choose a rate of exercises, based on their effort level and speed of learning.
                #   Compute the latest possible start time.
                #   Then sample a start time between their start time
                #   and the latest possible start_time
                rate_of_exercises = 0.66 * user_settings["effort_level"] + 0.33 * user_settings["speed_of_learning"]  # exercises per day
                time_for_attempts = min(datetime.timedelta(days=rate_of_exercises * attempts), date_diff_started)  # protect with min
                time_delta_completed = datetime.timedelta(seconds=random.randint(int(datediff(time_for_attempts, units="seconds")), int(datediff(date_diff_started, units="seconds"))))
                date_completed = datetime.datetime.now() - time_delta_completed

                # Always create new
                logging.info("Creating exercise log: %-12s: %-25s (%d points, %d attempts, %d%% streak on %s)" % (
                    facility_user.first_name,
                    exercise["name"],
                    points,
                    attempts,
                    streak_progress,
                    date_completed,
                ))
                try:
                    elog = ExerciseLog.objects.get(user=facility_user, exercise_id=exercise["name"])
                except ExerciseLog.DoesNotExist:
                    # Only create/sign/save when no log exists yet.
                    elog = ExerciseLog(
                        user=facility_user,
                        exercise_id=exercise["name"],
                        attempts=int(attempts),
                        streak_progress=streak_progress,
                        points=int(points),
                        complete=completed,
                        completion_timestamp=date_completed,
                        completion_counter=datediff(date_completed, start_date, units="seconds"),
                    )
                    elog.counter = own_device.increment_and_get_counter()
                    elog.sign(own_device)  # have to sign after setting the counter
                    elog.save(imported=True)  # avoid userlog issues

                    # For now, make all attempts on an exercise into a single UserLog.
                    seconds_per_attempt = 10 * (1 + user_settings["speed_of_learning"] * random.random())
                    time_to_navigate = 15 * (0.5 + random.random())  # between 7.5s and 22.5s
                    time_to_logout = 5 * (0.5 + random.random())  # between 2.5 and 7.5s
                    if settings.USER_LOG_MAX_RECORDS_PER_USER != 0:
                        ulog = UserLog(
                            user=facility_user,
                            activity_type=1,
                            start_datetime = date_completed - datetime.timedelta(seconds=int(attempts * seconds_per_attempt + time_to_navigate)),
                            end_datetime = date_completed + datetime.timedelta(seconds=time_to_logout),
                            last_active_datetime = date_completed,
                        )
                        ulog.full_clean()
                        ulog.save()
                        user_logs.append(ulog)
                exercise_logs.append(elog)

    return (exercise_logs, user_logs)
def compute_data(data_types, who, where):
    """
    Compute the data in "data_types" for each user in "who", for the topics
    selected by "where".

    Args:
        data_types: a string or list of strings; recognized values include
            "pct_mastery", "effort", "ex:<ExerciseLog field>",
            "vid:<VideoLog field>", "user:<UserLog field>",
            "usersum:<UserLogSummary field>".
        who: list of users.
        where: topic_path (iterable of path components).

    Returns:
        dict with keys "data" (per-user OrderedDict of requested stats),
        "topics", "exercises", and "videos".

    Raises:
        Exception: for an unrecognized data_type.
    """

    # None indicates that the data hasn't been queried yet.
    #   We'll query it on demand, for efficiency.
    topics = None
    exercises = None
    videos = None

    # Initialize an empty dictionary of data, video logs, exercise logs, for each user
    data = OrderedDict(
        zip([w.id for w in who],
            [dict() for i in range(len(who))]))  # maintain the order of the users
    vid_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    ex_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    if UserLog.is_enabled():
        activity_logs = dict(
            zip([w.id for w in who], [[] for i in range(len(who))]))

    # Set up queries (but don't run them), so we have really easy aliases.
    #   Only do them if they haven't been done yet (tell this by passing in a value to the lambda function)
    # Topics: topics.
    # Exercises: names (ids for ExerciseLog objects)
    # Videos: youtube_id (ids for VideoLog objects)

    # This lambda partial creates a function to return all items with a particular path from the NODECACHE.
    search_fun_single_path = partial(lambda t, p: t["path"].startswith(p), p=tuple(where))
    # This lambda partial creates a function to return all items with paths matching a list of paths from NODECACHE.
    search_fun_multi_path = partial(
        lambda t, p: any([tp.startswith(p) for tp in t["paths"]]),
        p=tuple(where))
    # Functions that use the functions defined above to return topics, exercises, and videos based on paths.
    query_topics = partial(
        lambda t, sf: t if t is not None else [t for t in filter(sf, topicdata.NODE_CACHE['Topic'].values())],
        sf=search_fun_single_path)
    query_exercises = partial(
        lambda e, sf: e if e is not None else [ex["name"] for ex in filter(sf, topicdata.NODE_CACHE['Exercise'].values())],
        sf=search_fun_multi_path)
    query_videos = partial(
        lambda v, sf: v if v is not None else [vid["youtube_id"] for vid in filter(sf, topicdata.NODE_CACHE['Video'].values())],
        sf=search_fun_multi_path)

    # No users, don't bother.
    if len(who) > 0:
        # Query out all exercises, videos, exercise logs, and video logs before looping to limit requests.
        # This means we could pull data for n-dimensional coach report displays with the same number of requests!
        # Note: User activity is polled inside the loop, to prevent possible slowdown for exercise and video reports.
        exercises = query_exercises(exercises)
        videos = query_videos(videos)
        if exercises:
            ex_logs = query_logs(data.keys(), exercises, "exercise", ex_logs)
        if videos:
            vid_logs = query_logs(data.keys(), videos, "video", vid_logs)

        for data_type in (data_types if not hasattr(data_types, "lower") else [data_types]):  # convert list from string, if necessary
            if data_type in data[data.keys()[0]]:  # if the first user has it, then all do; no need to calc again.
                continue

            #
            # These are summary stats: you only get one per user
            #
            if data_type == "pct_mastery":
                # Efficient query out, spread out to dict
                for user in data.keys():
                    data[user][data_type] = 0 if not ex_logs[user] else 100. * sum([el['complete'] for el in ex_logs[user]]) / float(len(exercises))

            elif data_type == "effort":
                if "ex:attempts" in data[data.keys()[0]] and "vid:total_seconds_watched" in data[data.keys()[0]]:
                    # exercises and videos would be initialized already
                    for user in data.keys():
                        avg_attempts = 0 if len(exercises) == 0 else sum(data[user]["ex:attempts"].values()) / float(len(exercises))
                        avg_video_points = 0 if len(videos) == 0 else sum(data[user]["vid:total_seconds_watched"].values()) / float(len(videos))
                        data[user][data_type] = 100. * (0.5 * avg_attempts / 10. + 0.5 * avg_video_points / 750.)
                else:
                    # Queue up the dependencies, then retry "effort" later in the loop.
                    data_types += ["ex:attempts", "vid:total_seconds_watched", "effort"]

            #
            # These are detail stats: you get many per user
            #

            # Just querying out data directly: Video
            elif data_type.startswith("vid:") and data_type[4:] in [f.name for f in VideoLog._meta.fields]:
                for user in data.keys():
                    data[user][data_type] = OrderedDict([(v['youtube_id'], v[data_type[4:]]) for v in vid_logs[user]])

            # Just querying out data directly: Exercise
            elif data_type.startswith("ex:") and data_type[3:] in [f.name for f in ExerciseLog._meta.fields]:
                for user in data.keys():
                    data[user][data_type] = OrderedDict([(el['exercise_id'], el[data_type[3:]]) for el in ex_logs[user]])

            # User Log Queries
            # NOTE(review): this branch was garbled in the source
            # ('startswith("user:"******""'); reconstructed to mirror the
            # intact "usersum:" branch below -- confirm against history.
            elif data_type.startswith("user:") and data_type[5:] in [f.name for f in UserLog._meta.fields] and UserLog.is_enabled():
                activity_logs = query_logs(data.keys(), "", "activity", activity_logs)
                for user in data.keys():
                    data[user][data_type] = [log[data_type[5:]] for log in activity_logs[user]]

            # User Summary Queries
            elif data_type.startswith("usersum:") and data_type[8:] in [f.name for f in UserLogSummary._meta.fields] and UserLog.is_enabled():
                activity_logs = query_logs(data.keys(), "", "summaryactivity", activity_logs)
                for user in data.keys():
                    data[user][data_type] = sum([log[data_type[8:]] for log in activity_logs[user]])

            # Unknown requested quantity
            else:
                raise Exception("Unknown type: '%s' not in %s" % (data_type, str([f.name for f in ExerciseLog._meta.fields])))

    # Returning empty list instead of None allows javascript on client
    # side to read 'length' property without error.
    exercises = exercises or []
    videos = videos or []

    return {
        "data": data,
        "topics": topics,
        "exercises": exercises,
        "videos": videos,
    }
def test_query_logout_admin(self):
    """Admin logout should execute a fixed number of DB queries."""
    # Log in first so there is a session to log out of.
    self.test_query_login_admin()
    # UserLog contributes no extra queries on admin logout.
    expected = 17 + 0 * UserLog.is_enabled()
    with self.assertNumQueries(expected):
        self.browser_logout_user()
def test_query_logout_teacher(self):
    """Teacher logout should execute a fixed number of DB queries."""
    # Log in first so there is a session to log out of.
    self.test_query_login_teacher()
    # When UserLog is enabled, logout writes activity records (11 extra queries).
    expected = 16 + 11 * UserLog.is_enabled()
    with self.assertNumQueries(expected):
        self.browser_logout_user()
def api_data(request, xaxis="", yaxis=""):
    """Request contains information about what data are requested (who, what, and how).

    Response should be a JSON object
    * data contains the data, structred by user and then datatype
    * the rest of the data is metadata, useful for displaying detailed info about data.
    """

    # Get the request form
    form = get_data_form(request, xaxis=xaxis, yaxis=yaxis)  # (data=request.REQUEST)

    # Query out the data: who?
    # Precedence: a single user beats a group beats a whole facility.
    if form.data.get("user"):
        facility = []
        groups = []
        users = [get_object_or_404(FacilityUser, id=form.data.get("user"))]
    elif form.data.get("group"):
        facility = []
        groups = [get_object_or_404(FacilityGroup, id=form.data.get("group"))]
        users = FacilityUser.objects.filter(group=form.data.get("group"),
                                            is_teacher=False).order_by(
                                                "last_name", "first_name")
    elif form.data.get("facility"):
        facility = get_object_or_404(Facility, id=form.data.get("facility"))
        groups = FacilityGroup.objects.filter(
            facility__in=[form.data.get("facility")])
        users = FacilityUser.objects.filter(
            facility__in=[form.data.get("facility")],
            is_teacher=False).order_by("last_name", "first_name")
    else:
        return HttpResponseNotFound(
            "Did not specify facility, group, nor user.")

    # Query out the data: where?
    if not form.data.get("topic_path"):
        return HttpResponseNotFound("Must specify a topic path")

    # Query out the data: what?
    computed_data = compute_data(
        data_types=[form.data.get("xaxis"),
                    form.data.get("yaxis")],
        who=users,
        where=form.data.get("topic_path"))

    json_data = {
        "data": computed_data["data"],
        "exercises": computed_data["exercises"],
        "videos": computed_data["videos"],
        "users": dict(
            zip([u.id for u in users],
                ["%s, %s" % (u.last_name, u.first_name) for u in users])),
        # NOTE(review): this zips group ids against a dict (i.e. its keys
        # "id"/"name"), which looks unintended -- presumably it should map
        # each group id to {"id": ..., "name": ...}; verify against the
        # client-side consumer before changing the JSON shape.
        "groups": dict(
            zip(
                [g.id for g in groups],
                dict(zip(["id", "name"], [(g.id, g.name) for g in groups])),
            )),
        "facility": None if not facility else {
            "name": facility.name,
            "id": facility.id,
        }
    }

    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(
                user, activity_type="login"
            )  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error(
                "Failed to update Teacher userlog activity login: %s" % e)

    # Now we have data, stream it back with a handler for date-times
    return JsonResponse(json_data)
def generate_fake_exercise_logs(facility_user=None, topics=topics, start_date=None):
    """Add exercise logs for the given topics, for each of the given users.
    If no users are given, they are created.
    If no topics exist, they are taken from the list at the top of this file.

    By default, users start learning randomly between 6 months ago and now.

    Returns an (exercise_logs, user_logs) tuple (nested lists in the
    recursive, multi-user case).
    """
    # BUG FIX: the old default argument called datetime.datetime.now() at
    # import time, so every later call in a long-lived process used a stale
    # "now".  Use a None sentinel and compute the default per call instead.
    if start_date is None:
        start_date = datetime.datetime.now() - datetime.timedelta(days=30 * 6)

    date_diff = datetime.datetime.now() - start_date
    exercise_logs = []
    user_logs = []

    # It's not a user: probably a list.
    # Recursive case
    if not hasattr(facility_user, "username"):
        # It's NONE :-/ generate the users first!
        if not facility_user:
            (facility_user, _, _) = generate_fake_facility_users()

        for topic in topics:
            for user in facility_user:
                (elogs, ulogs) = generate_fake_exercise_logs(facility_user=user, topics=[topic], start_date=start_date)
                exercise_logs.append(elogs)
                user_logs.append(ulogs)

    # Actually generate!
    else:
        # Get (or create) user type.  Narrowed from a bare except: json.loads
        # raises ValueError on malformed JSON, TypeError on None notes.
        try:
            user_settings = json.loads(facility_user.notes)
        except (TypeError, ValueError):
            user_settings = sample_user_settings()
            facility_user.notes = json.dumps(user_settings)
            facility_user.save()

        # when this user started in the program, relative to NOW
        date_diff_started = datetime.timedelta(seconds=datediff(date_diff, units="seconds") * user_settings["time_in_program"])

        for topic in topics:
            # Get all exercises related to the topic
            exercises = get_topic_exercises(topic_id=topic)

            # Problem:
            #   Not realistic for students to have lots of unfinished exercises.
            #   If they start them, they tend to get stuck, right?
            #
            # So, need to make it more probable that they will finish an exercise,
            #   and less probable that they start one.
            #
            # What we need is P(streak|started), not P(streak)

            # Probability of doing any particular exercise
            p_exercise = probability_of(qty="exercise", user_settings=user_settings)
            logging.debug("# exercises: %d; p(exercise)=%4.3f, user settings: %s\n" % (len(exercises), p_exercise, json.dumps(user_settings)))

            # of exercises is related to
            for exercise in exercises:  # unused enumerate() index removed
                if random.random() > p_exercise:
                    continue

                # Probability of completing this exercise, and .. proportion of attempts
                p_completed = probability_of(qty="completed", user_settings=user_settings)
                p_attempts = probability_of(qty="attempts", user_settings=user_settings)

                attempts = int(random.random() * p_attempts * 30 + 10)  # always enough to have completed

                completed = (random.random() < p_completed)
                if completed:
                    streak_progress = 100
                else:
                    streak_progress = max(0, min(90, random.gauss(100 * user_settings["speed_of_learning"], 20)))
                    streak_progress = int(floor(streak_progress / 10.)) * 10

                points = streak_progress / 10 * 12 if completed else 0  # only get points when you master.

                # Choose a rate of exercises, based on their effort level and speed of learning.
                # Compute the latest possible start time.
                # Then sample a start time between their start time
                #   and the latest possible start_time
                rate_of_exercises = 0.66 * user_settings["effort_level"] + 0.33 * user_settings["speed_of_learning"]  # exercises per day
                time_for_attempts = min(datetime.timedelta(days=rate_of_exercises * attempts), date_diff_started)  # protect with min
                time_delta_completed = datetime.timedelta(seconds=random.randint(int(datediff(time_for_attempts, units="seconds")), int(datediff(date_diff_started, units="seconds"))))
                date_completed = datetime.datetime.now() - time_delta_completed

                # Always create new
                logging.info("Creating exercise log: %-12s: %-25s (%d points, %d attempts, %d%% streak on %s)" % (
                    facility_user.first_name,
                    exercise["name"],
                    points,
                    attempts,
                    streak_progress,
                    date_completed,
                ))
                try:
                    elog = ExerciseLog.objects.get(user=facility_user, exercise_id=exercise["name"])
                except ExerciseLog.DoesNotExist:
                    elog = ExerciseLog(
                        user=facility_user,
                        exercise_id=exercise["name"],
                        attempts=int(attempts),
                        streak_progress=streak_progress,
                        points=int(points),
                        completion_timestamp=date_completed,
                        completion_counter=datediff(date_completed, start_date, units="seconds"),
                    )
                    elog.full_clean()
                    elog.save()

                    # TODO(bcipolli): bulk saving of logs
                    # For now, make all attempts on an exercise into a single UserLog.
                    seconds_per_attempt = 10 * (1 + user_settings["speed_of_learning"] * random.random())
                    time_to_navigate = 15 * (0.5 + random.random())  # between 7.5s and 22.5s
                    time_to_logout = 5 * (0.5 + random.random())  # between 2.5 and 7.5s

                    ulog = UserLog(
                        user=facility_user,
                        activity_type=1,
                        start_datetime=date_completed - datetime.timedelta(seconds=int(attempts * seconds_per_attempt + time_to_navigate)),
                        end_datetime=date_completed + datetime.timedelta(seconds=time_to_logout),
                        last_active_datetime=date_completed,
                    )
                    ulog.full_clean()
                    ulog.save()
                    user_logs.append(ulog)

                exercise_logs.append(elog)

    return (exercise_logs, user_logs)
{"key": "ex:attempts", "name": _("Average attempts"), "type": "number", "description": _("Number of times submitting an answer to an exercise.")}, {"key": "ex:streak_progress", "name": _("Average streak"), "type": "number", "description": _("Maximum number of consecutive correct answers on an exercise.")}, {"key": "ex:points", "name": _("Exercise points"), "type": "number", "description": _("[Pointless at the moment; tracks mastery linearly]")}, { "key": "ex:completion_timestamp", "name": _("Time exercise completed"),"type": "datetime", "description": _("Day/time the exercise was completed.") }, {"key": "vid:points", "name": _("Video points"), "type": "number", "description": _("Points earned while watching a video (750 max / video).")}, { "key": "vid:total_seconds_watched","name": _("Video time"), "type": "number", "description": _("Total seconds spent watching a video.") }, { "key": "vid:completion_timestamp", "name": _("Time video completed"),"type": "datetime", "description": _("Day/time the video was completed.") }, ] user_log_stats_dict = [ { "key": "usersum:total_seconds", "name": _("Time Active (s)"), "type": "number", "description": _("Total time spent actively logged in.")}, { "key": "user:total_seconds", "name": _("Active Time Per Login"), "type": "number", "description": _("Duration of each login session."), "noscatter": True, "timeline": True}, { "key": "user:last_active_datetime", "name": _("Time Session Completed"),"type": "datetime", "description": _("Day/time the login session finished.")}, ] if UserLog.is_enabled(): stats_dict.extend(user_log_stats_dict) def get_data_form(request, *args, **kwargs): """Get the basic data form, by combining information from keyword arguments and the request.REQUEST object. Along the way, check permissions to make sure whatever's being requested is OK. Request objects get priority over keyword args. 
""" assert not args, "all non-request args should be keyword args" # Pull the form parameters out of the request or data = dict() # Default to empty string, as it makes template handling cleaner later. for field in ["facility", "group", "user", "xaxis", "yaxis"]: