Example 1
def query_logs(users, items, logtype, logdict):
    """
    Get a specified subset of logs for a particular set of users, for exercises, videos, or user activity.
    users: list of users to query against.
    items: list of either exercises or videos to query.
    logtype: "exercise", "video", "activity", or "summaryactivity" (the last two require UserLog to be enabled).
    logdict: user-keyed dictionary of log lists; each user's list is presumed empty, and this function only appends to it.
    """

    if logtype == "exercise":
        all_logs = ExerciseLog.objects.filter(user__in=users, exercise_id__in=items).values(
                        'user', 'complete', 'exercise_id', 'attempts', 'points', 'struggling', 'completion_timestamp', 'streak_progress').order_by('completion_timestamp')
    elif logtype == "video":
        all_logs = VideoLog.objects.filter(user__in=users, video_id__in=items).values(
            'user', 'complete', 'video_id', 'total_seconds_watched', 'completion_timestamp', 'points').order_by('completion_timestamp')
    elif logtype == "activity" and UserLog.is_enabled():
        all_logs = UserLog.objects.filter(user__in=users).values(
            'user', 'last_active_datetime', 'total_seconds').order_by('last_active_datetime')
    elif logtype == "summaryactivity" and UserLog.is_enabled():
        all_logs = UserLogSummary.objects.filter(user__in=users).values(
            'user', 'device', 'total_seconds').order_by('end_datetime')
    else:
        assert False, "Unknown log type: '%s'" % logtype  # indicates a programming error

    for log in all_logs:
        logdict[log['user']].append(log)
    return logdict
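Note that query_logs only appends to logdict, so callers are expected to pre-seed one list per user key (Example 14 below does exactly that). A minimal, hypothetical call site; `students` stands in for an iterable of FacilityUser objects and the exercise ids are placeholders:

# Hypothetical usage sketch of query_logs above (placeholder data throughout).
ex_logs = dict((u.id, []) for u in students)   # pre-seed: query_logs only appends
ex_logs = query_logs(
    users=students,
    items=["addition_1", "telling_time"],      # illustrative exercise ids
    logtype="exercise",
    logdict=ex_logs,
)
for user_pk, logs in ex_logs.items():
    print("%s: %d exercise log(s)" % (user_pk, len(logs)))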
Example 2
def generate_fake_coachreport_logs(password="******"):
    try:
        t = FacilityUser.objects.get(facility=Facility.objects.all()[0], username=random.choice(firstnames))
    except FacilityUser.DoesNotExist as e:
        t = FacilityUser(facility=Facility.objects.all()[0], username=random.choice(firstnames))
        t.set_password(password)
        t.save()

    # TODO: create flags later
    num_logs = 20
    logs = []
    for _ in xrange(num_logs):
        date_logged_in = datetime.datetime.now() - datetime.timedelta(days=random.randint(1, 10))
        date_viewed_coachreport = date_logged_in + datetime.timedelta(minutes=random.randint(0, 30))
        date_logged_out = date_viewed_coachreport + datetime.timedelta(minutes=random.randint(0, 30))
        login_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("login"),
            start_datetime=date_logged_in,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_logged_out,
        )
        logging.info("created login log for teacher %s" % t.username)
        coachreport_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("coachreport"),
            start_datetime=date_viewed_coachreport,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_viewed_coachreport,
        )
        logs.append((login_log, coachreport_log))
        logging.info("created coachreport log for teacher %s" % t.username)
    return logs
Example 3
def logout(request):
    if "facility_user" in request.session:
        # Logout, ignore any errors.
        try:
            UserLog.end_user_activity(request.session["facility_user"], activity_type="login")
        except ValidationError as e:
            logging.error("Failed to end_user_activity upon logout: %s" % e)
        del request.session["facility_user"]

    auth_logout(request)
    next = request.GET.get("next", reverse("homepage"))
    if not next.startswith("/"):  # only allow local redirect targets
        next = "/"
    return HttpResponseRedirect(next)
Example 4
 def logout(self, request, **kwargs):
     self.method_check(request, allowed=['get'])
     if "facility_user" in request.session:
         try:
             # First, update it because this is a real event
             UserLog.update_user_activity(request.session["facility_user"], activity_type="login")
             # ...then end it
             UserLog.end_user_activity(request.session["facility_user"], activity_type="login")
             del request.session['facility_user']
         except ValidationError as e:
             logging.error("Failed to end_user_activity upon logout: %s" % e)
     logout(request)
     return self.create_response(request, {
         'success': True,
         'redirect': reverse("homepage")
         })
Example 5
    def test_query_login_teacher(self):
        """Check the # of queries when logging in as a teacher."""
        teacher = FacilityUser(is_teacher=True, username="t1", facility=self.facility)
        passwd = self._gen_valid_password()
        teacher.set_password(passwd)
        teacher.save()

        with self.assertNumQueries(FuzzyInt(25, 43) + 3 * UserLog.is_enabled()):
            self.browser_login_teacher("t1", passwd, self.facility)
Example 6
    def test_query_login_student(self):
        """Check the # of queries when logging in as a student."""
        student = FacilityUser(is_teacher=False, username="s1", facility=self.facility)
        passwd = self._gen_valid_password()
        student.set_password(passwd)
        student.save()

        expected_num_queries = 30 + 3*UserLog.is_enabled()
        with self.assertNumQueries(FuzzyInt(expected_num_queries - 3, expected_num_queries + 5)):
            self.browser_login_student("s1", passwd, self.facility)
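The 3 * UserLog.is_enabled() term relies on Python booleans being integers: it adds three expected queries only when user activity logging is turned on. FuzzyInt appears to be a test helper that accepts any query count within a range; a minimal sketch of how such a helper can be written (an illustration, not necessarily the project's actual implementation):

class FuzzyInt(int):
    """Integer-like value that compares equal to anything in [lowest, highest]."""

    def __new__(cls, lowest, highest):
        obj = super(FuzzyInt, cls).__new__(cls, highest)
        obj.lowest = lowest
        obj.highest = highest
        return obj

    def __eq__(self, other):
        return self.lowest <= other <= self.highest

    def __ne__(self, other):
        return not self.__eq__(other)

    def __add__(self, other):
        # Assumed behaviour: shift the whole range, so expressions like
        # FuzzyInt(25, 43) + 3 * UserLog.is_enabled() stay fuzzy.
        return FuzzyInt(self.lowest + other, self.highest + other)

    def __repr__(self):
        return "FuzzyInt(%d, %d)" % (self.lowest, self.highest)

print(FuzzyInt(25, 43) + 3 * True == 30)   # True: bool is an int, so the term adds 3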
Example 7
def log_coach_report_view(request):
    """Record coach report view by teacher"""
    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update Teacher userlog activity login: %s" % e)
Example 8
def account_management(request):

    # Only log 'coachreport' activity for students,
    #   (otherwise it's hard to compare teachers)
    if "facility_user" in request.session and not request.session["facility_user"].is_teacher and reverse("login") not in request.META.get("HTTP_REFERER", ""):
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update student userlog activity: %s" % e)

    return student_view_context(request)
Example 9
def account_management(request):

    # Only log 'coachreport' activity for students,
    #   (otherwise it's hard to compare teachers)
    if "facility_user" in request.session and not request.session["facility_user"].is_teacher:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update student userlog activity: %s" % e)

    c = student_view_context(request)
    c["restricted"] = settings.DISABLE_SELF_ADMIN
    return c
Example 10
def _get_user_usage_data(users, groups=None, period_start=None, period_end=None, group_id=None):
    """
    Returns facility user data, within the given date range.
    """

    groups = groups or set([user.group for user in users])

    # compute period start and end
    # Now compute stats, based on queried data
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    video_logs = VideoLog.objects.filter(user__in=users, total_seconds_watched__gt=0)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    login_logs = login_logs.filter(total_seconds__gt=0)
    if period_start:
        exercise_logs = exercise_logs.filter(completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
    if period_end:
        # MUST: Fix the midnight bug where period end covers up to the prior day only because
        # period end is datetime(year, month, day, hour=0, minute=0), meaning midnight of previous day.
        # Example:
        #   If period_end == '2014-12-01', we cannot include the records dated '2014-12-01 09:30'.
        #   So to fix this, we change it to '2014-12-01 23:59.999999'.
        period_end = dateutil.parser.parse(period_end)
        period_end = period_end + dateutil.relativedelta.relativedelta(days=+1, microseconds=-1)
        exercise_logs = exercise_logs.filter(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
    if period_start and period_end:
        exercise_logs = exercise_logs.filter(
            Q(completion_timestamp__gte=period_start) & Q(completion_timestamp__lte=period_end)
        )

        q1 = (
            Q(completion_timestamp__isnull=False)
            & Q(completion_timestamp__gte=period_start)
            & Q(completion_timestamp__lte=period_end)
        )
        video_logs = video_logs.filter(q1)

        login_q1 = (
            Q(start_datetime__gte=period_start)
            & Q(start_datetime__lte=period_end)
            & Q(end_datetime__gte=period_start)
            & Q(end_datetime__lte=period_end)
        )
        login_logs = login_logs.filter(login_q1)
    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk", "streak_progress"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["id"] = user.pk
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group

        user_data[user.pk]["total_report_views"] = 0  # report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] = 0  # login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0  # login_stats["total_seconds__sum"] or 0)/3600.

        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.0
        user_data[user.pk]["exercises_mastered"] = []

        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += elog["streak_progress"]
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.0
            user_data[llog["user__pk"]]["total_logins"] += 1

    for group in list(groups) + [None] * (
        group_id == None or group_id == UNGROUPED
    ):  # None for ungrouped, if no group_id passed.
        group_pk = getattr(group, "pk", None)
        group_name = getattr(group, "name", _(UNGROUPED))
        group_title = getattr(group, "title", _(UNGROUPED))
        group_data[group_pk] = {
            "id": group_pk,
            "name": group_name,
            "title": group_title,
            "total_logins": 0,
            "total_hours": 0,
            "total_users": 0,
            "total_videos": 0,
            "total_exercises": 0,
            "pct_mastery": 0,
        }

    # Add group data.  Allow a fake group UNGROUPED
    for user in users:
        user_data[user.pk]["pct_mastery"] = user_data[user.pk]["pct_mastery"] / (
            user_data[user.pk]["total_exercises"] or 1
        )
        group_pk = getattr(user.group, "pk", None)
        if group_pk not in group_data:
            logging.error("User %s still in nonexistent group %s!" % (user.id, group_pk))
            continue
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]

        total_mastery_so_far = (
            group_data[group_pk]["pct_mastery"] * (group_data[group_pk]["total_users"] - 1)
            + user_data[user.pk]["pct_mastery"]
        )
        group_data[group_pk]["pct_mastery"] = total_mastery_so_far / group_data[group_pk]["total_users"]

    if len(group_data) == 1 and None in group_data:
        if not group_data[None]["total_users"]:
            del group_data[None]

    return (user_data, group_data)
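The "midnight bug" adjustment above turns a date-only period_end into the last microsecond of that day, so records completed later the same day stay inside the range. The same dateutil arithmetic in isolation:

import dateutil.parser
import dateutil.relativedelta

period_end = dateutil.parser.parse("2014-12-01")    # 2014-12-01 00:00:00
period_end = period_end + dateutil.relativedelta.relativedelta(days=+1, microseconds=-1)
print(period_end)                                   # 2014-12-01 23:59:59.999999

record_time = dateutil.parser.parse("2014-12-01 09:30")
print(record_time <= period_end)                    # True: the 09:30 record is included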
Example 11
def login(request, facility):
    facility_id = (facility and facility.id) or None
    facilities = list(Facility.objects.all())

    #Fix for #2047: prompt user to create an admin account if none exists
    if not User.objects.exists():
        messages.warning(request, _("No administrator account detected. Please run 'python manage.py createsuperuser' from the terminal to create one."))

    # Fix for #1211: refresh cached facility info when it's free and relevant
    refresh_session_facility_info(request, facility_count=len(facilities))

    if request.method != 'POST':  # render the unbound login form
        referer = urlparse.urlparse(request.META["HTTP_REFERER"]).path if request.META.get("HTTP_REFERER") else None
        # never use the homepage as the referer
        if referer in [reverse("homepage"), reverse("add_facility_student")]:
            referer = None
        form = LoginForm(initial={"facility": facility_id, "callback_url": referer})

    else:  # process the login form
        # log out any Django user or facility user
        logout(request)

        username = request.POST.get("username", "")
        password = request.POST.get("password", "")

        # first try logging in as a Django user
        if not settings.CENTRAL_SERVER:
            user = authenticate(username=username, password=password)
            if user:
                auth_login(request, user)
                return HttpResponseRedirect(request.next or reverse("zone_redirect"))

        # try logging in as a facility user
        form = LoginForm(data=request.POST, request=request, initial={"facility": facility_id})
        if not form.is_valid():
            messages.error(
                request,
                _("There was an error logging you in. Please correct any errors listed below, and try again."),
            )

        else:
            user = form.get_user()

            try:
                UserLog.begin_user_activity(user, activity_type="login", language=request.language)  # Success! Log the event (ignoring validation failures)
            except ValidationError as e:
                logging.error("Failed to begin_user_activity upon login: %s" % e)

            request.session["facility_user"] = user
            messages.success(request, _("You've been logged in! We hope you enjoy your time with KA Lite ") +
                                        _("-- be sure to log out when you finish."))

            # Send them back from whence they came
            landing_page = form.cleaned_data["callback_url"]
            if not landing_page:
                # Just going back to the homepage?  We can do better than that.
                landing_page = reverse("coach_reports") if form.get_user().is_teacher else None
                landing_page = landing_page or (reverse("account_management") if False else reverse("homepage"))  # TODO: pass the redirect as a parameter.

            return HttpResponseRedirect(form.non_field_errors() or request.next or landing_page)

    return {
        "form": form,
        "facilities": facilities,
        "sign_up_url": reverse("add_facility_student"),
    }
Example 12
    def login(self, request, **kwargs):
        self.method_check(request, allowed=['post'])

        logout(request)

        data = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))

        username = data.get('username', '')
        password = data.get('password', '')
        facility = data.get('facility', '')

        # first try logging in as a Django user
        if not settings.CENTRAL_SERVER:
            user = authenticate(username=username, password=password)
            if user:
                login(request, user)
                return self.create_response(request, {
                    'success': True,
                    'redirect': reverse("zone_redirect")
                    })

        # Find all matching users
        users = FacilityUser.objects.filter(username=username, facility=facility)

        if users.count() == 0:
            if Facility.objects.count() > 1:
                error_message = _("Username and password do not match. Make sure you choose the right facility.")
            else:
                error_message = _("Username and password do not match.")
            return self.create_response(request, {
                'messages': {'error': error_message},
                'error_highlight': "password"
                }, HttpUnauthorized )

        for user in users:
            if settings.SIMPLIFIED_LOGIN and not user.is_teacher:
                # For simplified login, as long as it is a student account just take the first one!
                break
            # if we find a user whose password matches, stop looking
            if user.check_password(password):
                break
            else:
                user = None

        if not user:
            if Facility.objects.count() > 1:
                error_message = _("Username and password do not match. Make sure you choose the right facility.")
            else:
                error_message = _("Username and password do not match.")
            return self.create_response(request, {
                'messages': {'error': error_message},
                'error_highlight': "password"
                }, HttpUnauthorized )
        else:
            try:
                UserLog.begin_user_activity(user, activity_type="login", language=lcode_to_django_lang(request.language))  # Success! Log the event (ignoring validation failures)
            except ValidationError as e:
                logging.error("Failed to begin_user_activity upon login: %s" % e)

            request.session["facility_user"] = user
            messages.success(request, _("You've been logged in! We hope you enjoy your time with KA Lite ")
                + _("-- be sure to log out when you finish."))

            extras = {'success': True}
            if user.is_teacher:
                extras.update({
                    "redirect": reverse("coach_reports", kwargs={"zone_id": getattr(Device.get_own_device().get_zone(), "id", "None")})
                })
            return self.create_response(request, extras)
Example 13
        "key": "user:total_seconds",
        "name": _("Active Time Per Login"),
        "type": "number",
        "description": _("Duration of each login session."),
        "noscatter": True,
        "timeline": True
    },
    {
        "key": "user:last_active_datetime",
        "name": _("Time Session Completed"),
        "type": "datetime",
        "description": _("Day/time the login session finished.")
    },
]

if UserLog.is_enabled():
    stats_dict.extend(user_log_stats_dict)


def get_data_form(request, *args, **kwargs):
    """Get the basic data form, by combining information from
    keyword arguments and the request.REQUEST object.
    Along the way, check permissions to make sure whatever's being requested is OK.

    Request objects get priority over keyword args.
    """
    assert not args, "all non-request args should be keyword args"

    # Pull the form parameters out of the request or
    data = dict()
    # Default to empty string, as it makes template handling cleaner later.
Example 14
def compute_data(data_types, who, where):
    """
    Compute the data in "data_types" for each user in "who", for the topics selected by "where"

    who: list of users
    where: topic_path
    data_types can include:
        pct_mastery
        effort
        attempts
    """

    # None indicates that the data hasn't been queried yet.
    #   We'll query it on demand, for efficiency
    topics = None
    exercises = None
    videos = None

    # Initialize an empty dictionary of data, video logs, exercise logs, for each user
    data = OrderedDict(
        zip([w.id for w in who],
            [dict()
             for i in range(len(who))]))  # maintain the order of the users
    vid_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    ex_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    if UserLog.is_enabled():
        activity_logs = dict(
            zip([w.id for w in who], [[] for i in range(len(who))]))

    # Set up queries (but don't run them), so we have really easy aliases.
    #   Only do them if they haven't been done yet (tell this by passing in a value to the lambda function)
    # Topics: topics.
    # Exercises: names (ids for ExerciseLog objects)
    # Videos: video_id (ids for VideoLog objects)

    # This lambda partial creates a function to return all items with a particular path from the NODE_CACHE.
    search_fun_single_path = partial(lambda t, p: t["path"].startswith(p),
                                     p=tuple(where))
    # This lambda partial creates a function to return all items with paths matching a list of paths from NODE_CACHE.
    search_fun_multi_path = partial(
        lambda ts, p: any([t["path"].startswith(p) for t in ts]),
        p=tuple(where))
    # Functions that use the functions defined above to return topics, exercises, and videos based on paths.
    query_topics = partial(
        lambda t, sf: t if t is not None else
        [t["id"] for t in filter(sf,
                                 get_node_cache('Topic').values())],
        sf=search_fun_single_path)
    query_exercises = partial(
        lambda e, sf: e if e is not None else
        [ex["id"] for ex in filter(sf,
                                   get_exercise_cache().values())],
        sf=search_fun_single_path)
    query_videos = partial(
        lambda v, sf: v if v is not None else
        [vid["id"] for vid in filter(sf,
                                     get_node_cache('Content').values())],
        sf=search_fun_single_path)

    # No users, don't bother.
    if len(who) > 0:

        # Query out all exercises, videos, exercise logs, and video logs before looping to limit requests.
        # This means we could pull data for n-dimensional coach report displays with the same number of requests!
        # Note: User activity is polled inside the loop, to prevent possible slowdown for exercise and video reports.
        exercises = query_exercises(exercises)

        videos = query_videos(videos)

        if exercises:
            ex_logs = query_logs(data.keys(), exercises, "exercise", ex_logs)

        if videos:
            vid_logs = query_logs(data.keys(), videos, "video", vid_logs)

        for data_type in (data_types if not hasattr(data_types, "lower") else [
                data_types
        ]):  # convert list from string, if necessary
            if data_type in data[data.keys(
            )[0]]:  # if the first user has it, then all do; no need to calc again.
                continue

            #
            # These are summary stats: you only get one per user
            #
            if data_type == "pct_mastery":

                # Efficient query out, spread out to dict
                for user in data.keys():
                    data[user][
                        data_type] = 0 if not ex_logs[user] else 100. * sum(
                            [el['complete']
                             for el in ex_logs[user]]) / float(len(exercises))

            elif data_type == "effort":
                if "ex:attempts" in data[data.keys(
                )[0]] and "vid:total_seconds_watched" in data[data.keys()[0]]:
                    # exercises and videos would be initialized already
                    for user in data.keys():
                        avg_attempts = 0 if len(exercises) == 0 else sum(
                            data[user]["ex:attempts"].values()) / float(
                                len(exercises))
                        avg_video_points = 0 if len(videos) == 0 else sum(
                            data[user]["vid:total_seconds_watched"].values(
                            )) / float(len(videos))
                        data[user][data_type] = 100. * (
                            0.5 * avg_attempts / 10. +
                            0.5 * avg_video_points / 750.)
                else:
                    data_types += [
                        "ex:attempts", "vid:total_seconds_watched", "effort"
                    ]

            #
            # These are detail stats: you get many per user
            #
            # Just querying out data directly: Video
            elif data_type.startswith("vid:") and data_type[4:] in [
                    f.name for f in VideoLog._meta.fields
            ]:

                for user in data.keys():
                    data[user][data_type] = OrderedDict([
                        (v['video_id'], v[data_type[4:]])
                        for v in vid_logs[user]
                    ])

            # Just querying out data directly: Exercise
            elif data_type.startswith("ex:") and data_type[3:] in [
                    f.name for f in ExerciseLog._meta.fields
            ]:

                for user in data.keys():
                    data[user][data_type] = OrderedDict([
                        (el['exercise_id'], el[data_type[3:]])
                        for el in ex_logs[user]
                    ])

            # User Log Queries
            elif data_type.startswith("user:"******"", "activity",
                                           activity_logs)

                for user in data.keys():
                    data[user][data_type] = [
                        log[data_type[5:]] for log in activity_logs[user]
                    ]

            # User Summary Queries
            elif data_type.startswith("usersum:") and data_type[8:] in [
                    f.name for f in UserLogSummary._meta.fields
            ] and UserLog.is_enabled():

                activity_logs = query_logs(data.keys(), "", "summaryactivity",
                                           activity_logs)

                for user in data.keys():
                    data[user][data_type] = sum(
                        [log[data_type[8:]] for log in activity_logs[user]])
            # Unknown requested quantity
            else:
                raise Exception(
                    "Unknown type: '%s' not in %s" %
                    (data_type, str([f.name
                                     for f in ExerciseLog._meta.fields])))

    # Returning empty list instead of None allows javascript on client
    # side to read 'length' property without error.
    exercises = exercises or []

    videos = videos or []

    return {
        "data": data,
        "topics": topics,
        "exercises": exercises,
        "videos": videos,
    }
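The "effort" metric above blends two averages into a rough 0-100 score: attempts per exercise scaled against 10, and per-video values scaled against 750 (the per-video point maximum mentioned in the stats dictionaries below); note that in this code the second average is filled from vid:total_seconds_watched. A worked example with made-up numbers:

# Made-up numbers tracing the "effort" formula above.
avg_attempts = 6.0        # average attempts per exercise for one user
avg_video_points = 300.0  # average of the per-video values for the same user

effort = 100. * (0.5 * avg_attempts / 10. + 0.5 * avg_video_points / 750.)
print(effort)  # 50.0 -- 30 from the exercise term plus 20 from the video term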
Example 15
 def test_query_logout_admin(self):
     """"""
     self.test_query_login_admin()
     with self.assertNumQueries(FuzzyInt(6, 7) + 0*UserLog.is_enabled()):
         self.browser_logout_user()
Example 16
def generate_fake_exercise_logs(facility_user=None, topics=topics, start_date=datetime.datetime.now() - datetime.timedelta(days=30 * 6)):
    """Add exercise logs for the given topics, for each of the given users.
    If no users are given, they are created.
    If no topics exist, they are taken from the list at the top of this file.

    By default, users start learning randomly between 6 months ago and now.
    """

    date_diff = datetime.datetime.now() - start_date
    exercise_logs = []
    user_logs = []

    # It's not a user: probably a list.
    # Recursive case
    if not hasattr(facility_user, "username"):
        # It's NONE :-/ generate the users first!
        if not facility_user:
            (facility_user, _, _) = generate_fake_facility_users()

        for topic in topics:
            for user in facility_user:
                (elogs, ulogs) = generate_fake_exercise_logs(facility_user=user, topics=[topic], start_date=start_date)
                exercise_logs.append(elogs)
                user_logs.append(ulogs)

    # Actually generate!
    else:
        # Get (or create) user type
        try:
            user_settings = json.loads(facility_user.notes)
        except:
            user_settings = sample_user_settings()
            facility_user.notes = json.dumps(user_settings)
            facility_user.save()
        date_diff_started = datetime.timedelta(seconds=datediff(date_diff, units="seconds") * user_settings["time_in_program"])  # when this user started in the program, relative to NOW

        for topic in topics:
            # Get all exercises related to the topic
            exercises = get_topic_exercises(topic_id=topic)

            # Problem:
            #   Not realistic for students to have lots of unfinished exercises.
            #   If they start them, they tend to get stuck, right?
            #
            # So, need to make it more probable that they will finish an exercise,
            #   and less probable that they start one.
            #
            # What we need is P(streak|started), not P(streak)

            # Probability of doing any particular exercise
            p_exercise = probability_of(qty="exercise", user_settings=user_settings)
            logging.debug("# exercises: %d; p(exercise)=%4.3f, user settings: %s\n" % (len(exercises), p_exercise, json.dumps(user_settings)))

            # of exercises is related to
            for j, exercise in enumerate(exercises):
                if random.random() > p_exercise:
                    continue

                # Probability of completing this exercise, and .. proportion of attempts
                p_completed = probability_of(qty="completed", user_settings=user_settings)
                p_attempts = probability_of(qty="attempts", user_settings=user_settings)

                attempts = int(random.random() * p_attempts * 30 + 10)  # always enough to have completed
                completed = (random.random() < p_completed)
                if completed:
                    streak_progress = 100
                else:
                    streak_progress = max(0, min(90, random.gauss(100 * user_settings["speed_of_learning"], 20)))
                    streak_progress = int(floor(streak_progress / 10.)) * 10
                points = streak_progress / 10 * 12 if completed else 0  # only get points when you master.

                # Choose a rate of exercises, based on their effort level and speed of learning.
                #   Compute the latest possible start time.
                #   Then sample a start time between their start time
                #   and the latest possible start_time
                rate_of_exercises = 0.66 * user_settings["effort_level"] + 0.33 * user_settings["speed_of_learning"]  # exercises per day
                time_for_attempts = min(datetime.timedelta(days=rate_of_exercises * attempts), date_diff_started)  # protect with min
                time_delta_completed = datetime.timedelta(seconds=random.randint(int(datediff(time_for_attempts, units="seconds")), int(datediff(date_diff_started, units="seconds"))))
                date_completed = datetime.datetime.now() - time_delta_completed

                # Always create new
                logging.info("Creating exercise log: %-12s: %-25s (%d points, %d attempts, %d%% streak on %s)" % (
                    facility_user.first_name,
                    exercise["name"],
                    points,
                    attempts,
                    streak_progress,
                    date_completed,
                ))
                try:
                    elog = ExerciseLog.objects.get(user=facility_user, exercise_id=exercise["name"])
                except ExerciseLog.DoesNotExist:
                    elog = ExerciseLog(
                        user=facility_user,
                        exercise_id=exercise["name"],
                        attempts=int(attempts),
                        streak_progress=streak_progress,
                        points=int(points),
                        complete=completed,
                        completion_timestamp=date_completed,
                    )
                    try:
                        elog.save()

                        # For now, make all attempts on an exercise into a single UserLog.
                        seconds_per_attempt = 10 * (1 + user_settings["speed_of_learning"] * random.random())
                        time_to_navigate = 15 * (0.5 + random.random())  #between 7.5s and 22.5s
                        time_to_logout = 5 * (0.5 + random.random()) # between 2.5 and 7.5s
                        if UserLog.is_enabled():
                            ulog = UserLog(
                                user=facility_user,
                                activity_type=1,
                                start_datetime = date_completed - datetime.timedelta(seconds=int(attempts * seconds_per_attempt + time_to_navigate)),
                                end_datetime = date_completed + datetime.timedelta(seconds=time_to_logout),
                                last_active_datetime = date_completed,
                            )
                            ulog.save()
                            user_logs.append(ulog)
                    except Exception as e:
                        logging.error("Error saving exercise log: %s" % e)
                        continue
                exercise_logs.append(elog)

    return (exercise_logs, user_logs)
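The streak/points logic above caps an unfinished learner's streak at 90, snaps it down to a multiple of 10, and awards points only when the exercise is completed. A short trace with fixed numbers:

from math import floor

raw_streak = 73.4                               # e.g. a sampled gauss() value
streak_progress = max(0, min(90, raw_streak))   # incomplete work is capped at 90
streak_progress = int(floor(streak_progress / 10.)) * 10
print(streak_progress)                          # 70 -- snapped down to a multiple of 10

completed = False
print(streak_progress / 10 * 12 if completed else 0)   # 0 -- points require mastery

completed = True                                # a mastered exercise instead
print(100 / 10 * 12 if completed else 0)        # 120 (120.0 under Python 3 division)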
Example 17
def _get_user_usage_data(users, groups=None, period_start=None, period_end=None, group_id=None):
    """
    Returns facility user data, within the given date range.
    """

    groups = groups or set([user.group for user in users])

    # compute period start and end
    # Now compute stats, based on queried data
    num_exercises = len(get_node_cache('Exercise'))
    user_data = OrderedDict()
    group_data = OrderedDict()


    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    video_logs = VideoLog.objects.filter(user__in=users)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    if period_start:
        exercise_logs = exercise_logs.filter(completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
        login_logs = login_logs.filter(start_datetime__gte=period_start)
    if period_end:
        exercise_logs = exercise_logs.filter(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
        login_logs = login_logs.filter(end_datetime__lte=period_end)


    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(login_logs.values("activity_type", "total_seconds", "user__pk"))

    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["id"] = user.pk
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group


        user_data[user.pk]["total_report_views"] = 0#report_stats["count__sum"] or 0
        user_data[user.pk]["total_logins"] =0# login_stats["count__sum"] or 0
        user_data[user.pk]["total_hours"] = 0#login_stats["total_seconds__sum"] or 0)/3600.

        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.
        user_data[user.pk]["exercises_mastered"] = []

        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []


    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += 1. / num_exercises
        user_data[elog["user__pk"]]["exercises_mastered"].append(elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (llog["total_seconds"]) / 3600.
            user_data[llog["user__pk"]]["total_logins"] += 1

    for group in list(groups) + [None]*(group_id==None or group_id=="Ungrouped"):  # None for ungrouped, if no group_id passed.
        group_pk = getattr(group, "pk", None)
        group_name = getattr(group, "name", _("Ungrouped"))
        group_data[group_pk] = {
            "id": group_pk,
            "name": group_name,
            "total_logins": 0,
            "total_hours": 0,
            "total_users": 0,
            "total_videos": 0,
            "total_exercises": 0,
            "pct_mastery": 0,
        }

    # Add group data.  Allow a fake group "Ungrouped"
    for user in users:
        group_pk = getattr(user.group, "pk", None)
        if group_pk not in group_data:
            logging.error("User %s still in nonexistent group %s!" % (user.id, group_pk))
            continue
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[user.pk]["total_exercises"]

        total_mastery_so_far = (group_data[group_pk]["pct_mastery"] * (group_data[group_pk]["total_users"] - 1) + user_data[user.pk]["pct_mastery"])
        group_data[group_pk]["pct_mastery"] =  total_mastery_so_far / group_data[group_pk]["total_users"]

    if len(group_data) == 1 and group_data.has_key(None):
        if not group_data[None]["total_users"]:
            del group_data[None]

    return (user_data, group_data)
Example 18
def _get_user_usage_data(users,
                         groups=None,
                         period_start=None,
                         period_end=None,
                         group_id=None):
    """
    Returns facility user data, within the given date range.
    """

    groups = groups or set([user.group for user in users])

    # compute period start and end
    # Now compute stats, based on queried data
    num_exercises = len(get_node_cache('Exercise'))
    user_data = OrderedDict()
    group_data = OrderedDict()

    # Make queries efficiently
    exercise_logs = ExerciseLog.objects.filter(user__in=users, complete=True)
    video_logs = VideoLog.objects.filter(user__in=users,
                                         total_seconds_watched__gt=0)
    login_logs = UserLogSummary.objects.filter(user__in=users)

    # filter results
    login_logs = login_logs.filter(total_seconds__gt=0)
    if period_start:
        exercise_logs = exercise_logs.filter(
            completion_timestamp__gte=period_start)
        video_logs = video_logs.filter(completion_timestamp__gte=period_start)
    if period_end:
        # MUST: Fix the midnight bug where period end covers up to the prior day only because
        # period end is datetime(year, month, day, hour=0, minute=0), meaning midnight of previous day.
        # Example:
        #   If period_end == '2014-12-01', we cannot include the records dated '2014-12-01 09:30'.
        #   So to fix this, we change it to '2014-12-01 23:59.999999'.
        period_end = dateutil.parser.parse(period_end)
        period_end = period_end + dateutil.relativedelta.relativedelta(
            days=+1, microseconds=-1)
        exercise_logs = exercise_logs.filter(
            completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(completion_timestamp__lte=period_end)
    if period_start and period_end:
        exercise_logs = exercise_logs.filter(
            Q(completion_timestamp__gte=period_start)
            & Q(completion_timestamp__lte=period_end))

        q1 = Q(completion_timestamp__isnull=False) & \
            Q(completion_timestamp__gte=period_start) & \
            Q(completion_timestamp__lte=period_end)
        video_logs = video_logs.filter(q1)

        login_q1 = Q(start_datetime__gte=period_start) & Q(start_datetime__lte=period_end) & \
            Q(end_datetime__gte=period_start) & Q(end_datetime__lte=period_end)
        login_logs = login_logs.filter(login_q1)
    # Force results in a single query
    exercise_logs = list(exercise_logs.values("exercise_id", "user__pk"))
    video_logs = list(video_logs.values("video_id", "user__pk"))
    login_logs = list(
        login_logs.values("activity_type", "total_seconds", "user__pk"))

    for user in users:
        user_data[user.pk] = OrderedDict()
        user_data[user.pk]["id"] = user.pk
        user_data[user.pk]["first_name"] = user.first_name
        user_data[user.pk]["last_name"] = user.last_name
        user_data[user.pk]["username"] = user.username
        user_data[user.pk]["group"] = user.group

        user_data[user.pk][
            "total_report_views"] = 0  #report_stats["count__sum"] or 0
        user_data[
            user.pk]["total_logins"] = 0  # login_stats["count__sum"] or 0
        user_data[user.pk][
            "total_hours"] = 0  #login_stats["total_seconds__sum"] or 0)/3600.

        user_data[user.pk]["total_exercises"] = 0
        user_data[user.pk]["pct_mastery"] = 0.
        user_data[user.pk]["exercises_mastered"] = []

        user_data[user.pk]["total_videos"] = 0
        user_data[user.pk]["videos_watched"] = []

    for elog in exercise_logs:
        user_data[elog["user__pk"]]["total_exercises"] += 1
        user_data[elog["user__pk"]]["pct_mastery"] += 1. / num_exercises
        user_data[elog["user__pk"]]["exercises_mastered"].append(
            elog["exercise_id"])

    for vlog in video_logs:
        user_data[vlog["user__pk"]]["total_videos"] += 1
        user_data[vlog["user__pk"]]["videos_watched"].append(vlog["video_id"])

    for llog in login_logs:
        if llog["activity_type"] == UserLog.get_activity_int("coachreport"):
            user_data[llog["user__pk"]]["total_report_views"] += 1
        elif llog["activity_type"] == UserLog.get_activity_int("login"):
            user_data[llog["user__pk"]]["total_hours"] += (
                llog["total_seconds"]) / 3600.
            user_data[llog["user__pk"]]["total_logins"] += 1

    for group in list(groups) + [None] * (
            group_id == None or group_id
            == "Ungrouped"):  # None for ungrouped, if no group_id passed.
        group_pk = getattr(group, "pk", None)
        group_name = getattr(group, "name", _("Ungrouped"))
        group_data[group_pk] = {
            "id": group_pk,
            "name": group_name,
            "total_logins": 0,
            "total_hours": 0,
            "total_users": 0,
            "total_videos": 0,
            "total_exercises": 0,
            "pct_mastery": 0,
        }

    # Add group data.  Allow a fake group "Ungrouped"
    for user in users:
        group_pk = getattr(user.group, "pk", None)
        if group_pk not in group_data:
            logging.error("User %s still in nonexistent group %s!" %
                          (user.id, group_pk))
            continue
        group_data[group_pk]["total_users"] += 1
        group_data[group_pk]["total_logins"] += user_data[
            user.pk]["total_logins"]
        group_data[group_pk]["total_hours"] += user_data[
            user.pk]["total_hours"]
        group_data[group_pk]["total_videos"] += user_data[
            user.pk]["total_videos"]
        group_data[group_pk]["total_exercises"] += user_data[
            user.pk]["total_exercises"]

        total_mastery_so_far = (group_data[group_pk]["pct_mastery"] *
                                (group_data[group_pk]["total_users"] - 1) +
                                user_data[user.pk]["pct_mastery"])
        group_data[group_pk][
            "pct_mastery"] = total_mastery_so_far / group_data[group_pk][
                "total_users"]

    if len(group_data) == 1 and group_data.has_key(None):
        if not group_data[None]["total_users"]:
            del group_data[None]

    return (user_data, group_data)
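The group pct_mastery update above is a running mean: after adding the n-th user it computes (old_mean * (n - 1) + x_n) / n, which equals the plain average of every user added so far. A tiny standalone check:

# Running-mean update, as used for group_data[...]["pct_mastery"] above.
user_masteries = [0.2, 0.5, 0.8]   # made-up per-user pct_mastery values

running = 0.0
for n, x in enumerate(user_masteries, start=1):
    running = (running * (n - 1) + x) / n

print(running)                                        # 0.5
print(sum(user_masteries) / len(user_masteries))      # 0.5 -- same as the plain mean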
Example 19
def tabular_view(request, facility, report_type="exercise"):
    """Tabular view also gets data server-side."""
    # Define how students are ordered--used to be as efficient as possible.
    student_ordering = ["last_name", "first_name", "username"]

    # Get a list of topics (sorted) and groups
    topics = [get_node_cache("Topic").get(tid) for tid in get_knowledgemap_topics()]
    (groups, facilities) = get_accessible_objects_from_logged_in_user(request, facility=facility)
    context = plotting_metadata_context(request, facility=facility)
    context.update(
        {
            # For translators: the following two translations are nouns
            "report_types": (_("exercise"), _("video")),
            "request_report_type": report_type,
            "topics": [{"id": t[0]["id"], "title": t[0]["title"]} for t in topics if t],
        }
    )

    # get querystring info
    topic_id = request.GET.get("topic", "")
    # No valid data; just show generic
    if not topic_id or not re.match("^[\w\-]+$", topic_id):
        return context

    group_id = request.GET.get("group", "")
    if group_id:
        # Narrow by group
        users = FacilityUser.objects.filter(group=group_id, is_teacher=False).order_by(*student_ordering)

    elif facility:
        # Narrow by facility
        search_groups = [groups_dict["groups"] for groups_dict in groups if groups_dict["facility"] == facility.id]
        assert len(search_groups) <= 1, "Should only have one or zero matches."

        # Return groups and ungrouped
        search_groups = search_groups[0]  # make sure to include ungrouped students
        users = FacilityUser.objects.filter(
            Q(group__in=search_groups) | Q(group=None, facility=facility), is_teacher=False
        ).order_by(*student_ordering)

    else:
        # Show all (including ungrouped)
        search_groups = []
        for groups_dict in groups:
            search_groups += groups_dict["groups"]
        users = FacilityUser.objects.filter(Q(group__in=search_groups) | Q(group=None), is_teacher=False).order_by(
            *student_ordering
        )

    # We have enough data to render over a group of students
    # Get type-specific information
    if report_type == "exercise":
        # Fill in exercises
        exercises = get_topic_exercises(topic_id=topic_id)
        exercises = sorted(exercises, key=lambda e: (e["h_position"], e["v_position"]))
        context["exercises"] = exercises

        # More code, but much faster
        exercise_names = [ex["name"] for ex in context["exercises"]]
        # Get students
        context["students"] = []
        exlogs = (
            ExerciseLog.objects.filter(user__in=users, exercise_id__in=exercise_names)
            .order_by(*["user__%s" % field for field in student_ordering])
            .values("user__id", "struggling", "complete", "exercise_id")
        )
        exlogs = list(exlogs)  # force the query to be evaluated

        exlog_idx = 0
        for user in users:
            log_table = {}
            while exlog_idx < len(exlogs) and exlogs[exlog_idx]["user__id"] == user.id:
                log_table[exlogs[exlog_idx]["exercise_id"]] = exlogs[exlog_idx]
                exlog_idx += 1

            context["students"].append(
                {  # this could be DRYer
                    "first_name": user.first_name,
                    "last_name": user.last_name,
                    "username": user.username,
                    "name": user.get_name(),
                    "id": user.id,
                    "exercise_logs": log_table,
                }
            )

    elif report_type == "video":
        # Fill in videos
        context["videos"] = get_topic_videos(topic_id=topic_id)

        # More code, but much faster
        video_ids = [vid["id"] for vid in context["videos"]]
        # Get students
        context["students"] = []
        vidlogs = (
            VideoLog.objects.filter(user__in=users, video_id__in=video_ids)
            .order_by(*["user__%s" % field for field in student_ordering])
            .values("user__id", "complete", "video_id", "total_seconds_watched", "points")
        )
        vidlogs = list(vidlogs)  # force the query to be executed now

        vidlog_idx = 0
        for user in users:
            log_table = {}
            while vidlog_idx < len(vidlogs) and vidlogs[vidlog_idx]["user__id"] == user.id:
                log_table[vidlogs[vidlog_idx]["video_id"]] = vidlogs[vidlog_idx]
                vidlog_idx += 1

            context["students"].append(
                {  # this could be DRYer
                    "first_name": user.first_name,
                    "last_name": user.last_name,
                    "username": user.username,
                    "name": user.get_name(),
                    "id": user.id,
                    "video_logs": log_table,
                }
            )

    else:
        raise Http404(_("Unknown report_type: %(report_type)s") % {"report_type": report_type})

    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update Teacher userlog activity login: %s" % e)

    return context
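The exlog_idx / vidlog_idx loops above work because users and the log rows are ordered by the same student_ordering fields, so each user's logs are collected in a single forward pass instead of one query (or filter) per user. The same two-pointer pattern in isolation, with placeholder data:

# Both sequences are pre-sorted by the same user key, so one pass suffices.
users = ["alice", "bob", "carol"]
logs = [                       # (user, exercise_id) rows, sorted by user
    ("alice", "addition_1"),
    ("alice", "subtraction_1"),
    ("carol", "addition_1"),
]

log_idx = 0
per_user = {}
for user in users:
    table = {}
    while log_idx < len(logs) and logs[log_idx][0] == user:
        table[logs[log_idx][1]] = logs[log_idx]
        log_idx += 1
    per_user[user] = table

print(per_user["bob"])              # {} -- no rows for bob
print(sorted(per_user["alice"]))    # ['addition_1', 'subtraction_1']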
Example 20
def api_data(request, xaxis="", yaxis=""):
    """Request contains information about what data are requested (who, what, and how).

    Response should be a JSON object
    * data contains the data, structred by user and then datatype
    * the rest of the data is metadata, useful for displaying detailed info about data.
    """

    # Get the request form
    try:
        form = get_data_form(request, xaxis=xaxis,
                             yaxis=yaxis)  # (data=request.REQUEST)
    except Exception as e:
        # In investigating #1509: we can catch SQL errors here and communicate clearer error
        #   messages with the user here.  For now, we have no such error to catch, so just
        #   pass the errors on to the user (via the @api_handle_error_with_json decorator).
        raise e

    # Query out the data: who?
    if form.data.get("user"):
        facility = []
        groups = []
        users = [get_object_or_404(FacilityUser, id=form.data.get("user"))]
    elif form.data.get("group"):
        facility = []
        if form.data.get("group") == "Ungrouped":
            groups = []
            users = FacilityUser.objects.filter(
                facility__in=[form.data.get("facility")],
                group__isnull=True,
                is_teacher=False).order_by("last_name", "first_name")
        else:
            groups = [
                get_object_or_404(FacilityGroup, id=form.data.get("group"))
            ]
            users = FacilityUser.objects.filter(group=form.data.get("group"),
                                                is_teacher=False).order_by(
                                                    "last_name", "first_name")
    elif form.data.get("facility"):
        facility = get_object_or_404(Facility, id=form.data.get("facility"))
        groups = FacilityGroup.objects.filter(
            facility__in=[form.data.get("facility")])
        users = FacilityUser.objects.filter(
            facility__in=[form.data.get("facility")],
            is_teacher=False).order_by("last_name", "first_name")
    else:
        # Allow superuser to see the data.
        if request.user.is_authenticated() and request.user.is_superuser:
            facility = []
            groups = []
            users = FacilityUser.objects.all().order_by(
                "last_name", "first_name")
        else:
            return HttpResponseNotFound(
                _("Did not specify facility, group, nor user."))

    # Query out the data: where?
    if not form.data.get("topic_path"):
        return HttpResponseNotFound(_("Must specify a topic path"))

    # Query out the data: what?
    computed_data = compute_data(
        data_types=[form.data.get("xaxis"),
                    form.data.get("yaxis")],
        who=users,
        where=form.data.get("topic_path"))

    # Quickly add back in exercise meta-data (could potentially be used in future for other data too!)
    ex_nodes = get_node_cache()["Exercise"]
    exercises = []
    for e in computed_data["exercises"]:
        exercises.append({
            "slug": e,
            "full_name": ex_nodes[e]["display_name"],
            "url": ex_nodes[e]["path"],
        })

    json_data = {
        "data":
        computed_data["data"],
        "exercises":
        exercises,
        "videos":
        computed_data["videos"],
        "users":
        dict(
            zip([u.id for u in users], [
                "%s, %s" % (u.last_name, u.first_name)
                if u.last_name or u.first_name else u.username for u in users
            ])),
        "groups":
        dict(
            zip(
                [g.id for g in groups],
                dict(zip(["id", "name"], [(g.id, g.name) for g in groups])),
            )),
        "facility":
        None if not facility else {
            "name": facility.name,
            "id": facility.id,
        }
    }

    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(
                user, activity_type="login"
            )  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error(
                "Failed to update Teacher userlog activity login: %s" % e)

    # Now we have data, stream it back with a handler for date-times
    return JsonResponse(json_data)
Example No. 21
    {"key": "ex:attempts",        "name": _("Attempts"),   "type": "number", "description": _("Number of times submitting an answer to an exercise.")},
    {"key": "ex:streak_progress", "name": _("Streak"),     "type": "number", "description": _("Maximum number of consecutive correct answers on an exercise.")},
    {"key": "ex:points",          "name": _("Exercise points"),    "type": "number", "description": _("[Pointless at the moment; tracks mastery linearly]")},
    { "key": "ex:completion_timestamp", "name": _("Time exercise completed"),"type": "datetime", "description": _("Day/time the exercise was completed.") },
    {"key": "vid:points",          "name": _("Video points"),      "type": "number", "description": _("Points earned while watching a video (750 max / video).")},
    { "key": "vid:total_seconds_watched","name": _("Video time"),   "type": "number", "description": _("Total seconds spent watching a video.") },
    { "key": "vid:completion_timestamp", "name": _("Time video completed"),"type": "datetime", "description": _("Day/time the video was completed.") },
]

user_log_stats_dict = [
    {"key": "usersum:total_seconds",     "name": _("Time Active (s)"),        "type": "number",   "description": _("Total time spent actively logged in.")},
    {"key": "user:total_seconds",        "name": _("Active Time Per Login"),  "type": "number",   "description": _("Duration of each login session."), "noscatter": True, "timeline": True},
    {"key": "user:last_active_datetime", "name": _("Time Session Completed"), "type": "datetime", "description": _("Day/time the login session finished.")},
]

if UserLog.is_enabled():
    stats_dict.extend(user_log_stats_dict)

def get_data_form(request, *args, **kwargs):
    """Get the basic data form, by combining information from
    keyword arguments and the request.REQUEST object.
    Along the way, check permissions to make sure whatever's being requested is OK.

    Request objects get priority over keyword args.
    """
    assert not args, "all non-request args should be keyword args"

    # Pull the form parameters out of the request, falling back to the keyword arguments.
    data = dict()
    # Default to empty string, as it makes template handling cleaner later.
    for field in ["facility", "group", "user", "xaxis", "yaxis"]:
Example No. 22
 def test_query_logout_student(self):
     """"""
     self.test_query_login_student()
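     # UserLog.is_enabled() returns a bool, so the expected query count grows by
     # 11 when user activity logging is turned on (the bool multiplies as 0 or 1).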
     with self.assertNumQueries(FuzzyInt(2, 11) + 11*UserLog.is_enabled()):
         self.browser_logout_user()
Example No. 23
def generate_fake_exercise_logs(
    facility_user=None, topics=topics, start_date=datetime.datetime.now() - datetime.timedelta(days=30 * 6)
):
    """Add exercise logs for the given topics, for each of the given users.
    If no users are given, they are created.
    If no topics exist, they are taken from the list at the top of this file.

    By default, users start learning randomly between 6 months ago and now.
    """

    date_diff = datetime.datetime.now() - start_date
    exercise_logs = []
    user_logs = []

    # It's not a user: probably a list.
    # Recursive case
    if not hasattr(facility_user, "username"):
        # It's NONE :-/ generate the users first!
        if not facility_user:
            (facility_user, _, _) = generate_fake_facility_users()

        for topic in topics:
            for user in facility_user:
                (elogs, ulogs) = generate_fake_exercise_logs(facility_user=user, topics=[topic], start_date=start_date)
                exercise_logs.append(elogs)
                user_logs.append(ulogs)

    # Actually generate!
    else:
        # Get (or create) user type
        try:
            user_settings = json.loads(facility_user.notes)
        except:
            user_settings = sample_user_settings()
            facility_user.notes = json.dumps(user_settings)
            facility_user.save()
        date_diff_started = datetime.timedelta(
            seconds=datediff(date_diff, units="seconds") * user_settings["time_in_program"]
        )  # when this user started in the program, relative to NOW

        for topic in topics:
            # Get all exercises related to the topic
            exercises = get_topic_exercises(topic_id=topic)

            # Problem:
            #   Not realistic for students to have lots of unfinished exercises.
            #   If they start them, they tend to get stuck, right?
            #
            # So, need to make it more probable that they will finish an exercise,
            #   and less probable that they start one.
            #
            # What we need is P(streak|started), not P(streak)
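            # (Recall P(streak | started) = P(streak AND started) / P(started):
            #  conditioning on "started" makes completion much more likely than the
            #  unconditional P(streak) would suggest.)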

            # Probability of doing any particular exercise
            p_exercise = probability_of(qty="exercise", user_settings=user_settings)
            logging.info(
                "# exercises: %d; p(exercise)=%4.3f, user settings: %s\n"
                % (len(exercises), p_exercise, json.dumps(user_settings))
            )

            # The number of exercises a user attempts is driven by p_exercise:
            # each exercise is skipped with probability (1 - p_exercise).
            for j, exercise in enumerate(exercises):
                if random.random() > p_exercise:
                    continue

                # Propensity to attempt this exercise, used below to scale the number of attempts generated.
                p_attempts = probability_of(qty="attempts", user_settings=user_settings)

                attempts = int(random.random() * p_attempts * 30 + 10)  # always enough to have completed

                elog, created = ExerciseLog.objects.get_or_create(user=facility_user, exercise_id=exercise["id"])

                alogs = []

                for i in range(0, attempts):
                    alog = AttemptLog.objects.create(
                        user=facility_user, exercise_id=exercise["id"], timestamp=start_date + date_diff * i / attempts
                    )
                    alogs.append(alog)
                    if random.random() < user_settings["speed_of_learning"]:
                        alog.correct = True
                        alog.points = 10
                    alog.save()

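                # Summarize the attempt logs: streak progress and points are derived
                # from the 10 most recent attempts (each correct attempt earns 10 points).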
                elog.attempts = attempts
                elog.latest_activity_timestamp = start_date + date_diff
                elog.streak_progress = sum([log.correct for log in alogs][-10:]) * 10
                elog.points = sum([log.points for log in alogs][-10:])

                elog.save()

                exercise_logs.append(elog)

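                # Also create a UserLog entry spanning the same period so activity
                # reports have data to show (activity_type=1 presumably means "login").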
                ulog = UserLog(
                    user=facility_user,
                    activity_type=1,
                    start_datetime=start_date,
                    end_datetime=start_date + date_diff,
                    last_active_datetime=start_date + date_diff,
                )
                ulog.save()
                user_logs.append(ulog)

    return (exercise_logs, user_logs)
Example No. 24
def compute_data(data_types, who, where):
    """
    Compute the data in "data_types" for each user in "who", for the topics selected by "where"

    who: list of users
    where: topic_path
    data_types can include:
        pct_mastery
        effort
        attempts
    """

    # None indicates that the data hasn't been queried yet.
    #   We'll query it on demand, for efficiency
    topics = None
    exercises = None
    videos = None

    # Initialize an empty dictionary of data, video logs, exercise logs, for each user
    data = OrderedDict(zip([w.id for w in who], [dict() for i in range(len(who))]))  # maintain the order of the users
    vid_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    ex_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))
    if UserLog.is_enabled():
        activity_logs = dict(zip([w.id for w in who], [[] for i in range(len(who))]))

    # Set up queries (but don't run them), so we have really easy aliases.
    #   Only do them if they haven't been done yet (tell this by passing in a value to the lambda function)
    # Topics: topics.
    # Exercises: names (ids for ExerciseLog objects)
    # Videos: video_id (ids for VideoLog objects)

    # This lambda partial creates a function to return all items with a particular path from the NODE_CACHE.
    search_fun_single_path = partial(lambda t, p: t["path"].startswith(p), p=tuple(where))
    # This lambda partial creates a function to return all items with paths matching a list of paths from NODE_CACHE.
    search_fun_multi_path = partial(lambda ts, p: any([t["path"].startswith(p) for t in ts]),  p=tuple(where))
    # Functions that use the functions defined above to return topics, exercises, and videos based on paths.
    query_topics = partial(lambda t, sf: t if t is not None else [t[0]["id"] for t in filter(sf, get_node_cache('Topic').values())], sf=search_fun_single_path)
    query_exercises = partial(lambda e, sf: e if e is not None else [ex[0]["id"] for ex in filter(sf, get_node_cache('Exercise').values())], sf=search_fun_multi_path)
    query_videos = partial(lambda v, sf: v if v is not None else [vid[0]["id"] for vid in filter(sf, get_node_cache('Video').values())], sf=search_fun_multi_path)

    # No users, don't bother.
    if len(who) > 0:

        # Query out all exercises, videos, exercise logs, and video logs before looping to limit requests.
        # This means we could pull data for n-dimensional coach report displays with the same number of requests!
        # Note: User activity is polled inside the loop, to prevent possible slowdown for exercise and video reports.
        exercises = query_exercises(exercises)

        videos = query_videos(videos)

        if exercises:
            ex_logs = query_logs(data.keys(), exercises, "exercise", ex_logs)

        if videos:
            vid_logs = query_logs(data.keys(), videos, "video", vid_logs)

        for data_type in (data_types if not hasattr(data_types, "lower") else [data_types]):  # wrap a bare string into a single-item list, if necessary
            if data_type in data[data.keys()[0]]:  # if the first user has it, then all do; no need to calc again.
                continue

            #
            # These are summary stats: you only get one per user
            #
            if data_type == "pct_mastery":

                # Efficient query out, spread out to dict
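                # Mastery = completed exercises / all exercises in the selected topic, as a percentage.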
                for user in data.keys():
                    data[user][data_type] = 0 if not ex_logs[user] else 100. * sum([el['complete'] for el in ex_logs[user]]) / float(len(exercises))

            elif data_type == "effort":
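                # Effort blends exercise and video activity 50/50: average attempts are
                # normalized against 10 per exercise, and average seconds watched against 750 per video.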
                if "ex:attempts" in data[data.keys()[0]] and "vid:total_seconds_watched" in data[data.keys()[0]]:
                    # exercises and videos would be initialized already
                    for user in data.keys():
                        avg_attempts = 0 if len(exercises) == 0 else sum(data[user]["ex:attempts"].values()) / float(len(exercises))
                        avg_video_points = 0 if len(videos) == 0 else sum(data[user]["vid:total_seconds_watched"].values()) / float(len(videos))
                        data[user][data_type] = 100. * (0.5 * avg_attempts / 10. + 0.5 * avg_video_points / 750.)
                else:
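                    # Prerequisite stats are missing: queue them (and "effort" again) so
                    # later iterations of this loop compute them first.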
                    data_types += ["ex:attempts", "vid:total_seconds_watched", "effort"]

            #
            # These are detail stats: you get many per user
            #
            # Just querying out data directly: Video
            elif data_type.startswith("vid:") and data_type[4:] in [f.name for f in VideoLog._meta.fields]:

                for user in data.keys():
                    data[user][data_type] = OrderedDict([(v['video_id'], v[data_type[4:]]) for v in vid_logs[user]])

            # Just querying out data directly: Exercise
            elif data_type.startswith("ex:") and data_type[3:] in [f.name for f in ExerciseLog._meta.fields]:

                for user in data.keys():
                    data[user][data_type] = OrderedDict([(el['exercise_id'], el[data_type[3:]]) for el in ex_logs[user]])

            # User Log Queries
            elif data_type.startswith("user:"******"", "activity", activity_logs)

                for user in data.keys():
                    data[user][data_type] = [log[data_type[5:]] for log in activity_logs[user]]

            # User Summary Queries
            elif data_type.startswith("usersum:") and data_type[8:] in [f.name for f in UserLogSummary._meta.fields] and UserLog.is_enabled():

                activity_logs = query_logs(data.keys(), "", "summaryactivity", activity_logs)

                for user in data.keys():
                    data[user][data_type] = sum([log[data_type[8:]] for log in activity_logs[user]])
            # Unknown requested quantity
            else:
                raise Exception("Unknown type: '%s' not in %s" % (data_type, str([f.name for f in ExerciseLog._meta.fields])))

    # Returning empty list instead of None allows javascript on client
    # side to read 'length' property without error.
    exercises = exercises or []

    videos = videos or []

    return {
        "data": data,
        "topics": topics,
        "exercises": exercises,
        "videos": videos,
    }
Example No. 25
 def test_query_logout_admin(self):
     """"""
     self.test_query_login_admin()
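     # The 0 * UserLog.is_enabled() term makes explicit that admin logout issues no
     # extra UserLog-related queries, whether or not activity logging is enabled.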
     with self.assertNumQueries(FuzzyInt(6, 7) + 0 * UserLog.is_enabled()):
         self.browser_logout_user()
Example No. 26
def api_data(request, xaxis="", yaxis=""):
    """Request contains information about what data are requested (who, what, and how).

    Response should be a JSON object
    * data contains the data, structured by user and then datatype
    * the rest of the response is metadata, useful for displaying detailed info about the data.
    """

    # Get the request form
    try:
        form = get_data_form(request, xaxis=xaxis, yaxis=yaxis)  # (data=request.REQUEST)
    except Exception as e:
        # In investigating #1509: we can catch SQL errors here and communicate clearer error
        #   messages with the user here.  For now, we have no such error to catch, so just
        #   pass the errors on to the user (via the @api_handle_error_with_json decorator).
        raise e

    # Query out the data: who?
    if form.data.get("user"):
        facility = []
        groups = []
        users = [get_object_or_404(FacilityUser, id=form.data.get("user"))]
    elif form.data.get("group"):
        facility = []
        groups = [get_object_or_404(FacilityGroup, id=form.data.get("group"))]
        users = FacilityUser.objects.filter(group=form.data.get("group"), is_teacher=False).order_by("last_name", "first_name")
    elif form.data.get("facility"):
        facility = get_object_or_404(Facility, id=form.data.get("facility"))
        groups = FacilityGroup.objects.filter(facility__in=[form.data.get("facility")])
        users = FacilityUser.objects.filter(facility__in=[form.data.get("facility")], is_teacher=False).order_by("last_name", "first_name")
    else:
        return HttpResponseNotFound(_("Did not specify facility, group, nor user."))

    # Query out the data: where?
    if not form.data.get("topic_path"):
        return HttpResponseNotFound(_("Must specify a topic path"))

    # Query out the data: what?
    computed_data = compute_data(data_types=[form.data.get("xaxis"), form.data.get("yaxis")], who=users, where=form.data.get("topic_path"))

    # Quickly add back in exercise meta-data (could potentially be used in future for other data too!)
    ex_nodes = get_node_cache()["Exercise"]
    exercises = []
    for e in computed_data["exercises"]:
        exercises.append({
            "slug": e,
            "full_name": ex_nodes[e][0]["display_name"],
            "url": ex_nodes[e][0]["path"],
        })

    json_data = {
        "data": computed_data["data"],
        "exercises": exercises,
        "videos": computed_data["videos"],
        "users": dict(zip([u.id for u in users],
                          ["%s, %s" % (u.last_name, u.first_name) for u in users]
                     )),
        "groups": dict(zip([g.id for g in groups],
                           dict(zip(["id", "name"], [(g.id, g.name) for g in groups])),
                     )),
        "facility": None if not facility else {
            "name": facility.name,
            "id": facility.id,
        }
    }

    if "facility_user" in request.session:
        try:
            # Log a "begin" and end here
            user = request.session["facility_user"]
            UserLog.begin_user_activity(user, activity_type="coachreport")
            UserLog.update_user_activity(user, activity_type="login")  # to track active login time for teachers
            UserLog.end_user_activity(user, activity_type="coachreport")
        except ValidationError as e:
            # Never report this error; don't want this logging to block other functionality.
            logging.error("Failed to update Teacher userlog activity login: %s" % e)

    # Now we have data, stream it back with a handler for date-times
    return JsonResponse(json_data)
Example No. 27
 def test_query_logout_student(self):
     """"""
     self.test_query_login_student()
     with self.assertNumQueries(
             FuzzyInt(4, 11) + 11 * UserLog.is_enabled()):
         self.browser_logout_user()