Example No. 1
    def user_progress_detail(cls, user_id, playlist_id, language=None):
        """
        Return a list of video, exercise, and quiz log PlaylistProgressDetail
        objects associated with a specific user and playlist ID.
        """
        user = FacilityUser.objects.get(id=user_id)
        playlist = get_topic_node(content_id=playlist_id)

        pl_video_ids, pl_exercise_ids = cls.get_playlist_entry_ids(playlist)

        # Retrieve video, exercise, and quiz logs that appear in this playlist
        user_vid_logs, user_ex_logs = cls.get_user_logs(
            user, pl_video_ids, pl_exercise_ids)

        # Finally, sort an ordered list of the playlist entries, with user progress
        # injected where it exists.
        progress_details = list()
        for leaf_node in get_topic_nodes(parent=playlist_id,
                                         language=language):
            entity_id = leaf_node.get("id")
            kind = leaf_node.get("kind")

            status = "notstarted"
            score = 0

            if kind == "Video":
                vid_log = next((vid_log for vid_log in user_vid_logs
                                if vid_log["video_id"] == entity_id), None)
                if vid_log:
                    if vid_log.get("complete"):
                        status = "complete"
                    elif vid_log.get("total_seconds_watched"):
                        status = "inprogress"

                    score = int(
                        float(vid_log.get("points")) / float(750) * 100)

            elif kind == "Exercise":
                ex_log = next((ex_log for ex_log in user_ex_logs
                               if ex_log["exercise_id"] == entity_id), None)
                if ex_log:
                    if ex_log.get("struggling"):
                        status = "struggling"
                    elif ex_log.get("complete"):
                        status = "complete"
                    elif ex_log.get("attempts"):
                        status = "inprogress"

                    score = ex_log.get('streak_progress')

            progress_details.append(
                PlaylistProgressDetail(id=entity_id,
                                       title=leaf_node["title"],
                                       kind=kind,
                                       status=status,
                                       score=score,
                                       path=leaf_node["path"]))

        return progress_details
Example No. 2
    def user_progress_detail(cls, user_id, playlist_id, language=None):
        """
        Return a list of video, exercise, and quiz log PlaylistProgressDetail
        objects associated with a specific user and playlist ID.
        """
        user = FacilityUser.objects.get(id=user_id)
        playlist = get_topic_node(content_id=playlist_id)

        pl_video_ids, pl_exercise_ids = cls.get_playlist_entry_ids(playlist)

        # Retrieve video, exercise, and quiz logs that appear in this playlist
        user_vid_logs, user_ex_logs = cls.get_user_logs(user, pl_video_ids, pl_exercise_ids)

        # Finally, sort an ordered list of the playlist entries, with user progress
        # injected where it exists.
        progress_details = list()
        for leaf_node in get_topic_nodes(parent=playlist_id, language=language):
            entity_id = leaf_node.get("id")
            kind = leaf_node.get("kind")

            status = "notstarted"
            score = 0

            if kind == "Video":
                vid_log = next((vid_log for vid_log in user_vid_logs if vid_log["video_id"] == entity_id), None)
                if vid_log:
                    if vid_log.get("complete"):
                        status = "complete"
                    elif vid_log.get("total_seconds_watched"):
                        status = "inprogress"

                    score = int(float(vid_log.get("points")) / float(750) * 100)

            elif kind == "Exercise":
                ex_log = next((ex_log for ex_log in user_ex_logs if ex_log["exercise_id"] == entity_id), None)
                if ex_log:
                    if ex_log.get("struggling"):
                        status = "struggling"
                    elif ex_log.get("complete"):
                        status = "complete"
                    elif ex_log.get("attempts"):
                        status = "inprogress"

                    score = ex_log.get('streak_progress')

            progress_details.append(PlaylistProgressDetail(
                id=entity_id,
                title=leaf_node["title"],
                kind=kind,
                status=status,
                score=score,
                path=leaf_node["path"]
            ))

        return progress_details
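Both variants above derive the video score by normalizing the log's points against a 750-point ceiling. A minimal standalone sketch of that mapping (the helper name and sample values are illustrative, not part of the original code):

def video_score(points, max_points=750):
    """Map raw video points onto a 0-100 score, as in the examples above."""
    return int(float(points) / float(max_points) * 100)

# e.g. a log with 375 points maps to a score of 50
assert video_score(375) == 50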
Example No. 3
 def test_topic_availability(self):
     nodes = get_content_items()
     for topic in nodes:
         if topic.get("kind") == "Topic":
             any_available = any([
                 item.get("available", False)
                 for item in get_topic_nodes(parent=topic.get("id"))
             ])
             self.assertEqual(
                 topic["available"], any_available,
                 "Topic availability for {topic} did not match child availability when any children are available."
                 .format(topic=topic.get("title")))
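The assertion above encodes a simple rule: a topic counts as available exactly when at least one of its children is available. A minimal sketch of that check, assuming child nodes are plain dicts with an "available" flag as in the test:

def topic_is_available(children):
    """A topic is available iff any of its child nodes is available."""
    return any(child.get("available", False) for child in children)

# e.g. one available child out of three is enough
assert topic_is_available([{"available": False}, {"available": True}, {}])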
Example No. 4
 def test_topic_availability(self):
     nodes = get_content_items()
     for topic in nodes:
         if topic.get("kind") == "Topic":
             any_available = any([item.get("available", False) for item in get_topic_nodes(parent=topic.get("id"))])
             self.assertEqual(
                 topic["available"],
                 any_available,
                 "Topic availability for {topic} did not match child availability when any children are available.".format(
                     topic=topic.get("title")
                 ),
             )
Example No. 5
 def test_get_topic_nodes(self):
     """ Test for issue #3997 -- only "available" items should be sent to the sidebar """
     children = get_topic_nodes(parent="1")
     self.assertEqual(children, [{
         'available': True,
         'description': self.available_item.description,
         'id': self.available_item.id,
         'kind': self.available_item.kind,
         'path': self.available_item.path,
         'slug': self.available_item.slug,
         'title': self.available_item.title,
     }])
Example No. 6
 def test_get_topic_nodes(self):
     """ Test for issue #3997 -- only "available" items should be sent to the sidebar """
     children = get_topic_nodes(parent=self.content_root)
     for child in children:
         self.assertTrue(child.available)
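Examples 5 and 6 exercise the same contract stated in their docstrings: only available items are returned for a parent. A property-style restatement of that contract, purely illustrative and using dict nodes as in Example 5:

def only_available(nodes):
    """The sidebar contract from issue #3997: no unavailable nodes are returned."""
    return all(node.get("available", False) for node in nodes)

assert only_available([{"available": True}, {"available": True}])
assert not only_available([{"available": True}, {"available": False}])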
Example No. 7
def topic_tree(request, channel):
    parent = request.GET.get("parent")
    return JsonResponse(
        get_topic_nodes(channel=channel,
                        language=request.language,
                        parent=parent))
Example No. 8
def topic_tree(request, channel):
    parent = request.GET.get("parent")
    return JsonResponse(get_topic_nodes(channel=channel, language=request.language, parent=parent))
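The topic_tree view takes the channel from the URL and the optional parent from the query string. A hypothetical Django URLconf entry wiring it up could look like the following; the route pattern, module name, and URL name are assumptions, not taken from the project:

from django.conf.urls import url

from . import api_views  # hypothetical module holding topic_tree

urlpatterns = [
    # e.g. GET /api/topic_tree/khan/?parent=<node-id>
    url(r"^api/topic_tree/(?P<channel>\w+)/$", api_views.topic_tree, name="topic_tree"),
]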
Example No. 9
def learner_logs(request):

    # Query parameters arrive as strings, so coerce them to int before arithmetic.
    page = int(request.GET.get("page", 1))

    limit = int(request.GET.get("limit", 50))

    # Look back a week by default
    time_window = int(request.GET.get("time_window", 7))

    start_date = request.GET.get("start_date")

    end_date = request.GET.get("end_date")

    topic_ids = json.loads(request.GET.get("topic_ids", "[]"))

    learners = get_learners_from_GET(request)

    pages = int(ceil(len(learners)/float(limit)))

    if page*limit < len(learners):

        learners = learners[(page - 1)*limit: page*limit]

    log_types = request.GET.getlist("log_type", ["exercise", "video", "content"])

    output_logs = []

    output_objects = []

    end_date = datetime.datetime.strptime(end_date,'%Y/%m/%d') if end_date else datetime.datetime.now()

    start_date = datetime.datetime.strptime(start_date,'%Y/%m/%d') if start_date else end_date - datetime.timedelta(time_window)

    for log_type in log_types:
        LogModel, fields, id_field, obj_ids, objects = return_log_type_details(log_type, topic_ids)

        log_objects = LogModel.objects.filter(user__in=learners, **obj_ids).values(*fields)
        if not topic_ids:
            topic_objects = log_objects.filter(latest_activity_timestamp__gte=start_date, latest_activity_timestamp__lte=end_date)
            if topic_objects.count() == 0:
                topic_objects = log_objects
            # Can return multiple items with same id, due to topic tree redundancy, so make unique by id here.
            objects = dict([(item.get("id"), item) for item in get_topic_nodes(ids=[obj[id_field] for obj in topic_objects]) or []]).values()
        output_objects.extend(objects)
        output_logs.extend(log_objects)

    output_objects = unique_by_id_and_kind_sort(output_objects)

    return JsonResponse({
        # All learner log objects for each content item.
        "logs": output_logs,
        # All content items for which logs are being returned.
        "contents": output_objects,
        # Sometimes 'learners' gets collapsed to a list from the Queryset. This insures against that eventuality.
        "learners": [{
            "first_name": learner.first_name,
            "last_name": learner.last_name,
            "username": learner.username,
            "pk": learner.pk
            } for learner in learners],
        "page": page,
        "pages": pages,
        "limit": limit
    })
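The pagination in learner_logs is plain ceil-division plus slicing. A small worked example of that arithmetic (numbers are illustrative):

from math import ceil

learners = list(range(120))          # 120 learners, illustrative
page, limit = 2, 50

pages = int(ceil(len(learners) / float(limit)))       # 3 pages
subset = learners[(page - 1) * limit: page * limit]   # indices 50..99

assert pages == 3 and len(subset) == 50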
Example No. 10
def aggregate_learner_logs(request):

    learners = get_learners_from_GET(request)

    # Query parameters arrive as strings, so coerce them to int before use.
    event_limit = int(request.GET.get("event_limit", 10))

    # Look back a week by default
    time_window = int(request.GET.get("time_window", 7))

    start_date = request.GET.get("start_date")

    end_date = request.GET.get("end_date")

    topic_ids = json.loads(request.GET.get("topic_ids", "[]"))

    log_types = request.GET.getlist("log_type", ["exercise", "video", "content"])

    output_logs = []

    output_dict = {
        "content_time_spent": 0,
        "exercise_attempts": 0,
        "exercise_mastery": None,
        "total_in_progress": 0,
        "total_complete": 0,
        "total_struggling": 0,
        "total_not_attempted": 0,
        "available_topics": [],
    }

    end_date = datetime.datetime.strptime(end_date,'%Y/%m/%d') if end_date else datetime.datetime.now()

    start_date = datetime.datetime.strptime(start_date,'%Y/%m/%d') if start_date else end_date - datetime.timedelta(time_window)

    number_content = 0

    all_object_ids = set()

    for log_type in log_types:

        LogModel, fields, id_field, obj_ids, objects = return_log_type_details(log_type, topic_ids)   

        log_objects = LogModel.objects.filter(
            user__in=learners,
            latest_activity_timestamp__gte=start_date,
            latest_activity_timestamp__lte=end_date, **obj_ids).order_by("-latest_activity_timestamp")

        number_content += len(set(log_objects.values_list(id_field, flat=True)))

        if log_type == "video":
            output_dict["total_in_progress"] += log_objects.filter(complete=False).count()
            output_dict["content_time_spent"] += log_objects.aggregate(Sum("total_seconds_watched"))["total_seconds_watched__sum"] or 0
        elif log_type == "content":
            output_dict["total_in_progress"] += log_objects.filter(complete=False).count()
            output_dict["content_time_spent"] += log_objects.aggregate(Sum("time_spent"))["time_spent__sum"] or 0
        elif log_type == "exercise":
            output_dict["total_struggling"] = log_objects.filter(struggling=True).count()
            output_dict["total_in_progress"] += log_objects.filter(complete=False, struggling=False).count()
            output_dict["exercise_attempts"] = AttemptLog.objects.filter(user__in=learners,
                timestamp__gte=start_date,
                timestamp__lte=end_date, **obj_ids).count()
            if log_objects.aggregate(Avg("streak_progress"))["streak_progress__avg"] is not None:
                output_dict["exercise_mastery"] = round(log_objects.aggregate(Avg("streak_progress"))["streak_progress__avg"])
        output_logs.extend(log_objects)
        output_dict["total_complete"] += log_objects.filter(complete=True).count()

        object_buffer = LogModel.objects.filter(
            user__in=learners,
            latest_activity_timestamp__gte=start_date,
            latest_activity_timestamp__lte=end_date).values_list(id_field, flat=True)

        if len(object_buffer) > 1:
            all_object_ids.update(object_buffer)
        elif len(object_buffer) == 1:
            # Add the single id itself, not the queryset wrapper.
            all_object_ids.add(object_buffer[0])
    if len(all_object_ids) > 0:
        output_dict["available_topics"] = map(lambda x: {"id": x.get("id"), "title": x.get("title")}, get_content_parents(ids=list(all_object_ids)))
    output_dict["total_not_attempted"] = number_content*len(learners) - (
        output_dict["total_complete"] + output_dict["total_struggling"] + output_dict["total_in_progress"])
    # Report total time in hours
    output_dict["content_time_spent"] = round(output_dict["content_time_spent"]/3600.0,1)
    output_logs.sort(key=lambda x: x.latest_activity_timestamp, reverse=True)

    learner_event_objects = dict([(item["id"], item) for item in get_topic_nodes(
        ids=[getattr(log, "exercise_id", getattr(log, "video_id", getattr(log, "content_id", ""))) for log in output_logs[:event_limit]], language=request.language) or []])

    output_dict["learner_events"] = [{
        "learner": log.user.get_name(),
        "complete": log.complete,
        "struggling": getattr(log, "struggling", None),
        "progress": getattr(log, "streak_progress", getattr(log, "progress", None)),
        "content": learner_event_objects.get(getattr(log, "exercise_id", getattr(log, "video_id", getattr(log, "content_id", ""))), {}),
        } for log in output_logs[:event_limit]]
    output_dict["total_time_logged"] = round((UserLogSummary.objects\
        .filter(user__in=learners, start_datetime__gte=start_date, start_datetime__lte=end_date)\
        .aggregate(Sum("total_seconds")).get("total_seconds__sum") or 0)/3600.0, 1)
    return JsonResponse(output_dict)
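aggregate_learner_logs keys each log row by whichever of exercise_id, video_id, or content_id the log actually carries, via nested getattr calls repeated in two places above. A standalone restatement of that resolution order (the helper name is hypothetical):

def resolve_content_id(log):
    """Return the first of exercise_id, video_id, content_id present on the log,
    falling back to an empty string."""
    return getattr(log, "exercise_id",
                   getattr(log, "video_id",
                           getattr(log, "content_id", "")))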
Example No. 11
def learner_logs(request):

    # Query parameters arrive as strings, so coerce them to int before arithmetic.
    page = int(request.GET.get("page", 1))

    limit = int(request.GET.get("limit", 50))

    # Look back a week by default
    time_window = int(request.GET.get("time_window", 7))

    start_date = request.GET.get("start_date")

    end_date = request.GET.get("end_date")

    topic_ids = json.loads(request.GET.get("topic_ids", "[]"))

    learners = get_learners_from_GET(request)

    pages = int(ceil(len(learners) / float(limit)))

    if page * limit < len(learners):

        learners = learners[(page - 1) * limit:page * limit]

    log_types = request.GET.getlist("log_type",
                                    ["exercise", "video", "content"])

    output_logs = []

    output_objects = []

    end_date = datetime.datetime.strptime(
        end_date, '%Y/%m/%d') if end_date else datetime.datetime.now()

    start_date = datetime.datetime.strptime(
        start_date, '%Y/%m/%d'
    ) if start_date else end_date - datetime.timedelta(time_window)

    for log_type in log_types:
        LogModel, fields, id_field, obj_ids, objects = return_log_type_details(
            log_type, topic_ids, request.language)

        log_objects = LogModel.objects.filter(user__in=learners,
                                              **obj_ids).values(*fields)
        if not topic_ids:
            topic_objects = log_objects.filter(
                latest_activity_timestamp__gte=start_date,
                latest_activity_timestamp__lte=end_date)
            if topic_objects.count() == 0:
                topic_objects = log_objects
            # Can return multiple items with same id, due to topic tree redundancy, so make unique by id here.
            objects = dict([
                (item.get("id"), item) for item in
                get_topic_nodes(ids=[obj[id_field] for obj in topic_objects],
                                language=request.language) or []
            ]).values()
        output_objects.extend(objects)
        output_logs.extend(log_objects)

    output_objects = unique_by_id_and_kind_sort(output_objects)

    return JsonResponse({
        # All learner log objects for each content item.
        "logs": output_logs,
        # All content items for which logs are being returned.
        "contents": output_objects,
        # Sometimes 'learners' gets collapsed to a list from the Queryset. This insures against that eventuality.
        "learners": [{
            "first_name": learner.first_name,
            "last_name": learner.last_name,
            "username": learner.username,
            "pk": learner.pk
        } for learner in learners],
        "page": page,
        "pages": pages,
        "limit": limit
    })
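Both learner_logs variants deduplicate content items by building a dict keyed on id and taking its values, since topic-tree redundancy can surface the same node more than once. A minimal sketch of that dedup step (sample data is illustrative):

items = [{"id": "a", "title": "Addition"},
         {"id": "a", "title": "Addition"},   # same node reached via two topics
         {"id": "b", "title": "Subtraction"}]

unique_items = dict([(item.get("id"), item) for item in items]).values()
assert len(unique_items) == 2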
Example No. 12
def aggregate_learner_logs(request):

    learners = get_learners_from_GET(request)

    # Query parameters arrive as strings, so coerce them to int before use.
    event_limit = int(request.GET.get("event_limit", 10))

    # Look back a week by default
    time_window = int(request.GET.get("time_window", 7))

    start_date = request.GET.get("start_date")

    end_date = request.GET.get("end_date")

    topic_ids = json.loads(request.GET.get("topic_ids", "[]"))

    # Previously, we defaulted to all types of logs, but views on coach reports
    # seem to assume only exercises
    # log_types = request.GET.getlist("log_type", ["exercise", "video", "content"])
    log_types = request.GET.getlist("log_type", ["exercise"])

    output_logs = []

    output_dict = {
        "content_time_spent": 0,
        "exercise_attempts": 0,
        "exercise_mastery": None,
        "total_in_progress": 0,
        "total_complete": 0,
        "total_struggling": 0,
        "total_not_attempted": 0,
        "available_topics": [],
    }

    end_date = datetime.datetime.strptime(
        end_date, '%Y/%m/%d') if end_date else datetime.datetime.now()

    start_date = datetime.datetime.strptime(
        start_date, '%Y/%m/%d'
    ) if start_date else end_date - datetime.timedelta(time_window)

    number_content = 0

    all_object_ids = set()

    for log_type in log_types:

        LogModel, fields, id_field, obj_ids, objects = return_log_type_details(
            log_type, topic_ids, request.language)

        log_objects = LogModel.objects.filter(
            user__in=learners,
            latest_activity_timestamp__gte=start_date,
            latest_activity_timestamp__lte=end_date,
            **obj_ids).order_by("-latest_activity_timestamp")

        number_content += len(set(log_objects.values_list(id_field,
                                                          flat=True)))

        output_dict["total_complete"] += log_objects.filter(
            complete=True).count()
        if log_type == "video":
            output_dict["total_in_progress"] += log_objects.filter(
                complete=False).count()
            output_dict["content_time_spent"] += log_objects.aggregate(
                Sum("total_seconds_watched")
            )["total_seconds_watched__sum"] or 0
        elif log_type == "content":
            output_dict["total_in_progress"] += log_objects.filter(
                complete=False).count()
            output_dict["content_time_spent"] += log_objects.aggregate(
                Sum("time_spent"))["time_spent__sum"] or 0
        elif log_type == "exercise":
            output_dict["total_struggling"] += log_objects.filter(
                struggling=True).count()
            output_dict["total_in_progress"] += log_objects.filter(
                complete=False, struggling=False).count()

            # Summarize struggling, in progress, and completed
            output_dict["exercise_attempts"] += (
                output_dict["total_struggling"] +
                output_dict["total_complete"] +
                output_dict["total_in_progress"])
            # The below doesn't filter correctly, suspecting either bad
            # AttemptLog generated in generaterealdata or because timestamp
            # isn't correctly updated
            # output_dict["exercise_attempts"] = AttemptLog.objects.filter(user__in=learners,
            #     timestamp__gte=start_date,
            #     timestamp__lte=end_date, **obj_ids).count()
            if log_objects.aggregate(Avg(
                    "streak_progress"))["streak_progress__avg"] is not None:
                output_dict["exercise_mastery"] = round(
                    log_objects.aggregate(
                        Avg("streak_progress"))["streak_progress__avg"])
        output_logs.extend(log_objects)

        object_buffer = LogModel.objects.filter(
            user__in=learners,
            latest_activity_timestamp__gte=start_date,
            latest_activity_timestamp__lte=end_date).values_list(id_field,
                                                                 flat=True)

        if len(object_buffer) > 1:
            all_object_ids.update(object_buffer)
        elif len(object_buffer) == 1:
            # Add the single id itself, not the queryset wrapper.
            all_object_ids.add(object_buffer[0])
    if len(all_object_ids) > 0:
        output_dict["available_topics"] = map(
            lambda x: {
                "id": x.get("id"),
                "title": x.get("title")
            },
            get_content_parents(ids=list(all_object_ids),
                                language=request.language))
    output_dict["total_not_attempted"] = number_content * len(learners) - (
        output_dict["total_complete"] + output_dict["total_struggling"] +
        output_dict["total_in_progress"])
    # Report total time in hours
    output_dict["content_time_spent"] = round(
        output_dict["content_time_spent"] / 3600.0, 1)
    output_logs.sort(key=lambda x: x.latest_activity_timestamp, reverse=True)

    learner_event_objects = dict([
        (item["id"], item)
        for item in get_topic_nodes(
            ids=[
                getattr(log, "exercise_id",
                        getattr(log, "video_id", getattr(log, "content_id", "")))
                for log in output_logs[:event_limit]
            ],
            language=request.language) or []
    ])

    output_dict["learner_events"] = [{
        "learner":
        log.user.get_name(),
        "complete":
        log.complete,
        "struggling":
        getattr(log, "struggling", None),
        "progress":
        getattr(log, "streak_progress", getattr(log, "progress", None)),
        "content":
        learner_event_objects.get(
            getattr(log, "exercise_id",
                    getattr(log, "video_id", getattr(log, "content_id", ""))),
            {}),
    } for log in output_logs[:event_limit]]
    output_dict["total_time_logged"] = round((UserLogSummary.objects\
        .filter(user__in=learners, start_datetime__gte=start_date, start_datetime__lte=end_date)\
        .aggregate(Sum("total_seconds")).get("total_seconds__sum") or 0)/3600.0, 1)
    return JsonResponse(output_dict)
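Across the twelve examples, get_topic_nodes is called with parent, ids, channel, and language keyword arguments, and its result is treated as a list of node dicts carrying keys such as id, title, kind, path, and available (or, in Example 6, objects exposing an available attribute). The stub below only records that inferred interface for reference; it is not the library's actual signature or implementation:

def get_topic_nodes(parent=None, ids=None, channel=None, language=None):
    """Inferred interface only: return the available topic-tree nodes selected
    either by parent node or by an explicit list of ids, optionally scoped to a
    channel and language. Each node exposes at least id, title, kind, path, and
    available."""
    raise NotImplementedError("see the project's topic_tools module for the real implementation")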