def learner_logs(request):
    """Return a page of learner log records plus the content items they refer to.

    GET parameters:
        page, limit        -- pagination (1-based page, items per page).
        time_window        -- days to look back when no explicit dates given.
        start_date/end_date -- explicit range, formatted YYYY/MM/DD.
        topic_ids          -- JSON-encoded list of topic ids to scope by.
        log_type           -- repeatable; defaults to exercise/video/content.

    Returns a JsonResponse with "logs", "contents", "learners", "page",
    "pages" and "limit" keys.
    """
    # QueryDict.get returns strings; coerce before any arithmetic/slicing,
    # otherwise e.g. `page * limit` and `timedelta(time_window)` raise TypeError.
    page = int(request.GET.get("page", 1))
    limit = int(request.GET.get("limit", 50))

    # Look back a week by default
    time_window = int(request.GET.get("time_window", 7))

    start_date = request.GET.get("start_date")
    end_date = request.GET.get("end_date")

    topic_ids = json.loads(request.GET.get("topic_ids", "[]"))

    learners = get_learners_from_GET(request)

    pages = int(ceil(len(learners) / float(limit)))

    # Always slice to the requested page. The previous guard
    # (`if page * limit < len(learners)`) skipped slicing on the final page,
    # returning the entire learner list instead of the last page.
    learners = learners[(page - 1) * limit:page * limit]

    log_types = request.GET.getlist("log_type", ["exercise", "video", "content"])

    output_logs = []
    output_objects = []

    end_date = datetime.datetime.strptime(
        end_date, '%Y/%m/%d') if end_date else datetime.datetime.now()

    start_date = datetime.datetime.strptime(
        start_date, '%Y/%m/%d'
    ) if start_date else end_date - datetime.timedelta(time_window)

    for log_type in log_types:

        LogModel, fields, id_field, obj_ids, objects = return_log_type_details(
            log_type, topic_ids)

        log_objects = LogModel.objects.filter(
            user__in=learners, **obj_ids).values(*fields)

        if not topic_ids:
            # No explicit topic scoping: restrict content items to those with
            # activity inside the window, falling back to all-time activity
            # when the window is empty.
            topic_objects = log_objects.filter(
                latest_activity_timestamp__gte=start_date,
                latest_activity_timestamp__lte=end_date)
            if topic_objects.count() == 0:
                topic_objects = log_objects
            # Can return multiple items with same id, due to topic tree
            # redundancy, so make unique by id here.
            objects = dict([(item.get("id"), item) for item in get_topic_nodes(
                ids=[obj[id_field] for obj in topic_objects]) or []]).values()

        output_objects.extend(objects)
        output_logs.extend(log_objects)

    output_objects = unique_by_id_and_kind_sort(output_objects)

    return JsonResponse({
        # All learner log objects for each content item.
        "logs": output_logs,
        # All content items for which logs are being returned.
        "contents": output_objects,
        # Sometimes 'learners' gets collapsed to a list from the Queryset.
        # This insures against that eventuality.
        "learners": [{
            "first_name": learner.first_name,
            "last_name": learner.last_name,
            "username": learner.username,
            "pk": learner.pk
        } for learner in learners],
        "page": page,
        "pages": pages,
        "limit": limit
    })
def aggregate_learner_logs(request):
    """Summarize activity for a set of learners over a date window.

    Aggregates time spent, attempts, mastery, and completion/progress/
    struggling counts per requested log type, plus a list of recent
    "learner events" and the set of topics with activity.

    GET parameters mirror learner_logs: event_limit, time_window,
    start_date/end_date (YYYY/MM/DD), topic_ids (JSON list), log_type
    (repeatable; defaults to "exercise" only).
    """
    learners = get_learners_from_GET(request)

    # QueryDict.get returns strings; coerce so that list slicing
    # (`output_logs[:event_limit]`) and `timedelta(time_window)` work.
    event_limit = int(request.GET.get("event_limit", 10))

    # Look back a week by default
    time_window = int(request.GET.get("time_window", 7))

    start_date = request.GET.get("start_date")
    end_date = request.GET.get("end_date")

    topic_ids = json.loads(request.GET.get("topic_ids", "[]"))

    # Previously, we defaulted to all types of logs, but views on coach reports
    # seem to assume only exercises
    # log_types = request.GET.getlist("log_type", ["exercise", "video", "content"])
    log_types = request.GET.getlist("log_type", ["exercise"])

    output_logs = []

    output_dict = {
        "content_time_spent": 0,
        "exercise_attempts": 0,
        "exercise_mastery": None,
        "total_in_progress": 0,
        "total_complete": 0,
        "total_struggling": 0,
        "total_not_attempted": 0,
        "available_topics": [],
    }

    end_date = datetime.datetime.strptime(
        end_date, '%Y/%m/%d') if end_date else datetime.datetime.now()

    start_date = datetime.datetime.strptime(
        start_date, '%Y/%m/%d'
    ) if start_date else end_date - datetime.timedelta(time_window)

    number_content = 0

    all_object_ids = set()

    for log_type in log_types:

        LogModel, fields, id_field, obj_ids, objects = return_log_type_details(
            log_type, topic_ids)

        log_objects = LogModel.objects.filter(
            user__in=learners,
            latest_activity_timestamp__gte=start_date,
            latest_activity_timestamp__lte=end_date,
            **obj_ids).order_by("-latest_activity_timestamp")

        # Distinct content items touched, for the not-attempted calculation.
        number_content += len(set(log_objects.values_list(id_field, flat=True)))

        output_dict["total_complete"] += log_objects.filter(
            complete=True).count()

        if log_type == "video":
            output_dict["total_in_progress"] += log_objects.filter(
                complete=False).count()
            output_dict["content_time_spent"] += log_objects.aggregate(
                Sum("total_seconds_watched")
            )["total_seconds_watched__sum"] or 0
        elif log_type == "content":
            output_dict["total_in_progress"] += log_objects.filter(
                complete=False).count()
            output_dict["content_time_spent"] += log_objects.aggregate(
                Sum("time_spent"))["time_spent__sum"] or 0
        elif log_type == "exercise":
            output_dict["total_struggling"] += log_objects.filter(
                struggling=True).count()
            output_dict["total_in_progress"] += log_objects.filter(
                complete=False, struggling=False).count()
            # Summarize struggling, in progress, and completed
            output_dict["exercise_attempts"] += output_dict[
                "total_struggling"] + output_dict[
                "total_complete"] + output_dict["total_in_progress"]
            # The below doesn't filter correctly, suspecting either bad
            # AttemptLog generated in generaterealdata or because timestamp
            # isn't correctly updated
            # output_dict["exercise_attempts"] = AttemptLog.objects.filter(user__in=learners,
            #     timestamp__gte=start_date,
            #     timestamp__lte=end_date, **obj_ids).count()
            if log_objects.aggregate(Avg(
                    "streak_progress"))["streak_progress__avg"] is not None:
                output_dict["exercise_mastery"] = round(
                    log_objects.aggregate(
                        Avg("streak_progress"))["streak_progress__avg"])

        output_logs.extend(log_objects)

        object_buffer = LogModel.objects.filter(
            user__in=learners,
            latest_activity_timestamp__gte=start_date,
            latest_activity_timestamp__lte=end_date).values_list(
                id_field, flat=True)
        # update() is correct for any number of ids. The previous
        # single-element branch did `all_object_ids.add(object_buffer)`,
        # adding the queryset object itself rather than its one id.
        all_object_ids.update(object_buffer)

    if len(all_object_ids) > 0:
        # List comprehension instead of map() so the result is a plain list
        # on both Python 2 and 3.
        output_dict["available_topics"] = [{
            "id": parent.get("id"),
            "title": parent.get("title"),
        } for parent in get_content_parents(ids=list(all_object_ids))]

    output_dict["total_not_attempted"] = number_content * len(learners) - (
        output_dict["total_complete"] + output_dict["total_struggling"] +
        output_dict["total_in_progress"])

    # Report total time in hours
    output_dict["content_time_spent"] = round(
        output_dict["content_time_spent"] / 3600.0, 1)

    output_logs.sort(key=lambda x: x.latest_activity_timestamp, reverse=True)

    # Map content ids -> topic node metadata for the most recent events.
    learner_event_objects = dict([
        (item["id"], item) for item in get_topic_nodes(ids=[
            getattr(log, "exercise_id",
                    getattr(log, "video_id", getattr(log, "content_id", "")))
            for log in output_logs[:event_limit]
        ], language=request.language) or []
    ])

    output_dict["learner_events"] = [{
        "learner": log.user.get_name(),
        "complete": log.complete,
        "struggling": getattr(log, "struggling", None),
        "progress": getattr(log, "streak_progress",
                            getattr(log, "progress", None)),
        "content": learner_event_objects.get(
            getattr(log, "exercise_id",
                    getattr(log, "video_id", getattr(log, "content_id", ""))),
            {}),
    } for log in output_logs[:event_limit]]

    output_dict["total_time_logged"] = round((UserLogSummary.objects
        .filter(user__in=learners,
                start_datetime__gte=start_date,
                start_datetime__lte=end_date)
        .aggregate(Sum("total_seconds")).get("total_seconds__sum") or 0) / 3600.0, 1)

    return JsonResponse(output_dict)
def installed_language_packs(request):
    """Return metadata for installed language packs, hiding the 'en' entry
    when its pack version is 0 (presumably a built-in placeholder rather
    than a real downloaded pack -- TODO confirm).
    """
    installed_languages = get_installed_language_packs(force=True).copy()
    # Use .get(): indexing would raise KeyError when no 'en' pack exists.
    en_pack = installed_languages.get('en')
    if en_pack is not None and en_pack['language_pack_version'] == 0:
        del installed_languages['en']
    return JsonResponse(installed_languages.values())
def check_update_progress(request, process_log):
    """
    API endpoint for getting progress data on downloads.
    """
    progress_payload = _process_log_to_dict(process_log)
    return JsonResponse(progress_payload)
def aggregate_learner_logs(request):
    """Aggregate log statistics (time spent, attempts, mastery) for a set of
    learners over a date window, plus a short list of recent learner events.

    NOTE(review): another ``aggregate_learner_logs`` appears earlier in this
    file; at import time the later definition shadows the earlier one --
    confirm which implementation is intended to be live.
    """
    lang = request.language

    learners = get_learners_from_GET(request)

    # QueryDict.get returns strings; coerce so that list slicing
    # (`output_logs[:event_limit]`) and `timedelta(time_window)` work.
    event_limit = int(request.GET.get("event_limit", 10))

    # Look back a week by default
    time_window = int(request.GET.get("time_window", 7))

    start_date = request.GET.get("start_date", None)
    end_date = request.GET.get("end_date", None)

    topic_ids = request.GET.getlist("topic_id", [])

    log_types = request.GET.getlist("log_type", ["exercise", "video", "content"])

    output_logs = []
    output_dict = {
        "content_time_spent": 0,
        "exercise_attempts": 0,
        "exercise_mastery": None,
    }

    end_date = datetime.datetime.strptime(
        end_date, '%Y/%m/%d') if end_date else datetime.datetime.now()
    start_date = datetime.datetime.strptime(
        start_date, '%Y/%m/%d'
    ) if start_date else end_date - datetime.timedelta(time_window)

    for log_type in log_types:
        LogModel, fields, id_field, obj_ids, objects = return_log_type_details(
            log_type, topic_ids)
        log_objects = LogModel.objects.filter(
            user__in=learners,
            latest_activity_timestamp__gte=start_date,
            latest_activity_timestamp__lte=end_date,
            **obj_ids).order_by("-latest_activity_timestamp")
        if log_type == "video":
            output_dict["content_time_spent"] += log_objects.aggregate(
                Sum("total_seconds_watched"))["total_seconds_watched__sum"] or 0
        elif log_type == "content":
            output_dict["content_time_spent"] += log_objects.aggregate(
                Sum("time_spent"))["time_spent__sum"] or 0
        elif log_type == "exercise":
            output_dict["exercise_attempts"] = AttemptLog.objects.filter(
                user__in=learners,
                timestamp__gte=start_date,
                timestamp__lte=end_date).count()
            if log_objects.aggregate(
                    Avg("streak_progress"))["streak_progress__avg"] is not None:
                output_dict["exercise_mastery"] = round(log_objects.aggregate(
                    Avg("streak_progress"))["streak_progress__avg"])
        output_logs.extend(log_objects)

    # Report total time in hours
    output_dict["content_time_spent"] = round(
        output_dict["content_time_spent"] / 3600.0, 1)

    output_logs.sort(key=lambda x: x.latest_activity_timestamp, reverse=True)

    # Hoist the cache lookups out of the comprehension: previously both were
    # re-fetched for every event row. Assumes get_exercise_cache /
    # get_content_cache are side-effect-free lookups -- TODO confirm.
    exercise_cache = get_exercise_cache(language=lang)
    content_cache = get_content_cache(language=lang)

    output_dict["learner_events"] = [{
        "learner": log.user.get_name(),
        "complete": log.complete,
        "struggling": getattr(log, "struggling", None),
        "progress": getattr(log, "streak_progress",
                            getattr(log, "progress", None)),
        "content": exercise_cache.get(getattr(log, "exercise_id", ""))
            or content_cache.get(
                getattr(log, "video_id", None) or getattr(log, "content_id", ""))
            or {},
    } for log in output_logs[:event_limit]]

    output_dict["total_time_logged"] = round((UserLogSummary.objects
        .filter(user__in=learners,
                start_datetime__gte=start_date,
                start_datetime__lte=end_date)
        .aggregate(Sum("total_seconds")).get("total_seconds__sum") or 0) / 3600.0, 1)

    return JsonResponse(output_dict)
def topic_tree(request, channel):
    """Return topic-tree nodes for the given channel in the request language."""
    parent_id = request.GET.get("parent")
    nodes = get_topic_nodes(
        channel=channel,
        language=request.language,
        parent=parent_id,
    )
    return JsonResponse(nodes)
def assessment_item(request, assessment_item_id):
    """Return data for one assessment item, scoped by channel and language.

    Falls back to the "khan" channel and "en" language when the request
    object carries neither attribute.
    """
    channel = getattr(request, "channel", "khan")
    language = getattr(request, "language", "en")
    item_data = get_assessment_item_data(
        channel=channel,
        language=language,
        assessment_item_id=assessment_item_id,
    )
    return JsonResponse(item_data)
def get_update_topic_tree(request):
    """Return update-annotated topic nodes for the requested language."""
    parent_id = request.GET.get("parent")
    # An explicit ?lang= parameter overrides the request's language.
    lang_code = request.GET.get("lang") or request.language
    # Get annotations for the current language.
    nodes = get_topic_update_nodes(parent=parent_id, language=lang_code)
    return JsonResponse(nodes)
def installed_language_packs(request):
    """Return metadata for every installed language pack."""
    packs = get_installed_language_packs(force=True)
    return JsonResponse(packs.values())