def get_grades(self, target_block_id, limit_hint=None):
    """
    Get the grades as a percentage for the top students who have started
    or completed target_block_id and its descendants.

    The current implementation is as follows: this block 'grade_source'
    class calls the get_score() grading computation built into edX, which
    returns the grade for the current student only; that grade is then
    cached in the grade_leaderboard xblock in a user_state_summary field.
    The block will then use the cached grades from all students to compute
    the leaderboard.

    Pros:
    * Fairly efficient - generally uses cached grades, making one MySQL
      query per block
    * Grading should match edX grading very well since it uses the same
      method
    * Supports all block types

    Cons:
    * Requires students to view the block before their grades appear in
      the leaderboard - could result in missing/outdated data, depending
      on where the block appears in the course and how often students
      view the leaderboard.
    * Storing data in Scope.user_state_summary will lead to read write
      conflicts, e.g. if two students view the courseware simultaneously
      and their requests are handled by different gunicorn processes.

    TODO: We will need to replace this implementation with something that
    uses celery to update student grades asynchronously and stores them
    in a more robust cache format.

    Note: `limit_hint` is currently unused by this implementation.
    """
    total_correct, total_possible = 0, 0
    course_id = target_block_id.course_key.for_branch(None).version_agnostic()
    target_block = self.host_block.runtime.get_block(target_block_id)
    student = self.host_block.runtime.get_real_user(
        self.host_block.runtime.anonymous_student_id)

    def create_module(descriptor):
        return target_block.runtime.get_block(descriptor.location)

    for module_descriptor in yield_dynamic_descriptor_descendents(target_block, create_module):
        (correct, total) = get_score(course_id, student, module_descriptor, create_module)
        if (correct is None and total is None) or (not total > 0):
            continue
        # Note we ignore the 'graded' flag since authors may wish to use a
        # leaderboard for non-graded content
        total_correct += correct
        total_possible += total

    # BUGFIX: guard against ZeroDivisionError when the target block has no
    # scorable descendants (total_possible never leaves 0); treat as 0%.
    if total_possible > 0:
        percent = int(round(float(total_correct) / total_possible * 100))
    else:
        percent = 0

    with self.update_grades_cache(target_block_id) as cache:
        cache[unicode(student.id)] = (percent, {"name": student.profile.name})
        result = cache.values()
    return [entry for entry in result if entry[0] > 0]  # Return all non-zero grades
def prepare_sections_with_grade(request, course):
    '''
    Create sections with grade details.

    Return format:
    { 'sections': [ { 'display_name': name,  # in case of cohorts or any other accessibility settings
                      'hidden': hidden,
                      'url_name': url_name,
                      'units': UNITS,
                      'rank': rank,
                      'badge': badge status,
                      'points': grade points,
                      'podium': podium status,
                      'week': section_index + 1, }, ], }
    where UNITS is a list
    [ { 'display_name': name, 'position': unit position in section, 'css_class': css class, }, ... ]

    sections with name 'hidden' are skipped.

    NOTE: assumes that if we got this far, user has access to course.
    Returns [] if this is not the case.
    '''
    # Set the student to request user
    student = request.user
    # Get the field data cache for the requesting student
    field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, student, course, depth=2,
    )
    # Get the course module as seen by the student
    with modulestore().bulk_operations(course.id):
        course_module = get_module_for_descriptor(
            student, request, course, field_data_cache, course.id, course=course
        )
        if course_module is None:
            return []
    # Get the field data cache for an arbitrary staff user, used below to
    # enumerate ALL chapters (including ones hidden from this student).
    # NOTE(review): filter(is_staff=1)[0] raises IndexError if no staff user
    # exists, and which staff user is picked is arbitrary -- confirm intended.
    staff_user = User.objects.filter(is_staff=1)[0]
    staff_field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, staff_user, course, depth=2,
    )
    # Get the course module as seen by staff
    with modulestore().bulk_operations(course.id):
        staff_course_module = get_module_for_descriptor(
            staff_user, request, course, staff_field_data_cache, course.id, course=course
        )
    # staff accessible chapters
    staff_chapters = staff_course_module.get_display_items()
    # find the passing grade for the course (lowest non-zero cutoff)
    nonzero_cutoffs = [cutoff for cutoff in course.grade_cutoffs.values() if cutoff > 0]
    success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else 0
    # find the course progress
    progress = get_course_progress(student, course.id)
    # prepare a list of discussions participated by user
    discussions_participated = get_discussions_participated(
        request, course.id.to_deprecated_string(), student.id
    )
    # get courseware summary
    with outer_atomic():
        field_data_cache = grades.field_data_cache_for_grading(course, student)
        scores_client = ScoresClient.from_field_data_cache(field_data_cache)
        courseware_summary = grades.progress_summary(
            student, request, course,
            field_data_cache=field_data_cache, scores_client=scores_client
        )
    # Aggregate earned/possible points per chapter (keyed by url_name).
    section_grades = {}
    for section in courseware_summary:
        earned = 0
        total = 0
        for sub_section in section['sections']:
            earned += sub_section['section_total'].earned
            total += sub_section['section_total'].possible
        # NOTE(review): under Python 2, earned / total truncates if both are
        # ints; this appears to rely on section_total values being floats --
        # confirm against the grades API.
        section_score = earned / total if earned > 0 and total > 0 else 0
        section_grades[section['url_name']] = {
            'earned': earned,
            'total': total,
            # green at >= 60% of section points, red below, '' if no points
            'css_class': ('text-red', 'text-green')[int(section_score >= 0.6)] if total > 0 else ''
        }
    # Check for content which needs to be completed
    # before the rest of the content is made available
    # NOTE(review): required_content is computed (and filtered below) but not
    # read anywhere later in this function -- confirm whether it is dead code.
    required_content = milestones_helpers.get_required_content(course, student)
    # Check for gated content
    gated_content = gating_api.get_gated_content(course, student)
    # The user may not actually have to complete the entrance exam, if one is required
    if not user_must_complete_entrance_exam(request, student, course):
        required_content = [content for content in required_content if not content == course.entrance_exam_id]

    # define inner function
    def create_module(descriptor):
        '''creates an XModule instance given a descriptor'''
        return get_module_for_descriptor(
            student, request, descriptor, field_data_cache, course.id, course=course
        )

    with outer_atomic():
        submissions_scores = sub_api.get_scores(
            course.id.to_deprecated_string(), anonymous_id_for_user(student, course.id)
        )
        max_scores_cache = grades.MaxScoresCache.create_for_course(course)
        max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)

    sections = list()
    # Map the chapters the student can see by url_name so they can be matched
    # against the full (staff-visible) chapter list.
    student_chapters = course_module.get_display_items()
    urlname_chapters = {}
    for student_chap in student_chapters:
        urlname_chapters.update({student_chap.url_name: student_chap})
    # Build an ordered mapping of "1"-based week index -> chapter info,
    # marking chapters the student cannot see as hidden.
    final_chapters = OrderedDict()
    for chapter_index, chapter in enumerate(staff_chapters):
        fin_chap = urlname_chapters.get(chapter.url_name)
        if fin_chap:
            final_chapters.update({str(chapter_index+1): {'hidden': False, 'chapter': fin_chap}})
        else:
            final_chapters.update({str(chapter_index+1): {'hidden': True}})

    for section_index, chapter_info in final_chapters.items():
        # Mark as hidden and Skip the current chapter if a hide flag is tripped
        if chapter_info['hidden']:
            sections.append({
                'hidden': True,
                'week': "WEEK {week}: ".format(week=section_index),
                'points': {
                    'total': 0,
                    'earned': 0,
                    'css_class': 'text-disabled'
                },
            })
            continue
        chapter = chapter_info['chapter']
        # get the points
        section_points = section_grades.get(chapter.url_name, {})
        units = list()
        for sequential in chapter.get_display_items():
            # Set hidden status of the sequential if it is gated/hidden from the user
            hidden = (
                gated_content and unicode(sequential.location) in gated_content
                or sequential.hide_from_toc
            )
            if hidden:
                continue
            for index, unit in enumerate(sequential.get_display_items()):
                css_class = 'dark-gray'
                if unit.graded:
                    # Count graded problems in this unit and sum their scores
                    # to decide the unit's progress color.
                    total_excercises = 0
                    attempted_excercises = 0
                    unit_max_score = 0
                    unit_score = 0
                    for component in unit.get_display_items():
                        if component.category == 'problem':
                            if component.graded:
                                total_excercises += 1
                                attempted_excercises += is_attempted_internal(
                                    str(component.location), progress
                                )
                                (correct, total) = grades.get_score(
                                    student,
                                    component,
                                    create_module,
                                    scores_client,
                                    submissions_scores,
                                    max_scores_cache,
                                )
                                unit_max_score += total
                                unit_score += correct
                    if total_excercises:
                        # blue: has graded problems; green: all attempted;
                        # red: all attempted but below the passing cutoff
                        css_class = 'blue'
                        if attempted_excercises == total_excercises:
                            css_class = 'green'
                        # NOTE(review): Python 2 division -- if both operands
                        # are ints this truncates; confirm get_score returns
                        # floats here.
                        if unit_max_score and unit_score / unit_max_score < success_cutoff:
                            css_class = 'red'
                position = index + 1
                # For jumping to the unit directly
                unit_context = {
                    'display_name': unit.display_name_with_default_escaped,
                    'position': position,
                    'css_class': css_class,
                    'courseware_url': reverse(
                        'courseware_position',
                        args=[
                            course.id,
                            chapter.url_name,
                            sequential.url_name,
                            position
                        ]
                    )
                }
                units.append(unit_context)
        # competency is True only when every available point was earned;
        # stays None when the section carries no points
        competency = None
        if int(section_points.get('total')):
            competency = int(section_points.get('earned')) == int(section_points.get('total'))
        section_context = {
            'display_name': chapter.display_name_with_default_escaped,
            'url_name': chapter.url_name,
            'hidden': False,
            'rank': 1,
            'competency': competency,
            'points': {
                'total': int(section_points.get('total')),
                'earned': int(section_points.get('earned')),
                'css_class': section_points.get('css_class')
            },
            'participation': discussions_participated.get(chapter.url_name),
            'units': units,
            'week': "WEEK {week}: ".format(week=section_index),
        }
        sections.append(section_context)
    return sections
def get_grades(self, target_block_id, limit_hint=None):
    """
    Get the grades as a percentage for the top students who have started
    or completed target_block_id and its descendants.

    The current implementation is as follows: this block 'grade_source'
    class calls the get_score() grading computation built into edX, which
    returns the grade for the current student only; that grade is then
    cached in the grade_leaderboard xblock in a user_state_summary field.
    The block will then use the cached grades from all students to compute
    the leaderboard.

    Pros:
    * Fairly efficient - generally uses cached grades, making one MySQL
      query per block
    * Grading should match edX grading very well since it uses the same
      method
    * Supports all block types

    Cons:
    * Requires students to view the block before their grades appear in
      the leaderboard - could result in missing/outdated data, depending
      on where the block appears in the course and how often students
      view the leaderboard.
    * Storing data in Scope.user_state_summary will lead to read write
      conflicts, e.g. if two students view the courseware simultaneously
      and their requests are handled by different gunicorn processes.

    TODO: We will need to replace this implementation with something that
    uses celery to update student grades asynchronously and stores them
    in a more robust cache format.

    Note: `limit_hint` is currently unused by this implementation.
    """
    total_correct, total_possible = 0, 0
    course_id = target_block_id.course_key.for_branch(
        None).version_agnostic()
    target_block = self.host_block.runtime.get_block(target_block_id)
    student = self.host_block.runtime.get_real_user(
        self.host_block.runtime.anonymous_student_id)

    def create_module(descriptor):
        return target_block.runtime.get_block(descriptor.location)

    for module_descriptor in yield_dynamic_descriptor_descendents(
            target_block, create_module):
        (correct, total) = get_score(course_id, student, module_descriptor,
                                     create_module)
        if (correct is None and total is None) or (not total > 0):
            continue
        # Note we ignore the 'graded' flag since authors may wish to use a
        # leaderboard for non-graded content
        total_correct += correct
        total_possible += total

    # BUGFIX: guard against ZeroDivisionError when the target block has no
    # scorable descendants (total_possible never leaves 0); treat as 0%.
    if total_possible > 0:
        percent = int(round(float(total_correct) / total_possible * 100))
    else:
        percent = 0

    with self.update_grades_cache(target_block_id) as cache:
        cache[unicode(student.id)] = (percent, {
            "name": student.profile.name
        })
        result = cache.values()
    return [entry for entry in result if entry[0] > 0]  # Return all non-zero grades
def student_view(self, context):
    """
    Create a fragment used to display the XBlock to a student.

    `context` is a dictionary used to configure the display (unused).

    Returns a `Fragment` object specifying the HTML, CSS, and JavaScript to
    display.
    """
    attempts = None
    max_attempts = None
    score = None
    passing = False
    is_attempted = False
    # BUGFIX: initialize the accumulators and embed_code unconditionally.
    # Previously they were only bound inside `if EDX_FOUND:` (and embed_code
    # only inside branches), so the score math and the template context below
    # raised NameError when EDX_FOUND was falsy or the descendant loop was
    # empty.
    total_correct, total_possible = 0, 0
    count = 0
    embed_code = ""

    # On first adding the block, the studio calls student_view instead of
    # author_view, which causes problems. Force call of the author view.
    if getattr(self.runtime, 'is_author_mode', False):
        return self.author_view()

    if EDX_FOUND:
        target_block_id = self.graded_target_id
        course_id = target_block_id.course_key.for_branch(None).version_agnostic()
        target_block = self.runtime.get_block(target_block_id)
        student = user_by_anonymous_id(self.runtime.anonymous_student_id)
        if student:
            def create_module(descriptor):
                return target_block.runtime.get_block(descriptor.location)

            for module_descriptor in yield_dynamic_descriptor_descendents(target_block, create_module):
                (correct, total) = get_score(course_id, student, module_descriptor, create_module)
                if (correct is None and total is None) or (not total > 0):
                    continue
                # Note we ignore the 'graded' flag since authors may wish to
                # use a leaderboard for non-graded content
                total_correct += correct
                total_possible += total
                count += 1
                # NOTE(review): these reflect whichever scorable descendant
                # came last -- confirm that is intended when the target has
                # several problems.
                attempts = module_descriptor.problem_attempts
                max_attempts = module_descriptor.max_attempts
                is_attempted = module_descriptor.is_attempted
                # NOTE(review): this binds the method object without calling
                # it -- confirm the consumer expects a callable here.
                embed_code = module_descriptor.get_problem_html
        else:
            embed_code = "aaaa"
    else:
        embed_code = "Error: EdX not found."

    # lets do some math to see if we passed
    score = 0
    # calculate score
    passing = False
    if total_possible > 0:
        # BUGFIX: force float division -- under Python 2, int/int truncated
        # every partial score to 0, so `passing` could never be True below
        # a perfect score.
        score = float(total_correct) / total_possible * 100  # get score out of 100, not 1
        if score >= 80:
            passing = True

    if attempts > 0 or is_attempted == True:
        # student has submitted this assessment
        result_file = "assessmentendcap.html"
        if passing:
            result_file = "assessmentendcap-pass.html"
        else:
            if max_attempts and attempts < max_attempts:
                # student can still submit this problem again
                result_file = "assessmentendcap-tryagain.html"
            elif max_attempts and attempts >= max_attempts:
                # student has submitted this problem the max number of times.
                # BUGFIX: require max_attempts here -- with unlimited attempts
                # (max_attempts None) the old `attempts >= max_attempts`
                # compared against None and could wrongly show the fail endcap.
                result_file = "assessmentendcap-fail.html"
    else:
        # student has not submitted assessment. We don't need to render anything.
        result_file = "assessmentendcap-blank.html"

    return self.create_fragment(
        "static/html/" + result_file,
        context={
            'embed_code': embed_code,
            'total_correct': total_correct,
            'total_possible': total_possible,
            'score': score,
            'attempts': attempts,
            'max_attempts': max_attempts,
            'count': count,
            'is_attempted': is_attempted
        },
        javascript=["static/js/assessmentendcap.js"],
        initialize='AssessmentEndcapXBlock'
    )
def handle_check_marketo_completion_score(sender, module, grade, max_grade, **kwargs):
    """
    When a StudentModule is saved, check whether it is of a type that can
    affect our Marketo course completion percentage. If so, calculate the new
    course completion percentage for the StudentModule's related Course, and
    then make a REST API request to Marketo to update the completion field.
    """
    # Imported locally; presumably to avoid a circular import at module load
    # time -- TODO confirm.
    from courseware.courses import get_course
    from courseware.grades import manual_transaction, get_score

    if not grade:
        # a zero grade can't increase the Marketo complete score so no reason to check
        # we shouldn't get a False or None grade so we'll throw those out too
        return
    # NOTE(review): double negative -- integration proceeds only when the
    # site configuration value is set AND the settings FEATURES flag is NOT
    # set. Also, if settings.FEATURES is a dict (the usual edX convention),
    # getattr() on it always returns None. Confirm this gate is intended.
    if not (get_value("course_enable_marketo_integration", None) and not \
            getattr(settings.FEATURES, "COURSE_ENABLE_MARKETO_INTEGRATION", None)
            ):
        return
    course_map = get_value("marketo_course_complete_field_map", None)
    if not course_map:
        logger.warn("Could not find Marketo course completion field map.")
        return
    instance = module
    student = instance.student
    # Only courses present in the Marketo field map are tracked.
    if str(instance.course_id) not in course_map.keys():
        return
    # only continue for problem submissions
    state_dict = json.loads(instance.state) if instance.state else defaultdict(bool)
    if not state_dict or instance.module_type not in \
            StudentModuleHistory.HISTORY_SAVING_TYPES or \
            'student_answers' not in state_dict.keys():
        return
    if cached_check_marketo_complete(str(instance.course_id), student.email, course_map):
        # already marked complete.  don't need to keep checking
        return
    course = get_course(instance.course_id)
    logger.info(('Checking InterSystems Marketo completion score for course '
                 '{0}, student {1}').format(instance.course_id, student))
    # TODO: make a Mocked REST API response
    # TODO: make call to REST API
    grading_context = course.grading_context
    # right now we don't care about state stored via the Submissions API
    # since this is only used by ORA2 and InterSystems doesn't use that
    # largely taken from courseware/grades.py
    # Count graded modules with any score vs. all graded, scorable modules.
    non_zero_module_scores = 0
    total_scorable_modules = 0
    for section_format, sections in grading_context['graded_sections'].iteritems():
        for section in sections:
            section_descriptor = section['section_descriptor']
            section_name = section_descriptor.display_name_with_default
            if settings.DEBUG:
                logger.info('Checking completion of section {}'.format(section_name))

            def noop_track_function(event_type, event):
                # tracking events are irrelevant here; swallow them
                pass

            def create_module(descriptor):
                '''creates an XModule instance given a descriptor'''
                with manual_transaction():
                    field_data_cache = FieldDataCache([descriptor], course.id, student)
                    # don't need tracking/xqueue but we have to pass something
                    return get_module_for_descriptor_internal(
                        student, descriptor, field_data_cache, course.id,
                        track_function=noop_track_function,  # dummy
                        xqueue_callback_url_prefix='',  # dummy
                        request_token='')  # dummy

            for module_descriptor in yield_dynamic_descriptor_descendents(section_descriptor, create_module):
                (correct, total) = get_score(
                    course.id, student, module_descriptor, create_module
                )
                if correct is None and total is None:
                    continue
                graded = module_descriptor.graded
                if graded and total > 0:
                    total_scorable_modules += 1
                    if correct > 0:
                        # just has to have a non-zero score
                        non_zero_module_scores += 1
    if total_scorable_modules > 0:
        nonzero_modules = float(non_zero_module_scores) / float(total_scorable_modules)
        if nonzero_modules * 100 >= \
                MIN_SCORED_PERCENTAGE_FOR_MARKETO_COMPLETE:
            # inform Marketo that this course is 'complete'
            update_marketo_complete(course_map, str(instance.course_id), student.email)
def student_view(self, context):
    """
    Create a fragment used to display the XBlock to a student.

    `context` is a dictionary used to configure the display (unused).

    Returns a `Fragment` object specifying the HTML, CSS, and JavaScript to
    display.
    """
    attempts = None
    max_attempts = None
    score = None
    passing = False
    is_attempted = False
    # BUGFIX: initialize the accumulators and embed_code unconditionally.
    # Previously they were only bound inside `if EDX_FOUND:` (and embed_code
    # only inside branches), so the score math and the template context below
    # raised NameError when EDX_FOUND was falsy or the descendant loop was
    # empty.
    total_correct, total_possible = 0, 0
    count = 0
    embed_code = ""

    # On first adding the block, the studio calls student_view instead of
    # author_view, which causes problems. Force call of the author view.
    if getattr(self.runtime, 'is_author_mode', False):
        return self.author_view()

    if EDX_FOUND:
        target_block_id = self.graded_target_id
        course_id = target_block_id.course_key.for_branch(
            None).version_agnostic()
        target_block = self.runtime.get_block(target_block_id)
        student = user_by_anonymous_id(self.runtime.anonymous_student_id)
        if student:
            def create_module(descriptor):
                return target_block.runtime.get_block(descriptor.location)

            for module_descriptor in yield_dynamic_descriptor_descendents(
                    target_block, create_module):
                (correct, total) = get_score(course_id, student,
                                             module_descriptor, create_module)
                if (correct is None and total is None) or (not total > 0):
                    continue
                # Note we ignore the 'graded' flag since authors may wish to
                # use a leaderboard for non-graded content
                total_correct += correct
                total_possible += total
                count += 1
                # NOTE(review): these reflect whichever scorable descendant
                # came last -- confirm that is intended when the target has
                # several problems.
                attempts = module_descriptor.problem_attempts
                max_attempts = module_descriptor.max_attempts
                is_attempted = module_descriptor.is_attempted
                # NOTE(review): this binds the method object without calling
                # it -- confirm the consumer expects a callable here.
                embed_code = module_descriptor.get_problem_html
        else:
            embed_code = "aaaa"
    else:
        embed_code = "Error: EdX not found."

    # lets do some math to see if we passed
    score = 0
    # calculate score
    passing = False
    if total_possible > 0:
        # BUGFIX: force float division -- under Python 2, int/int truncated
        # every partial score to 0, so `passing` could never be True below
        # a perfect score.
        score = float(total_correct) / total_possible * 100  # get score out of 100, not 1
        if score >= 80:
            passing = True

    if attempts > 0 or is_attempted == True:
        # student has submitted this assessment
        result_file = "assessmentendcap.html"
        if passing:
            result_file = "assessmentendcap-pass.html"
        else:
            if max_attempts and attempts < max_attempts:
                # student can still submit this problem again
                result_file = "assessmentendcap-tryagain.html"
            elif max_attempts and attempts >= max_attempts:
                # student has submitted this problem the max number of times.
                # BUGFIX: require max_attempts here -- with unlimited attempts
                # (max_attempts None) the old `attempts >= max_attempts`
                # compared against None and could wrongly show the fail endcap.
                result_file = "assessmentendcap-fail.html"
    else:
        # student has not submitted assessment. We don't need to render anything.
        result_file = "assessmentendcap-blank.html"

    return self.create_fragment(
        "static/html/" + result_file,
        context={
            'embed_code': embed_code,
            'total_correct': total_correct,
            'total_possible': total_possible,
            'score': score,
            'attempts': attempts,
            'max_attempts': max_attempts,
            'count': count,
            'is_attempted': is_attempted
        },
        javascript=["static/js/assessmentendcap.js"],
        initialize='AssessmentEndcapXBlock')