def grade_new_submissions():
    print("Checking for new submissions")
    students = c.downloadAssignment(courseName=course_name, assignmentName=assignment_name,
                                    subdirName=homework_path, allowLate=True)
    print("Downloaded new submissions from {}".format(students))

    # Check if there are any students we previously failed to grade and comment on,
    # and grab their work to grade as well.
    for dir in next(os.walk(os.path.abspath(homework_path)))[1]:
        student_dir = os.path.abspath(homework_path + "/" + dir)
        if not os.path.isfile(student_dir + "/final_score.diff"):
            print("Student {} is missing their final score, will force regrade".format(dir))
            student_id = int(dir)
            if student_id not in students:
                students.append(student_id)

    for student_id in students:
        student_dir = os.path.abspath(homework_path + "/" + str(student_id))
        print('Processing student ' + student_dir)
        os.chdir(student_dir)
        for name, problem in reference_soln.items():
            cl_stdout_file = name + '_cl.txt'
            stdin_file = ref_homework_dir + '/' + problem["stdin"]
            stdout_file = name + "_stdout.txt"
            ref_stdout_file = ref_homework_dir + '/' + problem["stdout"]
            result_file = name + '_results.txt'
            grade_file = name + '_grade.txt'

            # Compile the student's submission
            print('Compiling ' + name)
            grading.compile(cl_stdout_file, problem, name)

            # Run the student's submission
            exe = name + '.exe'
            print('Running ' + exe)
            grading.run_student(exe, stdin_file, stdout_file, cl_stdout_file)

            # Check the student's submission
            print('Checking ' + name)
            # Compare the student's output to our expected output
            grading.compare(ref_stdout_file, stdout_file, result_file)
            # Count the number of warnings and errors
            grading.count_warnings_errors(cl_stdout_file, result_file)

            # Compile the grading document, but don't assign a grade to the student
            # since the server has no idea what grade to assign
            grading.grade(problem, stdout_file, result_file, grade_file, ref_stdout_file, None)

        # Build the final score file
        print("Building final score for " + student_dir)
        graded_files = [f for f in next(os.walk(student_dir))[2]]
        grading.build_final_score(graded_files, reference_soln, None)

        # Upload their grade
        grading.upload_grade(c, False)

        # CD back up out of the student's directory
        os.chdir("..")

        if student_id not in student_submission_count:
            student_submission_count[student_id] = 1
        else:
            student_submission_count[student_id] += 1

    now = datetime.datetime.now()
    print("Students graded for {}".format(now))
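
# A minimal driver sketch (an assumption, not part of the original script):
# grade_new_submissions() takes no arguments and relies on module-level state
# (c, course_name, assignment_name, homework_path, reference_soln,
# ref_homework_dir, student_submission_count), so a server-side driver would
# presumably just poll it on an interval, e.g.:
#
#     import time
#
#     while True:
#         grade_new_submissions()
#         time.sleep(5 * 60)  # re-check for new submissions every 5 minutes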
# Excerpt from the interactive grading path: the per-student / per-problem loops
# and the preceding branch of this if/elif chain are omitted here.
            # count the number of warnings and errors
            grading.count_warnings_errors(cl_stdout_file, result_file)
        # Open the student programs and outputs for final grading
        elif sys.argv[2] == 'grade' and not grading.check_grading(grade_file):
            grading.grade(problem, stdout_file, result_file, grade_file, ref_stdout_file, editor)
            # Check that a final grade for the assignment has been entered in the grade file
            if not grading.check_grading(grade_file):
                print("Error! No grade assigned for " + name)
        elif sys.argv[2] == 'regrade':
            grading.regrade(problem, stdout_file, result_file, grade_file, ref_stdout_file, editor)
            # Check that a final grade for the assignment has been entered in the grade file
            if not grading.check_grading(grade_file):
                print("Error! No grade assigned for " + name)

    if sys.argv[2] == 'grade' or sys.argv[2] == 'regrade':
        graded_files = [f for f in next(os.walk(student_dir))[2]]
        grading.build_final_score(graded_files, reference_soln, editor)

# Compute final score statistics and log them
if sys.argv[2] == 'stats':
    print("Score Summary:\n\tMean = {}\n\tStd dev = {}\n\tMedian = {}\n\tMax = {}\n\tMin = {}\n"
          .format(statistics.mean(grade_stats), statistics.stdev(grade_stats),
                  statistics.median(grade_stats), max(grade_stats), min(grade_stats)))
    plt.hist(grade_stats, bins=20)
    plt.title("Histogram")
    plt.xlabel("Value")
    plt.ylabel("Frequency")
    plt.show()