def enrollment(cid):
    """Show the enrollment page for course `cid` and handle add-by-form."""
    courses, current_course = get_courses(cid)

    form = forms.EnrollmentForm()
    if form.validate_on_submit():
        email, role = form.email.data, form.role.data
        Enrollment.enroll_from_form(cid, form)
        flash("Added {email} as {role}".format(email=email, role=role),
              "success")

    # Three roster sections: students, staff, and lab assistants.
    student_rows = (Enrollment.query
                    .options(db.joinedload('user'))
                    .filter_by(course_id=cid, role=STUDENT_ROLE)
                    .order_by(Enrollment.created.desc())
                    .all())
    staff_rows = (Enrollment.query
                  .options(db.joinedload('user'))
                  .filter(Enrollment.course_id == cid,
                          Enrollment.role.in_(STAFF_ROLES))
                  .all())
    assistant_rows = (Enrollment.query
                      .options(db.joinedload('user'))
                      .filter_by(course_id=cid, role=LAB_ASSISTANT_ROLE)
                      .order_by(Enrollment.created.desc())
                      .all())

    return render_template('staff/course/enrollment/enrollment.html',
                           enrollments=student_rows,
                           staff=staff_rows,
                           lab_assistants=assistant_rows,
                           form=form,
                           unenroll_form=forms.CSRFForm(),
                           courses=courses,
                           current_course=current_course)
def student_assignment_detail(cid, email, aid):
    """Staff detail page for one student's work on one assignment."""
    courses, current_course = get_courses(cid)
    page_num = request.args.get('page', 1, type=int)

    assignment = Assignment.query.filter_by(id=aid, course_id=cid).one_or_none()
    if not assignment or not Assignment.can(assignment, current_user, 'grade'):
        flash('Cannot access assignment', 'error')
        return abort(404)

    student = User.lookup(email)
    if not student.is_enrolled(cid):
        flash("This user is not enrolled", 'warning')

    assignment_stats = assignment.user_status(student, staff_view=True)
    user_ids = assignment.active_user_ids(student.id)
    latest = assignment_stats.final_subm or assignment.backups(user_ids).first()

    stats = {
        'num_backups': assignment.backups(user_ids).count(),
        'num_submissions': assignment.submissions(user_ids).count(),
        'current_q': None,
        'attempts': None,
        'latest': latest,
        'analytics': latest and latest.analytics(),
    }

    backup_query = (Backup.query
                    .options(db.joinedload('scores'),
                             db.joinedload('submitter'))
                    .filter(Backup.submitter_id.in_(user_ids),
                            Backup.assignment_id == assignment.id)
                    .order_by(Backup.flagged.desc(),
                              Backup.submit.desc(),
                              Backup.created.desc()))
    paginate = backup_query.paginate(page=page_num, per_page=15)

    if stats['analytics']:
        stats['current_q'] = stats['analytics'].get('question')
        stats['attempts'] = (stats['analytics']
                             .get('history', {})
                             .get('all_attempts'))

    return render_template('staff/student/assignment.html',
                           courses=courses,
                           current_course=current_course,
                           student=student,
                           assignment=assignment,
                           add_member_form=forms.StaffAddGroupFrom(),
                           paginate=paginate,
                           csrf_form=forms.CSRFForm(),
                           stats=stats,
                           assign_status=assignment_stats)
def student_assignment_detail(cid, email, aid):
    """Staff view of a single student's progress on a single assignment."""
    courses, current_course = get_courses(cid)
    page = request.args.get('page', 1, type=int)

    assign = Assignment.query.filter_by(id=aid, course_id=cid).one_or_none()
    if not assign or not Assignment.can(assign, current_user, 'grade'):
        flash('Cannot access assignment', 'error')
        return abort(404)

    student = User.lookup(email)
    if not student.is_enrolled(cid):
        flash("This user is not enrolled", 'warning')

    assignment_stats = assign.user_status(student)
    user_ids = assign.active_user_ids(student.id)
    latest = assignment_stats.final_subm or assign.backups(user_ids).first()
    analytics = latest and latest.analytics()

    stats = {'num_backups': assign.backups(user_ids).count(),
             'num_submissions': assign.submissions(user_ids).count(),
             'current_q': None,
             'attempts': None,
             'latest': latest,
             'analytics': analytics}
    if analytics:
        history = analytics.get('history', {})
        stats['current_q'] = analytics.get('question')
        stats['attempts'] = history.get('all_attempts')

    # All backups for the student's active group, flagged ones first.
    backups = (Backup.query
               .options(db.joinedload('scores'), db.joinedload('submitter'))
               .filter(Backup.submitter_id.in_(user_ids),
                       Backup.assignment_id == assign.id)
               .order_by(Backup.flagged.desc(), Backup.submit.desc(),
                         Backup.created.desc()))
    paginate = backups.paginate(page=page, per_page=15)

    return render_template('staff/student/assignment.html',
                           courses=courses,
                           current_course=current_course,
                           student=student,
                           assignment=assign,
                           add_member_form=forms.StaffAddGroupFrom(),
                           paginate=paginate,
                           csrf_form=forms.CSRFForm(),
                           upload_form=forms.UploadSubmissionForm(),
                           stats=stats,
                           assign_status=assignment_stats)
def assignment_single_queue(cid, aid, uid):
    """Show one grader's grading queue for one assignment."""
    courses, current_course = get_courses(cid)

    assignment = Assignment.query.filter_by(id=aid, course_id=cid).one_or_none()
    if not Assignment.can(assignment, current_user, 'grade'):
        flash('Insufficient permissions', 'error')
        return abort(401)

    assigned_grader = User.get_by_id(uid)
    if not Assignment.can(assignment, assigned_grader, 'grade'):
        return abort(404)

    page_num = request.args.get('page', 1, type=int)
    tasks_query = GradingTask.query.filter_by(assignment=assignment,
                                              grader_id=uid)
    queue = (tasks_query
             .options(db.joinedload('assignment'))
             .order_by(GradingTask.score_id.asc())
             .order_by(GradingTask.created.asc())
             .paginate(page=page_num, per_page=20))

    # Tasks with no score yet are still to be graded.
    remaining = tasks_query.filter_by(score_id=None).count()
    percent_left = (1 - (remaining / max(1, queue.total))) * 100

    return render_template('staff/grading/queue.html',
                           courses=courses,
                           current_course=current_course,
                           assignment=assignment,
                           grader=assigned_grader,
                           queue=queue,
                           remaining=remaining,
                           percent_left=percent_left)
def edit_backup(bid):
    """Edit the custom submission time recorded on backup `bid`."""
    courses, current_course = get_courses()
    backup = Backup.query.options(db.joinedload('assignment')).get(bid)
    if not backup:
        abort(404)
    if not Backup.can(backup, current_user, 'grade'):
        flash("You do not have permission to score this assignment.", "warning")
        abort(401)

    form = forms.SubmissionTimeForm()
    if form.validate_on_submit():
        # Persist the staff-selected time, then reload this page.
        backup.custom_submission_time = form.get_submission_time(
            backup.assignment)
        db.session.commit()
        flash('Submission time saved', 'success')
        return redirect(url_for('.edit_backup', bid=bid))

    # Not a valid submit: pre-populate the form from the backup instead.
    form.set_submission_time(backup)
    return render_template(
        'staff/grading/edit.html',
        courses=courses,
        current_course=current_course,
        backup=backup,
        student=backup.submitter,
        form=form,
    )
def remove_lab_assistants(self):
    """Delete every lab-assistant enrollment for this course and commit.

    Fix: the original used a list comprehension purely for its side
    effects (`[db.session.delete(e) for e in ...]`), building a throwaway
    list of ``None``. A plain loop expresses the intent directly.
    """
    assistants = (Enrollment.query
                  .options(db.joinedload('user'))
                  .filter_by(role=LAB_ASSISTANT_ROLE,
                             course_id=self.course.id)
                  .all())
    for enrollment in assistants:
        db.session.delete(enrollment)
    db.session.commit()
def email_scores(assignment_id, score_tags, subject, body,
                 reply_to=None, dry_run=False):
    """Job: email each submitting group their scores for an assignment.

    Only scores whose ``kind`` is in `score_tags` are included. With
    `dry_run`, mail goes to the job creator instead of students and the
    job stops after a couple of messages. Returns a summary string.
    """
    log = jobs.get_job_logger()
    job_creator = jobs.get_current_job().user
    assign = Assignment.query.get(assignment_id)
    students = [e.user for e in (Enrollment.query
                                 .options(db.joinedload('user'))
                                 .filter(Enrollment.role == STUDENT_ROLE,
                                         Enrollment.course == assign.course)
                                 .all())]
    email_counter = 0
    # Group partners share scores; track already-covered user ids so each
    # group is emailed at most once.
    seen_ids = set()
    for student in students:
        if student.id in seen_ids:
            continue
        user_ids = assign.active_user_ids(student.id)
        seen_ids |= user_ids
        scores = [s for s in assign.scores(user_ids) if s.kind in score_tags]
        if scores:
            users = User.query.filter(User.id.in_(user_ids))
            # First member is the To: address; the rest are CC'd.
            primary, cc = users[0].email, [u.email for u in users[1:]]
            if dry_run:
                # Dry run: redirect mail to whoever started the job.
                primary, cc = job_creator.email, []
            result = send_email(
                primary, subject, body,
                cc=cc,
                template='email/scores.html',
                title=subject,
                from_name=assign.course.display_name,
                scores=scores,
                reply_to=reply_to,
                link_text="View on okpy.org",
                link="https://okpy.org/" + assign.name,  # Don't have url_for
                assignment=assign.display_name)
            if result:
                log.info("Sent to {}".format(', '.join([primary] + cc)))
                email_counter += 1
        # Send a few emails in dry run mode.
        if dry_run and email_counter >= 2:
            message = "Run with dry run mode"
            log.info(message)
            return message
    message = "Sent {} emails".format(email_counter)
    log.info(message)
    return message
def grading_tasks(username=None):
    """Show the current user's grading queue across assignments."""
    courses, current_course = get_courses()
    page_num = request.args.get('page', 1, type=int)

    tasks_query = GradingTask.query.filter_by(grader=current_user)
    queue = (tasks_query
             .options(db.joinedload('assignment'))
             .order_by(GradingTask.score_id.asc())
             .order_by(GradingTask.created.asc())
             .paginate(page=page_num, per_page=20))

    # Tasks without a score are still outstanding.
    remaining = tasks_query.filter_by(score_id=None).count()
    percent_left = (1 - (remaining / max(1, queue.total))) * 100

    return render_template('staff/grading/queue.html',
                           courses=courses,
                           queue=queue,
                           remaining=remaining,
                           percent_left=percent_left)
def enrollment(cid):
    """Handle single-student enrollment and render the course roster."""
    courses, current_course = get_courses(cid)

    single_form = forms.EnrollmentForm(prefix="single")
    if single_form.validate_on_submit():
        email, role = single_form.email.data, single_form.role.data
        Enrollment.enroll_from_form(cid, single_form)
        flash("Added {email} as {role}".format(email=email, role=role),
              "success")

    # filter() on a query returns a new query, so the eager-load base
    # can be shared between both roster lookups.
    base_query = Enrollment.query.options(db.joinedload('user'))
    students = (base_query
                .filter_by(course_id=cid, role=STUDENT_ROLE)
                .order_by(Enrollment.created.desc())
                .all())
    staff = (base_query
             .filter(Enrollment.course_id == cid,
                     Enrollment.role.in_(STAFF_ROLES))
             .all())

    return render_template('staff/course/enrollment.html',
                           enrollments=students,
                           staff=staff,
                           single_form=single_form,
                           courses=courses,
                           current_course=current_course)
def export_grades():
    """Job: export all students' grades for a course as a CSV upload.

    Builds one row per enrolled student from pre-collected scores, writes
    the CSV to an in-memory buffer, uploads it via ExternalFile, and
    returns the download path for the job result.
    """
    logger = jobs.get_job_logger()
    current_user = jobs.get_current_job().user
    course = Course.query.get(jobs.get_current_job().course_id)
    assignments = course.assignments
    students = (Enrollment.query.options(db.joinedload('user'))
                .filter(Enrollment.role == STUDENT_ROLE,
                        Enrollment.course == course)
                .all())
    # get_headers also returns the (possibly filtered/ordered) assignments
    # that correspond to the header columns.
    headers, assignments = get_headers(assignments)
    logger.info("Using these headers:")
    for header in headers:
        logger.info('\t' + header)
    logger.info('')
    total_students = len(students)
    users = [student.user for student in students]
    user_ids = [user.id for user in users]
    # Fetch every score up front so the per-student loop does no queries.
    all_scores = collect_all_scores(assignments, user_ids)
    with io.StringIO() as f:
        writer = csv.writer(f)
        writer.writerow(headers)  # write headers
        for i, student in enumerate(students, start=1):
            row = export_student_grades(student, assignments, all_scores)
            writer.writerow(row)
            if i % 50 == 0:
                # Periodic progress marker for long-running exports.
                logger.info('Exported {}/{}'.format(i, total_students))
        f.seek(0)
        created_time = local_time(dt.datetime.now(), course,
                                  fmt='%b-%-d %Y at %I-%M%p')
        csv_filename = '{course_name} Grades ({date}).csv'.format(
            course_name=course.display_name, date=created_time)
        # convert to bytes for csv upload
        csv_bytes = io.BytesIO(bytearray(f.read(), 'utf-8'))
        upload = ExternalFile.upload(csv_bytes,
                                     user_id=current_user.id,
                                     name=csv_filename,
                                     course_id=course.id,
                                     prefix='jobs/exports/{}/'.format(
                                         course.offering))
    logger.info('\nDone!\n')
    logger.info("Saved as: {0}".format(upload.object_name))
    return "/files/{0}".format(encode_id(upload.id))
def autograde_backup(bid):
    """Submit backup `bid` to the autograder (CSRF-protected POST)."""
    backup = Backup.query.options(db.joinedload('assignment')).get(bid)
    if not backup:
        abort(404)
    if not Backup.can(backup, current_user, 'grade'):
        flash("You do not have permission to score this assignment.", "warning")
        abort(401)

    form = forms.CSRFForm()
    if form.validate_on_submit():
        try:
            autograder.autograde_backup(backup)
        except ValueError as e:
            flash(str(e), 'error')
        else:
            flash('Submitted to the autograder', 'success')
    return redirect(url_for('.grading', bid=bid))
def enrollment_csv(cid):
    """Stream the student roster of course `cid` as a CSV download."""
    courses, current_course = get_courses(cid)
    roster = (Enrollment.query
              .options(db.joinedload('user'))
              .filter_by(course_id=cid, role=STUDENT_ROLE))

    export_columns = User.export_items + Enrollment.export_items

    def row_to_csv(row):
        # Each row carries the enrollment export plus the user export.
        return [row.export, row.user.export]

    csv_rows = utils.generate_csv(roster, export_columns, row_to_csv)
    file_name = "{0}-roster.csv".format(
        current_course.offering.replace('/', '-'))
    disposition = 'attachment; filename={0}'.format(file_name)
    return Response(stream_with_context(csv_rows),
                    mimetype='text/csv',
                    headers={'Content-Disposition': disposition})
def export_scores(cid, aid):
    """Stream a CSV of every active score for one assignment."""
    courses, current_course = get_courses(cid)
    assign = Assignment.query.filter_by(id=aid, course_id=cid).one_or_none()
    if not Assignment.can(assign, current_user, 'export'):
        flash('Insufficient permissions', 'error')
        return abort(401)

    score_query = (Score.query
                   .options(db.joinedload('backup'))
                   .filter_by(assignment=assign, archived=False))
    custom_items = ('time', 'is_late', 'email', 'group')
    items = custom_items + Enrollment.export_items + Score.export_items

    def generate_csv():
        """ Generate csv export of scores for assignment.
        Num Queries: ~2N queries for N scores.
        """
        # Yield Column Info as first row
        yield ','.join(items) + '\n'
        for score in score_query:
            buffer = StringIO()
            writer = csv.DictWriter(buffer, fieldnames=items)
            submitters = score.backup.enrollment_info()
            group = [s.user.email for s in submitters]
            time_str = utils.local_time(score.backup.created, current_course)
            # One CSV row per group member so everyone appears in the export.
            for submitter in submitters:
                row = {'email': submitter.user.email,
                       'time': time_str,
                       'is_late': score.backup.is_late,
                       'group': group}
                row.update(submitter.export)
                row.update(score.export)
                writer.writerow(row)
            yield buffer.getvalue()

    file_name = "{0}.csv".format(assign.name.replace('/', '-'))
    disposition = 'attachment; filename={0}'.format(file_name)
    # TODO: Remove. For local performance testing.
    # return render_template('staff/index.html', data=list(generate_csv()))
    return Response(stream_with_context(generate_csv()),
                    mimetype='text/csv',
                    headers={'Content-Disposition': disposition})
def audit_missing_scores(assign_id):
    """Job: log which submitting students lack scores of each kind."""
    logger = jobs.get_job_logger()
    assignment = Assignment.query.get(assign_id)
    data = assignment.course_submissions()

    # Partition students by whether they have a submission backup.
    with_subms, without_subms = set(), set()
    for record in data:
        if record['backup']:
            with_subms.add(record['user']['email'])
        else:
            without_subms.add(record['user']['email'])
    logger.info('Students with submissions: {}'.format(len(with_subms)))
    logger.info('Students without submissions: {}'.format(len(without_subms)))

    score_query = (Score.query
                   .options(db.joinedload('backup'))
                   .filter_by(assignment=assignment, archived=False))
    has_scores = defaultdict(set)
    for score in score_query.all():
        for submitter in score.backup.enrollment_info():
            has_scores[score.kind].add(submitter.user.email)

    logger.info("---" * 20)
    for score_kind in has_scores:
        scored = has_scores[score_kind]
        difference = with_subms.difference(scored)
        logger.info("Number of students with {} scores is {}".format(
            score_kind, len(scored)))
        logger.info("Number of students without {} scores is {}".format(
            score_kind, len(difference)))
        if difference and len(difference) < 200:
            logger.info("Students without {} scores: {}".format(
                score_kind, ', '.join(difference)))
        elif len(difference) >= 200:
            # Avoid creating very long lines.
            subset = list(difference)[:200]
            logger.info(
                "{} students do not have {} scores. Here are a few: {}".format(
                    len(difference), score_kind, ', '.join(subset)))
    logger.info("---" * 20)
def export_scores(cid, aid):
    """Stream a CSV download of all active scores for assignment `aid`.

    Fix: the row-time conversion referenced an undefined name `course`
    (only `courses`/`current_course` exist in this scope — compare the
    sibling export view), which raised a NameError on the first streamed
    row. It now uses `current_course`, matching the sibling implementation.
    """
    courses, current_course = get_courses(cid)
    assign = Assignment.query.filter_by(id=aid, course_id=cid).one_or_none()
    if not Assignment.can(assign, current_user, 'export'):
        flash('Insufficient permissions', 'error')
        return abort(401)

    query = (Score.query.options(db.joinedload('backup'))
             .filter_by(assignment=assign, archived=False))
    custom_items = ('time', 'is_late', 'email', 'group')
    items = custom_items + Enrollment.export_items + Score.export_items

    def generate_csv():
        """ Generate csv export of scores for assignment.
        Num Queries: ~2N queries for N scores.
        """
        # Yield Column Info as first row
        yield ','.join(items) + '\n'
        for score in query:
            csv_file = StringIO()
            csv_writer = csv.DictWriter(csv_file, fieldnames=items)
            submitters = score.backup.enrollment_info()
            group = [s.user.email for s in submitters]
            # Was `course` (undefined) — use the course resolved above.
            time_str = utils.local_time(score.backup.created, current_course)
            for submitter in submitters:
                data = {'email': submitter.user.email,
                        'time': time_str,
                        'is_late': score.backup.is_late,
                        'group': group}
                data.update(submitter.export)
                data.update(score.export)
                csv_writer.writerow(data)
            yield csv_file.getvalue()

    file_name = "{0}.csv".format(assign.name.replace('/', '-'))
    disposition = 'attachment; filename={0}'.format(file_name)
    # TODO: Remove. For local performance testing.
    # return render_template('staff/index.html', data=list(generate_csv()))
    return Response(stream_with_context(generate_csv()),
                    mimetype='text/csv',
                    headers={'Content-Disposition': disposition})
def audit_missing_scores(assign_id):
    """Job: report, per score kind, students with submissions but no score."""
    logger = jobs.get_job_logger()
    assignment = Assignment.query.get(assign_id)
    data = assignment.course_submissions()

    submitted = {s['user']['email'] for s in data if s['backup']}
    unsubmitted = {s['user']['email'] for s in data if not s['backup']}
    logger.info('Students with submissions: {}'.format(len(submitted)))
    logger.info('Students without submissions: {}'.format(len(unsubmitted)))

    all_scores = (Score.query
                  .options(db.joinedload('backup'))
                  .filter_by(assignment=assignment, archived=False)
                  .all())
    # Map score kind -> set of emails that already have that kind of score.
    scored_by_kind = defaultdict(set)
    for score in all_scores:
        for enrollment in score.backup.enrollment_info():
            scored_by_kind[score.kind].add(enrollment.user.email)

    logger.info("---"*20)
    for score_kind in scored_by_kind:
        covered = scored_by_kind[score_kind]
        difference = submitted.difference(covered)
        logger.info("Number of students with {} scores is {}".format(
            score_kind, len(covered)))
        logger.info("Number of students without {} scores is {}".format(
            score_kind, len(difference)))
        if difference and len(difference) < 200:
            logger.info("Students without {} scores: {}".format(
                score_kind, ', '.join(difference)))
        elif len(difference) >= 200:
            # Avoid creating very long lines.
            subset = list(difference)[:200]
            logger.info("{} students do not have {} scores. Here are a few: {}"
                        .format(len(difference), score_kind, ', '.join(subset)))
    logger.info("---"*20)
def export_grades():
    """Job: export a course's full grade report as an uploaded CSV.

    Writes one row per enrolled student, uploads the buffer through
    ExternalFile, and returns the resulting download path.
    """
    logger = jobs.get_job_logger()
    current_user = jobs.get_current_job().user
    course = Course.query.get(jobs.get_current_job().course_id)
    assignments = course.assignments
    students = (Enrollment.query
                .options(db.joinedload('user'))
                .filter(Enrollment.role == STUDENT_ROLE,
                        Enrollment.course == course)
                .all())
    # get_headers also returns the assignments matching the header columns.
    headers, assignments = get_headers(assignments)
    logger.info("Using these headers:")
    for header in headers:
        logger.info('\t' + header)
    logger.info('')
    total_students = len(students)
    with io.StringIO() as f:
        writer = csv.writer(f)
        writer.writerow(headers)  # write headers
        for i, student in enumerate(students, start=1):
            row = export_student_grades(student, assignments)
            writer.writerow(row)
            if i % 50 == 0:
                # Progress marker for long-running exports.
                logger.info('Exported {}/{}'.format(i, total_students))
        f.seek(0)
        created_time = local_time(dt.datetime.now(), course,
                                  fmt='%b-%-d %Y at %I-%M%p')
        csv_filename = '{course_name} Grades ({date}).csv'.format(
            course_name=course.display_name, date=created_time)
        # convert to bytes for csv upload
        csv_bytes = io.BytesIO(bytearray(f.read(), 'utf-8'))
        upload = ExternalFile.upload(csv_bytes,
                                     user_id=current_user.id,
                                     name=csv_filename,
                                     course_id=course.id,
                                     prefix='jobs/exports/{}/'.format(
                                         course.offering))
    logger.info('\nDone!\n')
    logger.info("Saved as: {0}".format(upload.object_name))
    return "/files/{0}".format(encode_id(upload.id))
def grade(bid):
    """ Used as a form submission endpoint.

    Creates a Score for backup `bid` from the posted grade form, attaches
    the score to any GradingTasks for the backup, and redirects the grader
    to the next appropriate page (next task in their queue, the task list,
    or the assignment queues page).
    """
    backup = Backup.query.options(db.joinedload('assignment')).get(bid)
    if not backup:
        abort(404)
    if not Backup.can(backup, current_user, 'grade'):
        flash("You do not have permission to score this assignment.", "warning")
        abort(401)
    form = forms.GradeForm()
    score_kind = form.kind.data.strip().lower()
    is_composition = (score_kind == "composition")
    # TODO: Form should include redirect url instead of guessing based off tag
    if is_composition:
        # Composition scores use a dedicated form with different fields.
        form = forms.CompositionScoreForm()
    if not form.validate_on_submit():
        return grading_view(backup, form=form)
    score = Score(backup=backup, grader=current_user,
                  assignment_id=backup.assignment_id)
    form.populate_obj(score)
    db.session.add(score)
    db.session.commit()
    # Archive old scores of the same kind
    score.archive_duplicates()
    next_page = None
    flash_msg = "Added a {0} {1} score.".format(score.score, score_kind)
    # Find GradingTasks applicable to this score
    tasks = backup.grading_tasks
    for task in tasks:
        task.score = score
        # Invalidate the grader's cached task count now that one is done.
        cache.delete_memoized(User.num_grading_tasks, task.grader)
    db.session.commit()
    if len(tasks) == 1:
        # Go to next task for the current task queue if possible.
        task = tasks[0]
        next_task = task.get_next_task()
        next_route = '.composition' if is_composition else '.grading'
        # Handle case when the task is on the users queue
        if next_task:
            flash_msg += (" There are {0} tasks left. Here's the next submission:"
                          .format(task.remaining))
            next_page = url_for(next_route, bid=next_task.backup_id)
        else:
            flash_msg += " All done with grading for {}".format(
                backup.assignment.name)
            next_page = url_for('.grading_tasks')
    else:
        # TODO: Send task id or redirect_url in the grading form
        # For now, default to grading tasks
        next_page = url_for('.grading_tasks')
    flash(flash_msg, 'success')
    if not next_page:
        next_page = url_for('.assignment_queues', aid=backup.assignment_id,
                            cid=backup.assignment.course_id)
    return redirect(next_page)
def submit_to_moss(moss_id=None, file_regex=".*", assignment_id=None,
                   language=None):
    """Job: upload all final submissions of an assignment to MOSS.

    Writes each submission's files (plus the assignment templates) into a
    temp directory and shells out to the MOSS perl client. `file_regex`
    selects which file names are included.
    """
    logger = jobs.get_job_logger()
    logger.info('Starting MOSS Export...')
    assign = Assignment.query.filter_by(id=assignment_id).one_or_none()
    if not assign:
        logger.info("Could not find assignment")
        return
    subms = assign.course_submissions(include_empty=False)
    # Deduplicate backups (group submissions appear once per member).
    subm_keys = set()
    for subm in subms:
        if subm['backup']['id'] in subm_keys:
            continue
        else:
            subm_keys.add(subm['backup']['id'])
        if subm['group']:
            group_members = subm['group']['group_member_emails'] or []
            group_members.append(subm['user']['email'])
            logger.info("{} -> {}".format(encode_id(subm['backup']['id']),
                                          ', '.join(group_members)))
        else:
            logger.info("{} -> {}".format(encode_id(subm['backup']['id']),
                                          subm['user']['email']))
    backup_query = (Backup.query.options(db.joinedload('messages'))
                    .filter(Backup.id.in_(subm_keys))
                    .order_by(Backup.created.desc())
                    .all())
    logger.info("Retreived {} final submissions".format(len(subm_keys)))
    # TODO: Customize the location of the tmp writing (especially useful during dev)
    with tempfile.TemporaryDirectory() as tmp_dir:
        # Copy in the moss script
        with open('server/jobs/moss-submission.pl', 'r') as f:
            moss_script = f.read()
        moss_script = moss_script.replace('YOUR_USER_ID_HERE', str(moss_id))
        with open(tmp_dir + "/moss.pl", 'w') as script:
            script.write(moss_script)
        match_pattern = re.compile(file_regex)
        ignored_files = set()
        for backup in backup_query:
            # Write file into file
            file_contents = [m for m in backup.messages
                             if m.kind == 'file_contents']
            if not file_contents:
                logger.info("{} didn't have any file contents".format(
                    backup.hashid))
                continue
            contents = file_contents[0].contents
            dest_dir = "{}/{}/".format(tmp_dir, backup.hashid)
            if not os.path.exists(dest_dir):
                os.makedirs(dest_dir)
            for file in contents:
                if file == 'submit':  # ignore fake file from ok-client
                    continue
                if match_pattern.match(file):
                    with open(dest_dir + file, 'w') as f:
                        f.write(contents[file])
                else:
                    ignored_files.add(file)
        # tmp_dir contains folders of the form: backup_hashid/file1.py
        os.chdir(tmp_dir)
        all_student_files = glob.glob("*/*")
        logger.info("Wrote all files to {}".format(tmp_dir))
        if ignored_files:
            logger.info("Regex {} ignored files with names: {}".format(
                file_regex, ignored_files))
        else:
            logger.info(
                "Regex {} has captured all possible files".format(file_regex))
        # Write assignment templates so MOSS can exclude starter code (-b).
        template_files = []
        for template in assign.files:
            dest = "{}/{}".format(tmp_dir, template)
            with open(dest, 'w') as f:
                f.write(assign.files[template])
            template_files.append(template)
        logger.info("Using template files: {}".format(
            ' '.join(template_files)))
        templates = ' '.join(
            ["-b {file}".format(file=f) for f in template_files])
        if not all_student_files:
            raise Exception("Did not match any files")
        # Ensure that all of the files are in the tmp_dir (and not elsewhere)
        command = ("perl moss.pl -l {lang} {templates} -d {folder}".format(
            lang=language, templates=templates,
            folder=' '.join(all_student_files)))
        logger.info("Running {}".format(command[:100] + ' ...'))
        try:
            process = subprocess.check_output(shlex.split(command),
                                              stderr=subprocess.STDOUT)
            logger.info(process.decode("utf-8"))
        except subprocess.CalledProcessError as e:
            logger.warning("There was an error running the Moss Upload.")
            logger.info("{}".format(e.output.decode('utf-8')))
            raise e
def submit_to_moss(moss_id=None, file_regex=".*", assignment_id=None,
                   language=None, subtract_template=False):
    """Job: upload all final submissions of an assignment to MOSS.

    With `subtract_template`, each submitted file is diffed against the
    assignment template and only student-added lines are sent (and the
    templates themselves are not passed to MOSS via -b). Returns the MOSS
    results URL when the client output contains one.
    """
    logger = jobs.get_job_logger()
    logger.info('Starting MOSS Export...')
    assign = Assignment.query.filter_by(id=assignment_id).one_or_none()
    if not assign:
        logger.info("Could not find assignment")
        return
    subms = assign.course_submissions(include_empty=False)
    # Deduplicate backups (group submissions appear once per member).
    subm_keys = set()
    for subm in subms:
        if subm['backup']['id'] in subm_keys:
            continue
        else:
            subm_keys.add(subm['backup']['id'])
        if subm['group']:
            group_members = subm['group']['group_member_emails'] or []
            group_members.append(subm['user']['email'])
            logger.info("{} -> {}".format(encode_id(subm['backup']['id']),
                                          ', '.join(group_members)))
        else:
            logger.info("{} -> {}".format(encode_id(subm['backup']['id']),
                                          subm['user']['email']))
    backup_query = (Backup.query.options(db.joinedload('messages'))
                    .filter(Backup.id.in_(subm_keys))
                    .order_by(Backup.created.desc())
                    .all())
    logger.info("Retreived {} final submissions".format(len(subm_keys)))
    # TODO: Customize the location of the tmp writing (especially useful during dev)
    with tempfile.TemporaryDirectory() as tmp_dir:
        # Copy in the moss script
        with open('server/jobs/moss-submission.pl', 'r') as f:
            moss_script = f.read()
        moss_script = moss_script.replace('YOUR_USER_ID_HERE', str(moss_id))
        with open(tmp_dir + "/moss.pl", 'w') as script:
            script.write(moss_script)
        match_pattern = re.compile(file_regex)
        ignored_files = set()
        # Write assignment templates into the temp dir.
        template_files = []
        for template in assign.files:
            dest = os.path.join(tmp_dir, template)
            with open(dest, 'w') as f:
                f.write(assign.files[template])
            template_files.append(template)
        logger.info("Using template files: {}".format(
            ' '.join(template_files)))
        if subtract_template:
            logger.info("Subtract Template Enabled: Not sending templates through MOSS")
            templates = ''
        else:
            templates = ' '.join(
                ["-b {file}".format(file=f) for f in template_files])
        for backup in backup_query:
            # Write file into file
            file_contents = [m for m in backup.messages
                             if m.kind == 'file_contents']
            if not file_contents:
                logger.info("{} didn't have any file contents".format(
                    backup.hashid))
                continue
            contents = file_contents[0].contents
            dest_dir = os.path.join(tmp_dir, backup.hashid)
            if not os.path.isdir(dest_dir):
                os.makedirs(dest_dir)
            for file in contents:
                if file == 'submit':  # ignore fake file from ok-client
                    continue
                if subtract_template and file in assign.files:
                    # Compare to template and only include lines that are new
                    template, source = assign.files[file], contents[file]
                    d = difflib.Differ(linejunk=difflib.IS_LINE_JUNK,
                                       charjunk=difflib.IS_CHARACTER_JUNK)
                    diff = d.compare(template.splitlines(keepends=True),
                                     source.splitlines(keepends=True))
                    added = [line[1:] for line in diff if line[0] == '+']
                    contents[file] = ''.join(added)
                if match_pattern.match(file):
                    with open(os.path.join(dest_dir, file), 'w') as f:
                        f.write(contents[file])
                else:
                    ignored_files.add(file)
        # tmp_dir contains folders of the form: backup_hashid/file1.py
        os.chdir(tmp_dir)
        all_student_files = glob.glob("*/*")
        logger.info("Wrote all files to {}".format(tmp_dir))
        if ignored_files:
            logger.info("Regex {} ignored files with names: {}".format(
                file_regex, ignored_files))
        else:
            logger.info(
                "Regex {} has captured all possible files".format(file_regex))
        if not all_student_files:
            raise Exception("Did not match any files")
        # Ensure that all of the files are in the tmp_dir (and not elsewhere)
        command = ("perl moss.pl -l {lang} {templates} -d {folder}"
                   .format(lang=language, templates=templates,
                           folder=' '.join(all_student_files)))
        logger.info("Running {}".format(command[:100] + ' ...'))
        try:
            process = subprocess.check_output(shlex.split(command),
                                              stderr=subprocess.STDOUT)
            moss_output = process.decode("utf-8")
            logger.info(moss_output)
            last_line = moss_output
            # Return the results URL if the client printed one.
            if 'moss.stanford' in last_line:
                return last_line
        except subprocess.CalledProcessError as e:
            logger.warning("There was an error running the Moss Upload.")
            logger.info("{}".format(e.output.decode('utf-8')))
            raise e
def moss_submit(moss_id, submissions, ref_submissions, language, template,
                review_threshold=101, max_matches=MAX_MATCHES,
                file_regex='.*', num_results=NUM_RESULTS):
    """ Sends SUBMISSIONS and REF_SUBMISSIONS to Moss using MOSS_ID,
    LANGUAGE, and MAX_MATCHES. Stores results involving SUBMISSIONS in
    database.

    Fix: removed two stray debug ``print()`` calls (`num_results` and the
    raw Moss handshake reply) left over from development — this job logs
    through the job logger, not stdout.
    """
    # ISSUE: Does not work for .ipynb files well (maybe just use sources?)
    logger = jobs.get_job_logger()
    logger.info('Connecting to Moss...')
    moss = socket.socket()
    moss.connect(('moss.stanford.edu', 7690))
    # Moss wire protocol: option lines, then files, then "query"/"end".
    moss.send('moss {}\n'.format(moss_id).encode())
    moss.send('directory 1\n'.encode())
    moss.send('X 0\n'.encode())
    moss.send('maxmatches {}\n'.format(max_matches).encode())
    moss.send('show {}\n'.format(num_results).encode())
    moss.send('language {}\n'.format(language).encode())
    # Server answers 'yes' when the id/language handshake is accepted.
    moss_success = moss.recv(1024).decode().strip()
    moss_success = moss_success == 'yes'
    if not moss_success:
        moss.close()
        logger.info('FAILED to connect to Moss. Common issues:')
        logger.info('- Make sure your Moss ID is a number, and not your email address.')
        logger.info('- Check you typed your Moss ID correctly.')
        return

    subm_keys = set()
    hashed_subm_keys = set()
    for subm in submissions:
        subm_keys.add(subm['backup']['id'])
        hashed_subm_keys.add(encode_id(subm['backup']['id']))
    for subm in ref_submissions:
        subm_keys.add(subm['backup']['id'])

    backup_query = (Backup.query.options(db.joinedload('messages'))
                    .filter(Backup.id.in_(subm_keys))
                    .order_by(Backup.created.desc())
                    .all())

    match_pattern = re.compile(file_regex)
    if template:
        logger.info('Uploading template...')
        # Concatenate all matching template files into a single upload.
        merged_contents = ""
        for filename in template:
            if filename == 'submit' or not match_pattern.match(filename):
                continue
            merged_contents += template[filename] + '\n'
        send_file(moss, 'allcode', merged_contents, 0, language)

    fid = 0
    logger.info('Uploading submissions...')
    for backup in backup_query:
        file_contents = [m for m in backup.messages
                         if m.kind == 'file_contents']
        if not file_contents:
            logger.info("{} didn't have any file contents".format(
                backup.hashid))
            continue
        contents = file_contents[0].contents
        # Merge each backup's matching files into one per-backup upload.
        merged_contents = ""
        for filename in sorted(contents.keys()):
            if filename == 'submit' or not match_pattern.match(filename):
                continue
            merged_contents += contents[filename] + '\n'
        fid += 1
        path = os.path.join(backup.hashid, 'allcode')
        send_file(moss, path, merged_contents, fid, language)

    moss.send("query 0 Submitted via okpy.org\n".encode())
    logger.info('Awaiting response...')
    url = moss.recv(1024).decode().strip()
    moss.send("end\n".encode())
    moss.close()
    logger.info('Moss results at: {}'.format(url))
    parse_moss_results(url, hashed_subm_keys, logger, match_pattern,
                       template, review_threshold)