def score_from_csv(assign_id, rows, kind='total', invalid=None, message=None):
    """
    Job for uploading Scores.

    @param ``assign_id`` primary key of the Assignment being scored
    @param ``rows`` should be a list of records (mappings), with labels
        `email` and `score`
    @param ``kind`` score category (e.g. 'total'); used in the default message
    @param ``invalid`` optional list of input line numbers the caller already
        rejected; they are logged up front and never processed
    @param ``message`` optional grader message; defaults to
        '<Kind> score for <assignment display name>'

    Returns the admin scores-page URL for the assignment, used as the job's
    result link.
    """
    log = jobs.get_job_logger()
    current_user = jobs.get_current_job().user
    assign = Assignment.query.get(assign_id)
    # Default grader message, e.g. "Total score for Hog".
    message = message or '{} score for {}'.format(kind.title(), assign.display_name)

    def log_err(msg):
        # Errors are written to the same job log, prefixed so they stand out.
        log.info('\t! {}'.format(msg))

    log.info("Uploading scores for {}:\n".format(assign.display_name))

    if invalid:
        # Report pre-rejected entries (by input line number) before starting.
        log_err('skipping {} invalid entries on lines:'.format(len(invalid)))
        for line in invalid:
            log_err('\t{}'.format(line))
        log.info('')

    success, total = 0, len(rows)
    for i, row in enumerate(rows, start=1):
        try:
            email, score = row['email'], row['score']
            # .one() raises NoResultFound (a SQLAlchemyError) when no user
            # has this email.
            user = User.query.filter_by(email=email).one()
            backup = Backup.query.filter_by(assignment=assign, submitter=user,
                                            submit=True).first()
            if not backup:
                # No submission exists yet -- create a placeholder backup to
                # attach the score to.
                backup = Backup.create(submitter=user, assignment=assign,
                                       submit=True)
            uploaded_score = Score(grader=current_user, assignment=assign,
                                   backup=backup, user=user, score=score,
                                   kind=kind, message=message)
            db.session.add(uploaded_score)
            # Supersede any earlier scores of the same kind for this user.
            uploaded_score.archive_duplicates()
        except SQLAlchemyError:
            # NOTE(review): any SQLAlchemyError lands here, so non-lookup DB
            # failures are reported with this same "does not exist" message.
            print_exc()
            log_err('error: user with email `{}` does not exist'.format(email))
        else:
            success += 1
        if i % 100 == 0:
            # Periodic progress output for long uploads.
            log.info('\nUploaded {}/{} Scores\n'.format(i, total))
    # Single commit after the loop persists all added scores at once.
    db.session.commit()

    log.info('\nSuccessfully uploaded {} "{}" scores (with {} errors)'.format(
        success, kind, total - success))
    # Job result: link the admin back to the assignment's scores page.
    return '/admin/course/{cid}/assignments/{aid}/scores'.format(
        cid=jobs.get_current_job().course_id, aid=assign_id)
def gen_backup(user, assignment):
    """Build one randomized Backup for *user* on *assignment*.

    The backup's creation time is jittered within roughly +/- a day of the
    assignment's due date; ~30% of generated backups are marked as actual
    submissions (and get an empty ``submit`` entry in their file contents).
    """
    jitter_seconds = random.randrange(-100000, 100000)
    payload = gen_messages(assignment, jitter_seconds)
    is_submission = gen_bool(0.3)
    if is_submission:
        payload['file_contents']['submit'] = ''
    created_at = assignment.due_date + datetime.timedelta(seconds=jitter_seconds)
    backup = Backup.create(created=created_at,
                           submitter=user,
                           assignment=assignment,
                           submit=is_submission)
    backup.messages = [Message(kind=msg_kind, contents=msg_body)
                       for msg_kind, msg_body in payload.items()]
    return backup
def submit_assignment(name):
    """Flask view for student web uploads to the assignment named *name*.

    GET renders the submission form; POST creates a Backup from the uploaded
    files, storing text files inline and binary files via ExternalFile, then
    returns JSON with the new backup's id and code-view URL.  Rejects uploads
    when they are disabled, past the deadline without an extension, missing
    required template files, or over the size limit.
    """
    # TODO: Unify student & staff upload.
    assign = get_assignment(name)
    group = Group.lookup(current_user, assign)
    user_ids = assign.active_user_ids(current_user.id)
    # NOTE(review): `fs` is never used below -- presumably kept for the
    # template at some point; confirm before removing.
    fs = assign.final_submission(user_ids)

    if not assign.uploads_enabled:
        flash("This assignment cannot be submitted online", 'warning')
        return redirect(url_for('.assignment', name=assign.name))

    extension = None  # No need for an extension
    if not assign.active:
        # Past the deadline: only allowed through an explicit extension.
        extension = Extension.get_extension(current_user, assign)
        if not extension:
            flash("It's too late to submit this assignment", 'warning')
            return redirect(url_for('.assignment', name=assign.name))

    if request.method == "POST":
        backup = Backup.create(
            submitter=current_user,
            assignment=assign,
            submit=True,
        )
        assignment = backup.assignment
        if extension:
            # Record the extension's effective time on the backup.
            backup.custom_submission_time = extension.custom_submission_time
        templates = assignment.files or []
        files = {}

        def extract_file_index(file_ind):
            """ Get the index of of file objects. Used because
            request.files.getlist() does not handle uniquely indexed lists.
            >>> extract_file_index('file[12]')
            12
            """
            brace_loc = file_ind.find('[')
            index_str = file_ind[brace_loc+1:-1]
            return int(index_str)

        # A list of one element lists; sort by the numeric index embedded in
        # each field name (e.g. 'file[3]') so uploads align with their paths.
        sorted_uploads = sorted(list(request.files.items()),
                                key=lambda x: extract_file_index(x[0]))
        uploads = [v[1] for v in sorted_uploads]
        full_path_names = list(request.form.listvalues())[0]

        # Every template file (matched by basename) must be present.
        template_files = assign.files or []
        file_names = [os.path.split(f)[1] for f in full_path_names]
        missing = [t for t in template_files if t not in file_names]
        if missing:
            return jsonify({
                'error': ('Missing files: {}. The following files are required: {}'
                          .format(', '.join(missing), ', '.join(template_files)))
            }), 400

        # Shared timestamp so all binary files of this backup land in one
        # upload folder.
        backup_folder_postfix = time.time()

        for full_path, upload in zip(full_path_names, uploads):
            data = upload.read()
            if len(data) > MAX_UPLOAD_FILE_SIZE:  # file is too large (over 25 MB)
                return jsonify({
                    'error': ('{} is larger than the maximum file size of {} MB'
                              .format(full_path, MAX_UPLOAD_FILE_SIZE/1024/1024))
                }), 400
            try:
                # Text files are stored inline in the backup's message.
                files[full_path] = str(data, 'utf-8')
            except UnicodeDecodeError:
                # Binary file: store externally instead of inline.
                upload.stream.seek(0)  # We've already read data, so reset before uploading
                dest_folder = "uploads/{}/{}/{}/".format(
                    assign.name, current_user.id, backup_folder_postfix)
                bin_file = ExternalFile.upload(upload.stream, current_user.id,
                                               full_path, staff_file=False,
                                               prefix=dest_folder,
                                               course_id=assign.course.id,
                                               backup=backup,
                                               assignment_id=assign.id)
                db.session.add(bin_file)

        message = Message(kind='file_contents', contents=files)
        backup.messages.append(message)
        db.session.add(backup)
        db.session.commit()

        # Send to continuous autograder
        if assign.autograding_key and assign.continuous_autograding:
            try:
                submit_continuous(backup)
            except ValueError as e:
                # Autograder submission is best-effort: warn, don't fail.
                flash('Did not send to autograder: {}'.format(e), 'warning')

        return jsonify({
            'backup': backup.hashid,
            'url': url_for('.code', name=assign.name, submit=backup.submit,
                           bid=backup.id)
        })
    return render_template('student/assignment/submit.html',
                           assignment=assign, group=group, course=assign.course)