def test_delete(self):
    """ Students should not be able to delete files; staff should.
    A successful deletion marks the file as deleted. """
    self.assertFalse(ExternalFile.can(self.file1, self.user1, 'delete'))
    self.assertTrue(ExternalFile.can(self.file1, self.staff1, 'delete'))
    self.file1.delete()
    self.assertTrue(self.file1.deleted)

def file_download(file_id, user):
    ext_file = ExternalFile.query.filter_by(id=file_id, deleted=False).first()
    if not ext_file or not ExternalFile.can(ext_file, user, 'download'):
        logger.info("Access file without permission by {0}".format(user.email))
        abort(404)
    try:
        storage_obj = ext_file.object()
    except libcloud.common.types.InvalidCredsError:
        logger.warning("Could not get file {0} - {1}".format(file_id, ext_file.filename),
                       exc_info=True)
        storage_obj = None
    if storage_obj is None:
        abort(404, "File does not exist")
    basename = os.path.basename(ext_file.filename)
    # Do not use .download_url for local storage.
    if storage.provider == libcloud.storage.types.Provider.LOCAL:
        response = Response(storage.get_object_stream(storage_obj),
                            mimetype=ext_file.mimetype)
        response.headers["Content-Security-Policy"] = "default-src 'none';"
        response.headers["X-Content-Type-Options"] = "nosniff"
        response.headers["Content-Disposition"] = ("attachment; filename={0!s}"
                                                   .format(basename))
        return response
    else:
        postpend = '&'
        if request.args.get('raw'):
            postpend += urlencode({'response-content-type': ext_file.mimetype})
        elif request.args.get('download'):
            postpend += urlencode({'response-content-disposition': 'attachment',
                                   'filename': basename})
        url = storage.get_blob_url(storage_obj.name)
        return redirect(url + postpend)

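# --- Illustrative sketch, not from the original source ---
# The export jobs below return download links of the form "/files/{encode_id(upload.id)}",
# which suggests a thin Flask view that decodes the hashed id and hands off to
# file_download(). The blueprint, route path, and decode_id() helper here are
# assumptions for illustration; only encode_id() appears in the original code.
from flask import Blueprint
from flask_login import current_user, login_required

files_bp = Blueprint('files', __name__)  # hypothetical blueprint name

@files_bp.route('/files/<string:hashed_id>')
@login_required
def serve_file(hashed_id):
    file_id = decode_id(hashed_id)  # assumed inverse of encode_id()
    return file_download(file_id, current_user)
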
def test_duplicate_overwrite(self):
    with open(CWD + "/files/fizzbuzz_after.py", 'rb') as f:
        duplicate = ExternalFile.upload(f, name='fizz.txt', user_id=self.staff1.id,
                                        course_id=self.course.id)
    duplicate_obj = duplicate.object()
    self.assertEqual(self.blob1.driver.key, duplicate_obj.driver.key)
    self.assertEqual(self.file1.filename, duplicate.filename)
    self.assertEqual(self.blob1.name, duplicate_obj.name)
    duplicate_obj.delete()

def setUp(self):
    super(TestFile, self).setUp()
    self.setup_course()
    with open(CWD + "/files/fizzbuzz_after.py", 'rb') as f:
        self.file1 = ExternalFile.upload(f, name='fizz.txt', user_id=self.staff1.id,
                                         staff_file=True, course_id=self.course.id)
    self.blob1 = self.file1.object()
    with open(CWD + "/../server/static/img/logo.svg", 'rb') as f:
        self.file2 = ExternalFile.upload(f, name='ok.svg', user_id=self.user1.id,
                                         course_id=self.course.id, staff_file=False,
                                         assignment_id=self.assignment.id)
    self.blob2 = self.file2.object()
    db.session.add_all([self.file1, self.file2])
    db.session.commit()

def test_malicious_directory_traversal(self):
    with open(CWD + "/files/fizzbuzz_after.py", 'rb') as f:
        prefix = ExternalFile.upload(f, name='fizz.txt', user_id=self.staff1.id,
                                     prefix='test/../../', course_id=self.course.id)
    prefix_obj = prefix.object()
    self.assertEqual(self.blob1.driver.key, prefix_obj.driver.key)
    self.assertEqual(self.blob1.container.name, prefix.container)
    self.assertEqual(self.blob1.name, self.file1.object_name)
    self.assertEqual(prefix_obj.name, self.test_malicious_directory_traversal_expected_obj_name)
    self.assertEqual(prefix.filename, 'fizz.txt')
    prefix_obj.delete()

def test_prefix(self):
    with open(CWD + "/files/fizzbuzz_after.py", 'rb') as f:
        prefix = ExternalFile.upload(f, name='fizz.txt', user_id=self.staff1.id,
                                     prefix='test/', course_id=self.course.id)
    prefix_obj = prefix.object()
    self.assertEqual(self.blob1.driver.key, prefix_obj.driver.key)
    self.assertEqual(self.blob1.container.name, prefix.container)
    self.assertEqual(self.blob1.name, self.file1.object_name)
    self.assertEqual(prefix_obj.name, self.test_prefix_expected_obj_name)
    self.assertEqual(prefix.filename, 'fizz.txt')
    prefix_obj.delete()

def test_permission(self):
    # Students cannot access staff files
    self.assertTrue(ExternalFile.can(self.file1, self.staff1, 'download'))
    self.assertFalse(ExternalFile.can(self.file1, self.user1, 'download'))
    self.assertFalse(ExternalFile.can(self.file1, self.lab_assistant1, 'download'))
    # Staff and the owning student can access student files; other students cannot
    self.assertTrue(ExternalFile.can(self.file2, self.user1, 'download'))
    self.assertTrue(ExternalFile.can(self.file2, self.staff1, 'download'))
    self.assertFalse(ExternalFile.can(self.file2, self.user2, 'download'))

def export_grades():
    logger = jobs.get_job_logger()
    current_user = jobs.get_current_job().user
    course = Course.query.get(jobs.get_current_job().course_id)
    assignments = course.assignments
    students = (Enrollment.query.options(db.joinedload('user'))
                .filter(Enrollment.role == STUDENT_ROLE, Enrollment.course == course)
                .all())

    headers, assignments = get_headers(assignments)
    logger.info("Using these headers:")
    for header in headers:
        logger.info('\t' + header)
    logger.info('')

    total_students = len(students)
    users = [student.user for student in students]
    user_ids = [user.id for user in users]
    all_scores = collect_all_scores(assignments, user_ids)

    with io.StringIO() as f:
        writer = csv.writer(f)
        writer.writerow(headers)  # write headers
        for i, student in enumerate(students, start=1):
            row = export_student_grades(student, assignments, all_scores)
            writer.writerow(row)
            if i % 50 == 0:
                logger.info('Exported {}/{}'.format(i, total_students))
        f.seek(0)

        created_time = local_time(dt.datetime.now(), course, fmt='%b-%-d %Y at %I-%M%p')
        csv_filename = '{course_name} Grades ({date}).csv'.format(
            course_name=course.display_name, date=created_time)

        # convert to bytes for csv upload
        csv_bytes = io.BytesIO(bytearray(f.read(), 'utf-8'))
        upload = ExternalFile.upload(csv_bytes, user_id=current_user.id,
                                     name=csv_filename, course_id=course.id,
                                     prefix='jobs/exports/{}/'.format(course.offering))

    logger.info('\nDone!\n')
    logger.info("Saved as: {0}".format(upload.object_name))
    return "/files/{0}".format(encode_id(upload.id))

def test_group_permission(self):
    Group.invite(self.user1, self.user2, self.assignment)
    group = Group.lookup(self.user1, self.assignment)

    # Until the invite is accepted, only the original submitter and staff can access the files
    self.assertTrue(ExternalFile.can(self.file2, self.user1, 'download'))
    self.assertTrue(ExternalFile.can(self.file2, self.staff1, 'download'))
    self.assertFalse(ExternalFile.can(self.file2, self.user2, 'download'))

    group.accept(self.user2)

    # Now all group members can access the files
    self.assertTrue(ExternalFile.can(self.file2, self.user1, 'download'))
    self.assertTrue(ExternalFile.can(self.file2, self.staff1, 'download'))
    self.assertTrue(ExternalFile.can(self.file2, self.user2, 'download'))
    self.assertFalse(ExternalFile.can(self.file2, self.user3, 'download'))

def test_job(duration=0, should_fail=False, make_file=False):
    logger = jobs.get_job_logger()
    logger.info('Starting...')
    time.sleep(duration)
    if should_fail:
        1 / 0  # deliberately raise ZeroDivisionError to exercise job failure handling
    if make_file:
        upload = ExternalFile.upload(data(duration + 1), user_id=1, course_id=1,
                                     name='temp.okfile', prefix='jobs/example/')
        logger.info("Saved as: {}".format(upload.object_name))
        logger.info('File ID: {0}'.format(encode_id(upload.id)))
        msg = ("Waited for <a href='/files/{0}'> {1} seconds </a>"
               .format(encode_id(upload.id), duration))
    else:
        msg = "Waited for <b>{}</b> seconds!".format(duration)
    logger.info('Finished!')
    return msg

def export_assignment(assignment_id, anonymized):
    """ Generate a zip file of submissions from enrolled students.

    Final Submission: One submission per student/group
        Zip Structure: cal-cs61a../[email protected]@b.com/abc12d/hog.py
    Anonymized: Submission without identifying info
        Zip Structure: cal-cs61a../{hash}/hog.py
    """
    logger = jobs.get_job_logger()
    assignment = Assignment.query.get(assignment_id)
    requesting_user = jobs.get_current_job().user
    if not assignment:
        logger.warning("No assignment found")
        raise Exception("No Assignment")
    if not Assignment.can(assignment, requesting_user, "download"):
        raise Exception("{} does not have enough permission"
                        .format(requesting_user.email))

    if anonymized:
        logger.info("Starting anonymized submission export")
    else:
        logger.info("Starting final submission export")

    course = assignment.course
    with io.BytesIO() as bio:
        # Get a handle to the in-memory zip
        with zipfile.ZipFile(bio, "w", zipfile.ZIP_DEFLATED, False) as zf:
            zf.external_attr = 0o655 << 16
            export_loop(bio, zf, logger, assignment, anonymized)
            created_time = local_time(dt.datetime.now(), course, fmt='%m-%d-%I-%M-%p')
            zip_name = '{}_{}.zip'.format(assignment.name.replace('/', '-'), created_time)

        bio.seek(0)
        # Close zf handle to finish writing zipfile
        logger.info("Uploading...")
        upload = ExternalFile.upload(bio, user_id=requesting_user.id, name=zip_name,
                                     course_id=course.id,
                                     prefix='jobs/exports/{}/'.format(course.offering))
        logger.info("Saved as: {0}".format(upload.object_name))
        msg = "/files/{0}".format(encode_id(upload.id))
        return msg

def submit_assignment(name):
    # TODO: Unify student & staff upload.
    assign = get_assignment(name)
    group = Group.lookup(current_user, assign)
    user_ids = assign.active_user_ids(current_user.id)
    fs = assign.final_submission(user_ids)
    if not assign.uploads_enabled:
        flash("This assignment cannot be submitted online", 'warning')
        return redirect(url_for('.assignment', name=assign.name))

    extension = None  # No need for an extension
    if not assign.active:
        extension = Extension.get_extension(current_user, assign)
        if not extension:
            flash("It's too late to submit this assignment", 'warning')
            return redirect(url_for('.assignment', name=assign.name))

    if request.method == "POST":
        backup = Backup.create(
            submitter=current_user,
            assignment=assign,
            submit=True,
        )
        assignment = backup.assignment
        if extension:
            backup.custom_submission_time = extension.custom_submission_time
        templates = assignment.files or []
        files = {}

        def extract_file_index(file_ind):
            """ Get the index of a file object key. Used because
            request.files.getlist() does not handle uniquely indexed lists.
            >>> extract_file_index('file[12]')
            12
            """
            brace_loc = file_ind.find('[')
            index_str = file_ind[brace_loc + 1:-1]
            return int(index_str)

        # A list of one-element lists
        sorted_uploads = sorted(list(request.files.items()),
                                key=lambda x: extract_file_index(x[0]))
        uploads = [v[1] for v in sorted_uploads]
        full_path_names = list(request.form.listvalues())[0]

        template_files = assign.files or []
        file_names = [os.path.split(f)[1] for f in full_path_names]
        missing = [t for t in template_files if t not in file_names]
        if missing:
            return jsonify({
                'error': ('Missing files: {}. The following files are required: {}'
                          .format(', '.join(missing), ', '.join(template_files)))
            }), 400

        backup_folder_postfix = time.time()

        for full_path, upload in zip(full_path_names, uploads):
            data = upload.read()
            if len(data) > MAX_UPLOAD_FILE_SIZE:
                # file is too large (over 25 MB)
                return jsonify({
                    'error': ('{} is larger than the maximum file size of {} MB'
                              .format(full_path, MAX_UPLOAD_FILE_SIZE / 1024 / 1024))
                }), 400
            try:
                files[full_path] = str(data, 'utf-8')
            except UnicodeDecodeError:
                upload.stream.seek(0)  # We've already read data, so reset before uploading
                dest_folder = "uploads/{}/{}/{}/".format(assign.name, current_user.id,
                                                         backup_folder_postfix)
                bin_file = ExternalFile.upload(upload.stream, current_user.id, full_path,
                                               staff_file=False, prefix=dest_folder,
                                               course_id=assign.course.id,
                                               backup=backup, assignment_id=assign.id)
                db.session.add(bin_file)

        message = Message(kind='file_contents', contents=files)
        backup.messages.append(message)

        db.session.add(backup)
        db.session.commit()

        # Send to continuous autograder
        if assign.autograding_key and assign.continuous_autograding:
            try:
                submit_continuous(backup)
            except ValueError as e:
                flash('Did not send to autograder: {}'.format(e), 'warning')

        return jsonify({
            'backup': backup.hashid,
            'url': url_for('.code', name=assign.name, submit=backup.submit,
                           bid=backup.id)
        })

    return render_template('student/assignment/submit.html', assignment=assign,
                           group=group, course=assign.course)

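# --- Illustrative client sketch, not from the original source ---
# submit_assignment() expects multipart parts keyed 'file[0]', 'file[1]', ...
# (extract_file_index() parses the integer between the brackets) plus exactly one
# form field whose repeated values are the full template paths, in the same order
# as the indexed parts (request.form.listvalues()[0]). The endpoint URL, the
# 'filename' form field name, and the assignment path below are assumptions.
import requests

def submit_files(session_cookies, paths, base_url='https://okpy.example.com',
                 assignment='cal/cs61a/fa16/hog'):
    files = {}
    for i, path in enumerate(paths):
        # each part is indexed so the server can restore the original ordering
        files['file[{}]'.format(i)] = open(path, 'rb')
    data = {'filename': paths}  # repeated form values -> request.form.listvalues()[0]
    resp = requests.post('{}/{}/submit'.format(base_url, assignment),
                         files=files, data=data, cookies=session_cookies)
    for f in files.values():
        f.close()
    return resp.json()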