Exemplo n.º 1
0
def score_from_csv(assign_id, rows, kind='total', invalid=None, message=None):
    """
    Job for uploading Scores.

    @param ``rows`` should be a list of records (mappings),
        with labels `email` and `score`
    @param ``invalid`` optional list of pre-validated bad input lines;
        they are logged and skipped
    @param ``message`` optional grader message; defaults to a generated one
    @return admin URL of the assignment's scores page
    """
    log = jobs.get_job_logger()
    current_user = jobs.get_current_job().user
    assign = Assignment.query.get(assign_id)

    message = message or '{} score for {}'.format(kind.title(), assign.display_name)

    def log_err(msg):
        # Prefix errors so they stand out in the job log.
        log.info('\t!  {}'.format(msg))

    log.info("Uploading scores for {}:\n".format(assign.display_name))

    if invalid:
        log_err('skipping {} invalid entries on lines:'.format(len(invalid)))
        for line in invalid:
            log_err('\t{}'.format(line))
        log.info('')

    success, total = 0, len(rows)
    for i, row in enumerate(rows, start=1):
        # Pull the fields out first: a malformed row must not abort the
        # whole upload job with an uncaught KeyError.
        try:
            email, score = row['email'], row['score']
        except KeyError as e:
            log_err('error: row {} is missing field {}'.format(i, e))
            continue
        try:
            user = User.query.filter_by(email=email).one()

            backup = Backup.query.filter_by(assignment=assign, submitter=user, submit=True).first()
            if not backup:
                backup = Backup.create(submitter=user, assignment=assign, submit=True)

            uploaded_score = Score(grader=current_user, assignment=assign,
                    backup=backup, user=user, score=score, kind=kind, message=message)

            db.session.add(uploaded_score)
            # Archive any older scores of the same kind for this user.
            uploaded_score.archive_duplicates()

        except SQLAlchemyError:
            print_exc()
            log_err('error: user with email `{}` does not exist'.format(email))
        else:
            success += 1
        if i % 100 == 0:
            log.info('\nUploaded {}/{} Scores\n'.format(i, total))
    # Commit all successfully-added scores at once.
    db.session.commit()

    log.info('\nSuccessfully uploaded {} "{}" scores (with {} errors)'.format(success, kind, total - success))

    return '/admin/course/{cid}/assignments/{aid}/scores'.format(
                cid=jobs.get_current_job().course_id, aid=assign_id)
Exemplo n.º 2
0
def export_grades():
    """Export all student grades for the current course as a CSV file.

    Writes one row per enrolled student, uploads the CSV as an
    ExternalFile, and returns a download URL for it.
    """
    logger = jobs.get_job_logger()
    job = jobs.get_current_job()
    requester = job.user
    course = Course.query.get(job.course_id)

    # Enrolled students, with their User rows eagerly loaded.
    enrollments = (Enrollment.query.options(db.joinedload('user')).filter(
        Enrollment.role == STUDENT_ROLE, Enrollment.course == course).all())

    headers, assignments = get_headers(course.assignments)
    logger.info("Using these headers:")
    for column in headers:
        logger.info('\t' + column)
    logger.info('')

    student_count = len(enrollments)
    enrolled_user_ids = [enrollment.user.id for enrollment in enrollments]

    # Pre-fetch every score once instead of querying per student.
    all_scores = collect_all_scores(assignments, enrolled_user_ids)

    with io.StringIO() as buf:
        writer = csv.writer(buf)
        writer.writerow(headers)  # write headers

        for count, enrollment in enumerate(enrollments, start=1):
            writer.writerow(export_student_grades(enrollment, assignments, all_scores))
            if count % 50 == 0:
                logger.info('Exported {}/{}'.format(count, student_count))
        buf.seek(0)
        timestamp = local_time(dt.datetime.now(),
                               course,
                               fmt='%b-%-d %Y at %I-%M%p')
        csv_filename = '{course_name} Grades ({date}).csv'.format(
            course_name=course.display_name, date=timestamp)
        # convert to bytes for csv upload
        payload = io.BytesIO(bytearray(buf.read(), 'utf-8'))
        upload = ExternalFile.upload(payload,
                                     user_id=requester.id,
                                     name=csv_filename,
                                     course_id=course.id,
                                     prefix='jobs/exports/{}/'.format(
                                         course.offering))

    logger.info('\nDone!\n')
    logger.info("Saved as: {0}".format(upload.object_name))
    return "/files/{0}".format(encode_id(upload.id))
Exemplo n.º 3
0
def autograde_assignment(assignment_id):
    """Autograde all enrolled students for this assignment.

    Each backup is tracked through the autograder by a per-backup state
    machine; slow steps are retried until a new score arrives or the
    retry limit is hit.
    """
    logger = jobs.get_job_logger()
    assignment = Assignment.query.get(assignment_id)
    submissions = assignment.course_submissions(include_empty=False)
    backup_ids = {entry['backup']['id'] for entry in submissions if entry['backup']}
    try:
        autograde_backups(assignment, jobs.get_current_job().user_id,
                          backup_ids, logger)
    except ValueError:
        # Raised when no autograder key is configured for the course.
        logger.info('Could not autograde backups - Please add an autograding key.')
        return
    return '/admin/course/{cid}/assignments/{aid}/scores'.format(
        cid=jobs.get_current_job().course_id, aid=assignment.id)
Exemplo n.º 4
0
def email_scores(assignment_id, score_tags, subject, body,
                 reply_to=None, dry_run=False):
    """Email each student (group) their scores for an assignment.

    :param assignment_id: primary key of the Assignment
    :param score_tags: score kinds to include (e.g. ['total'])
    :param subject: email subject line (also used as the email title)
    :param body: email body text
    :param reply_to: optional reply-to address
    :param dry_run: if True, route a couple of sample emails to the job
        creator instead of students, then stop
    :return: summary string describing how many emails were sent
    """
    log = jobs.get_job_logger()
    job_creator = jobs.get_current_job().user

    assign = Assignment.query.get(assignment_id)

    # All students enrolled in the assignment's course (User eagerly loaded).
    students = [e.user for e in (Enrollment.query
                        .options(db.joinedload('user'))
                        .filter(Enrollment.role == STUDENT_ROLE,
                                Enrollment.course == assign.course)
                        .all())]

    email_counter = 0
    seen_ids = set()
    for student in students:
        # Skip students already covered as part of a groupmate's email.
        if student.id in seen_ids:
            continue
        user_ids = assign.active_user_ids(student.id)
        seen_ids |= user_ids
        # Only scores whose kind was requested are included in the email.
        scores = [s for s in assign.scores(user_ids) if s.kind in score_tags]
        if scores:
            users = User.query.filter(User.id.in_(user_ids))
            # First user is the primary recipient; groupmates are CC'd.
            primary, cc = users[0].email, [u.email for u in users[1:]]
            if dry_run:
                # Dry run: send to the job creator only, no CCs.
                primary, cc = job_creator.email, []

            result = send_email(primary,
                subject, body,
                cc=cc,
                template='email/scores.html',
                title=subject,
                from_name=assign.course.display_name,
                scores=scores,
                reply_to=reply_to,
                link_text="View on okpy.org",
                link="https://okpy.org/" + assign.name, # Don't have url_for
                assignment=assign.display_name)

            if result:
                log.info("Sent to {}".format(', '.join([primary] + cc)))
                email_counter += 1

        # Send a few emails in dry run mode.
        if dry_run and email_counter >= 2:
            message = "Run with dry run mode"
            log.info(message)
            return message

    message = "Sent {} emails".format(email_counter)
    log.info(message)
    return message
Exemplo n.º 5
0
def autograde_assignment(assignment_id):
    """Autograde all enrolled students for this assignment.

    A state machine per backup tracks progress through the autograder;
    stalled steps are retried. A backup is done once a new score is
    confirmed or the retry limit is reached.
    """
    logger = jobs.get_job_logger()
    job = jobs.get_current_job()
    assignment = Assignment.query.get(assignment_id)
    submissions = assignment.course_submissions(include_empty=False)

    backup_ids = set()
    for submission in submissions:
        if submission['backup']:
            backup_ids.add(submission['backup']['id'])

    try:
        autograde_backups(assignment, job.user_id, backup_ids, logger)
    except ValueError:
        # No autograder key configured for this course.
        logger.info(
            'Could not autograde backups - Please add an autograding key.')
        return
    return '/admin/course/{cid}/assignments/{aid}/scores'.format(
        cid=jobs.get_current_job().course_id, aid=assignment.id)
Exemplo n.º 6
0
def export_grades():
    """Job that exports all student grades for the current course as a CSV.

    Writes one row per enrolled student, uploads the CSV as an
    ExternalFile, and returns the file's download URL.
    """
    logger = jobs.get_job_logger()
    current_user = jobs.get_current_job().user
    course = Course.query.get(jobs.get_current_job().course_id)
    assignments = course.assignments
    # Enrolled students with their User rows eagerly loaded.
    students = (Enrollment.query
      .options(db.joinedload('user'))
      .filter(Enrollment.role == STUDENT_ROLE, Enrollment.course == course)
      .all())

    # get_headers also filters/orders the assignments to match the columns.
    headers, assignments = get_headers(assignments)
    logger.info("Using these headers:")
    for header in headers:
        logger.info('\t' + header)
    logger.info('')

    total_students = len(students)
    with io.StringIO() as f:
        writer = csv.writer(f)
        writer.writerow(headers) # write headers

        for i, student in enumerate(students, start=1):
            row = export_student_grades(student, assignments)
            writer.writerow(row)
            if i % 50 == 0:
                logger.info('Exported {}/{}'.format(i, total_students))
        f.seek(0)
        # Timestamp in the course's local timezone, used in the filename.
        created_time = local_time(dt.datetime.now(), course, fmt='%b-%-d %Y at %I-%M%p')
        csv_filename = '{course_name} Grades ({date}).csv'.format(
                course_name=course.display_name, date=created_time)
        # convert to bytes for csv upload
        csv_bytes = io.BytesIO(bytearray(f.read(), 'utf-8'))
        upload = ExternalFile.upload(csv_bytes, user_id=current_user.id, name=csv_filename,
                         course_id=course.id,
                         prefix='jobs/exports/{}/'.format(course.offering))

    logger.info('\nDone!\n')
    logger.info("Saved as: {0}".format(upload.object_name))
    return "/files/{0}".format(encode_id(upload.id))
Exemplo n.º 7
0
def assign_scores(assign_id, score, kind, message, deadline,
                     include_backups=True):
    """Assign a fixed score to each unique pre-deadline submission.

    :param assign_id: primary key of the Assignment
    :param score: numeric score value to assign
    :param kind: score kind (e.g. 'total')
    :param message: grader message attached to each Score
    :param deadline: cutoff; only backups created (or custom-dated) at or
        before it are eligible
    :param include_backups: if False, only explicit submissions count
    :return: summary string describing how many scores were created
    """
    logger = jobs.get_job_logger()
    current_user = jobs.get_current_job().user

    assignment = Assignment.query.get(assign_id)

    # Find all submissions (or backups) before the deadline,
    # newest first, one per submitter.
    backups = Backup.query.filter(
        Backup.assignment_id == assign_id,
        or_(Backup.created <= deadline, Backup.custom_submission_time <= deadline)
    ).order_by(Backup.created.desc()).group_by(Backup.submitter_id)

    if not include_backups:
        backups = backups.filter(Backup.submit == True)

    all_backups = backups.all()

    if not all_backups:
        logger.info("No submissions were found with a deadline of {}."
                    .format(deadline))
        return "No Scores Created"

    total_count = len(all_backups)
    logger.info("Found {} eligible submissions...".format(total_count))

    score_counter, seen = 0, set()

    for back in all_backups:
        # Skip backups already covered by a groupmate's backup. These are
        # not scored, so they must not inflate the score counter.
        if back.creator in seen:
            continue
        new_score = Score(score=score, kind=kind, message=message,
                          user_id=back.submitter_id,
                          assignment=assignment, backup=back,
                          grader=current_user)
        db.session.add(new_score)
        # Archive any older scores of the same kind for this user.
        new_score.archive_duplicates()
        db.session.commit()

        score_counter += 1
        if score_counter % 5 == 0:
            logger.info("Scored {} of {}".format(score_counter, total_count))
        seen |= back.owners()

    result = "Left {} '{}' scores of {}".format(score_counter, kind.title(), score)
    logger.info(result)
    return result
Exemplo n.º 8
0
def autograde_assignment(assignment_id):
    """Autograde all enrolled students for this assignment.

    Every non-empty course submission's backup is handed to
    autograde_backups, which retries stalled gradings until a score
    arrives or the retry limit is hit.
    """
    logger = jobs.get_job_logger()
    assignment = Assignment.query.get(assignment_id)
    eligible = assignment.course_submissions(include_empty=False)
    backup_ids = {record['backup']['id']
                  for record in eligible if record['backup']}
    user_id = jobs.get_current_job().user_id
    return autograde_backups(assignment, user_id, backup_ids, logger)
Exemplo n.º 9
0
def export_assignment(assignment_id, anonymized):
    """ Generate a zip file of submissions from enrolled students.

    Final Submission: One submission per student/group
        Zip Structure: cal-cs61a../[email protected]@b.com/abc12d/hog.py
    Anonymized: Submission without identifying info
        Zip Structure: cal-cs61a../{hash}/hog.py

    :param assignment_id: primary key of the Assignment to export
    :param anonymized: if True, strip identifying info from the export
    :return: download URL of the uploaded zip
    :raises Exception: if the assignment is missing or the requesting
        user lacks download permission
    """
    logger = jobs.get_job_logger()

    assignment = Assignment.query.get(assignment_id)
    requesting_user = jobs.get_current_job().user

    if not assignment:
        logger.warning("No assignment found")
        raise Exception("No Assignment")

    if not Assignment.can(assignment, requesting_user, "download"):
        raise Exception("{} does not have enough permission"
                        .format(requesting_user.email))
    if anonymized:
        logger.info("Starting anonymized submission export")
    else:
        logger.info("Starting final submission export")
    course = assignment.course
    with io.BytesIO() as bio:
        # Get a handle to the in-memory zip in append mode
        with zipfile.ZipFile(bio, "w", zipfile.ZIP_DEFLATED, False) as zf:
            # NOTE(review): ZipFile objects have no `external_attr`
            # (it lives on ZipInfo), so this line is likely a no-op —
            # confirm whether export_loop reads it back.
            zf.external_attr = 0o655 << 16
            export_loop(bio, zf, logger, assignment, anonymized)
            created_time = local_time(dt.datetime.now(), course, fmt='%m-%d-%I-%M-%p')
            zip_name = '{}_{}.zip'.format(assignment.name.replace('/', '-'), created_time)

        bio.seek(0)
        # Close zf handle to finish writing zipfile
        logger.info("Uploading...")
        upload = ExternalFile.upload(bio, user_id=requesting_user.id, name=zip_name,
                                     course_id=course.id,
                                     prefix='jobs/exports/{}/'.format(course.offering))

    logger.info("Saved as: {0}".format(upload.object_name))
    msg = "/files/{0}".format(encode_id(upload.id))
    return msg
Exemplo n.º 10
0
def autograde_assignment(assignment_id):
    """Autograde all enrolled students for this assignment.

    We set up a state machine for each backup to check its progress through
    the autograder. If any step takes too long, we'll retry autograding that
    backup. Ultimately, a backup is considered done when we confirm that
    we've received a new score, or if we have reached the retry limit.

    :param assignment_id: primary key of the Assignment to grade
    :return: summary string, e.g. "N graded, M failed"
    """
    logger = jobs.get_job_logger()

    assignment = Assignment.query.get(assignment_id)
    course_submissions = assignment.course_submissions(include_empty=False)
    # One backup per submission entry; entries without a backup are skipped.
    backup_ids = set(fs['backup']['id'] for fs in course_submissions
                     if fs['backup'])
    # Token used to authenticate against the external autograder service.
    token = create_autograder_token(jobs.get_current_job().user_id)

    # start by sending a batch of all backups
    start_time = time.time()
    job_ids = send_batch(token, assignment, backup_ids)
    # One GradingTask per backup; each starts in the QUEUED state.
    tasks = [
        GradingTask(
            status=GradingStatus.QUEUED,
            backup_id=backup_id,
            job_id=job_id,
            retries=0,
        ) for backup_id, job_id in job_ids.items()
    ]
    num_tasks = len(tasks)

    def retry_task(task):
        # Re-enqueue a stalled/failed task, or mark it FAILED once its
        # retry budget is exhausted.
        if task.retries >= MAX_RETRIES:
            logger.error(
                'Did not receive a score for backup {} after {} retries'.
                format(utils.encode_id(task.backup_id), MAX_RETRIES))
            task.set_status(GradingStatus.FAILED)
        else:
            task.set_status(GradingStatus.QUEUED)
            task.job_id = autograde_backup(token, assignment, task.backup_id)
            task.retries += 1

    # Poll until every task is DONE or FAILED. State transitions:
    # QUEUED -> RUNNING -> WAITING -> DONE, with retry_task() sending a
    # task back to QUEUED on disappearance, failure, or timeout.
    while True:
        time.sleep(POLL_INTERVAL)
        results = check_job_results([task.job_id for task in tasks])

        graded = len([
            task for task in tasks
            if task.status in (GradingStatus.DONE, GradingStatus.FAILED)
        ])
        logger.info('Graded {:>4}/{} ({:>5.1f}%)'.format(
            graded, num_tasks, 100 * graded / num_tasks))
        if graded == num_tasks:
            break

        for task in tasks:
            hashid = utils.encode_id(task.backup_id)
            if task.status == GradingStatus.QUEUED:
                result = results[task.job_id]
                if not result:
                    # The autograder lost track of the job entirely.
                    logger.warning(
                        'Autograder job {} disappeared, retrying'.format(
                            task.job_id))
                    retry_task(task)
                elif result['status'] != 'queued':
                    logger.debug(
                        'Autograder job {} for backup {} started'.format(
                            task.job_id, hashid))
                    task.set_status(GradingStatus.RUNNING)
                elif task.expired(QUEUED_TIMEOUT):
                    logger.warning(
                        'Autograder job {} queued longer than {} seconds, retrying'
                        .format(task.job_id, QUEUED_TIMEOUT))
                    retry_task(task)
            elif task.status == GradingStatus.RUNNING:
                result = results[task.job_id]
                if not result:
                    logger.warning(
                        'Autograder job {} disappeared, retrying'.format(
                            task.job_id))
                    retry_task(task)
                elif result['status'] == 'finished':
                    logger.debug(
                        'Autograder job {} for backup {} finished'.format(
                            task.job_id, hashid))
                    task.set_status(GradingStatus.WAITING)
                elif result['status'] == 'failed':
                    logger.warning('Autograder job {} failed, retrying'.format(
                        task.job_id))
                    retry_task(task)
                elif task.expired(RUNNING_TIMEOUT):
                    logger.warning(
                        'Autograder job {} running longer than {} seconds, retrying'
                        .format(task.job_id, RUNNING_TIMEOUT))
                    retry_task(task)
            elif task.status == GradingStatus.WAITING:
                # The autograder finished; wait for a fresh, unarchived
                # score (created after this job started) to land in the DB.
                score = Score.query.filter(
                    Score.backup_id == task.backup_id, Score.archived == False,
                    Score.created >
                    datetime.datetime.fromtimestamp(start_time)).first()
                if score:
                    logger.debug('Received score for backup {}'.format(hashid))
                    task.set_status(GradingStatus.DONE)
                elif task.expired(WAITING_TIMEOUT):
                    logger.warning(
                        'Did not receive score for backup {} in {} seconds, retrying'
                        .format(hashid, WAITING_TIMEOUT))
                    retry_task(task)

    # report summary
    statuses = collections.Counter(task.status for task in tasks)
    message = '{} graded, {} failed'.format(statuses[GradingStatus.DONE],
                                            statuses[GradingStatus.FAILED])
    logger.info(message)
    return message
Exemplo n.º 11
0
def grade_on_effort(assignment_id, full_credit, late_multiplier, required_questions, grading_url):
    """Assign effort-based scores for an assignment.

    Deletes existing 'effort' scores, then scores each student's
    best-effort backup (applying a late multiplier where relevant).
    Backups that cannot be scored automatically, or that scored below
    full credit, are sent to the autograder at the end.

    :param assignment_id: primary key of the Assignment
    :param full_credit: maximum effort score
    :param late_multiplier: penalty multiplier for late submissions
    :param required_questions: questions that must show effort
    :param grading_url: URL prefix used to log links to late backups
    :return: admin URL of the assignment's scores page
    """
    logger = jobs.get_job_logger()

    current_user = jobs.get_current_job().user
    assignment = Assignment.query.get(assignment_id)
    submissions = assignment.course_submissions(include_empty=False)

    # Delete all previous effort scores for this assignment so this run
    # starts from a clean slate. (These are deleted outright, not archived.)
    scores = Score.query.filter(
        Score.kind == 'effort',
        Score.assignment_id == assignment_id).all()
    for score in scores:
        db.session.delete(score)

    seen = set()
    stats = Counter()
    manual, late, not_perfect = [], [], []
    # Initialize so the post-loop progress log works even when
    # `submissions` is empty (previously a NameError on `i`).
    i = 0
    for i, subm in enumerate(submissions, 1):
        user_id = int(subm['user']['id'])
        if user_id in seen:
            continue

        latest_backup = Backup.query.get(subm['backup']['id'])
        submission_time = get_submission_time(latest_backup, assignment)
        backup, submission_time = find_best_scoring(latest_backup,
                submission_time, assignment, required_questions, full_credit)

        try:
            score, messages = effort_score(backup, full_credit, required_questions)
        except AssertionError:
            # effort_score could not evaluate this backup automatically;
            # queue it for autograding below.
            manual.append(backup)
            continue
        else:
            score, messages = handle_late(backup, assignment,
                    late, submission_time, score, messages, late_multiplier)

        if score < full_credit and backup.hashid not in late:
            not_perfect.append(backup)

        messages.append('\nFinal Score: {}'.format(score))
        messages.append('Your final score will be the max of either this score or the `Total` score (if exists)')
        new_score = Score(score=score, kind='effort',
                message='\n'.join(messages), user_id=backup.submitter_id,
                assignment=assignment, backup=backup, grader=current_user)
        db.session.add(new_score)

        if i % 100 == 0:
            logger.info('Scored {}/{}'.format(i, len(submissions)))

        if subm['group']:
            # Every group member is covered by this single backup.
            member_ids = {int(member_id)
                          for member_id in subm['group']['group_member'].split(',')}
            seen |= member_ids
            stats[score] += len(member_ids)
        else:
            seen.add(user_id)
            stats[score] += 1

    # Commit all scores at once
    db.session.commit()

    logger.info('Scored {}/{}'.format(i, len(submissions)))
    logger.info('done!')

    if len(late) > 0:
        logger.info('\n{} Late:'.format(len(late)))
        for backup_id in late:
            logger.info('  {}'.format(grading_url + backup_id))

    logger.info('\nScore Distribution:')
    sorted_scores = sorted(stats.items(), key=lambda p: -p[0])
    for score, count in sorted_scores:
        logger.info('  {} - {}'.format(str(score).rjust(3), count))

    needs_autograding = len(manual) + len(not_perfect)
    if needs_autograding > 0:
        logger.info('\nAutograding {} manual and/or not perfect backups'.format(needs_autograding))
        backup_ids = [backup.id for backup in manual + not_perfect]
        try:
            autograde_backups(assignment, current_user.id, backup_ids, logger)
        except ValueError:
            logger.info('Could not autograde backups - Please add an autograding key.')

    db.session.commit()
    return '/admin/course/{cid}/assignments/{aid}/scores'.format(
                cid=jobs.get_current_job().course_id, aid=assignment_id)
Exemplo n.º 12
0
def assign_scores(assign_id,
                  score,
                  kind,
                  message,
                  deadline,
                  include_backups=True,
                  grade_backups=False):
    """Create a score of ``kind`` for each unique pre-deadline submission.

    When ``grade_backups`` is set, the eligible backups are sent to the
    autograder instead of receiving the fixed ``score``.
    """
    logger = jobs.get_job_logger()
    grader = jobs.get_current_job().user

    assignment = Assignment.query.get(assign_id)
    students = [e.user_id for e in assignment.course.get_students()]
    submission_time = server_time_obj(deadline, assignment.course)

    # Latest backup per submitter created (or custom-dated) on/before the deadline.
    query = Backup.query.filter(
        Backup.assignment_id == assign_id,
        or_(Backup.created <= deadline,
            Backup.custom_submission_time <= deadline)).group_by(
                Backup.submitter_id).order_by(Backup.created.desc())

    if not include_backups:
        query = query.filter(Backup.submit == True)

    candidates = query.all()

    if not candidates:
        logger.info("No submissions were found with a deadline of {}.".format(
            deadline))
        return "No Scores Created"

    score_counter = 0
    covered = set()
    unique_backups = []

    # Keep one backup per group: the first whose creator isn't already
    # covered by a groupmate's backup.
    for backup in candidates:
        if backup.creator not in covered:
            unique_backups.append(backup)
            covered |= backup.owners()

    total_count = len(unique_backups)
    logger.info(
        "Found {} unique and eligible submissions...".format(total_count))

    if grade_backups:
        logger.info('\nAutograding {} backups'.format(total_count))
        try:
            autograde_backups(assignment, grader.id,
                              [backup.id for backup in unique_backups], logger)
        except ValueError:
            logger.info(
                'Could not autograde backups - Please add an autograding key.')
    else:
        for backup in unique_backups:
            new_score = Score(score=score,
                              kind=kind,
                              message=message,
                              user_id=backup.submitter_id,
                              assignment=assignment,
                              backup=backup,
                              grader=grader)

            db.session.add(new_score)
            new_score.archive_duplicates()

            score_counter += 1
            if score_counter % 100 == 0:
                logger.info("Scored {} of {}".format(score_counter,
                                                     total_count))

        # only commit if all scores were successfully added
        db.session.commit()

    logger.info("Left {} '{}' scores of {}".format(score_counter, kind.title(),
                                                   score))
    return '/admin/course/{cid}/assignments/{aid}/scores'.format(
        cid=jobs.get_current_job().course_id, aid=assignment.id)
Exemplo n.º 13
0
def assign_scores(assign_id, score, kind, message, deadline,
                     include_backups=True, grade_backups=False):
    """Job that assigns a fixed score to each unique on-time submission.

    :param assign_id: primary key of the Assignment
    :param score: numeric score value to assign
    :param kind: score kind (e.g. 'total')
    :param message: grader message attached to each Score
    :param deadline: cutoff; only backups created (or custom-dated) at or
        before it are eligible
    :param include_backups: if False, only explicit submissions count
    :param grade_backups: if True, autograde the unique backups instead
        of assigning the fixed score
    :return: admin URL of the assignment's scores page (or a status string)
    """
    logger = jobs.get_job_logger()
    current_user = jobs.get_current_job().user

    assignment = Assignment.query.get(assign_id)
    students = [e.user_id for e in assignment.course.get_students()]
    submission_time = server_time_obj(deadline, assignment.course)

    # Find all submissions (or backups) before the deadline
    backups = Backup.query.filter(
        Backup.assignment_id == assign_id,
        or_(Backup.created <= deadline, Backup.custom_submission_time <= deadline)
    ).group_by(Backup.submitter_id).order_by(Backup.created.desc())

    if not include_backups:
        backups = backups.filter(Backup.submit == True)

    all_backups =  backups.all()

    if not all_backups:
        logger.info("No submissions were found with a deadline of {}."
                    .format(deadline))
        return "No Scores Created"

    score_counter, seen = 0, set()

    unique_backups = []

    # Keep one backup per group: the first (newest) backup whose creator
    # has not already been covered by a groupmate's backup.
    for back in all_backups:
        if back.creator not in seen:
            unique_backups.append(back)
            seen |= back.owners()

    total_count = len(unique_backups)
    logger.info("Found {} unique and eligible submissions...".format(total_count))

    if grade_backups:
        logger.info('\nAutograding {} backups'.format(total_count))
        backup_ids = [back.id for back in unique_backups]
        try:
            autograde_backups(assignment, current_user.id, backup_ids, logger)
        except ValueError:
            # Raised when no autograder key is configured for the course.
            logger.info('Could not autograde backups - Please add an autograding key.')
    else:
        for back in unique_backups:
            new_score = Score(score=score, kind=kind, message=message,
                              user_id=back.submitter_id,
                              assignment=assignment, backup=back,
                              grader=current_user)

            db.session.add(new_score)
            # Archive any older scores of the same kind for this user.
            new_score.archive_duplicates()

            score_counter += 1
            if score_counter % 100 == 0:
                logger.info("Scored {} of {}".format(score_counter, total_count))

        # only commit if all scores were successfully added
        db.session.commit()

    logger.info("Left {} '{}' scores of {}".format(score_counter, kind.title(), score))
    return '/admin/course/{cid}/assignments/{aid}/scores'.format(
                cid=jobs.get_current_job().course_id, aid=assignment.id)