Example #1
 def test_invalid_type_write_submitty_date(self):
     testcases = (10, 12.5, ['2020-06-12 03:21:30.123+0000'])
     for testcase in testcases:
         with self.subTest(testcase):
             with self.assertRaises(TypeError) as cm:
                 dateutils.write_submitty_date(testcase)
             self.assertEqual(
                 "Invalid type. Expected datetime or datetime string,"
                 f" got {type(testcase)}.", str(cm.exception))
Example #2
def log_stack_trace(job_id="UNKNOWN",
                    is_batch=False,
                    which_untrusted="",
                    jobname="",
                    timelabel="",
                    elapsed_time=-1,
                    trace=""):
    now = dateutils.get_current_time()
    datefile = "stack_traces_{0}.txt".format(datetime.strftime(now, "%Y%m%d"))
    autograding_log_file = os.path.join(AUTOGRADING_STACKTRACE_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        try:
            fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
            print("%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s |\n%s" %
                  (easy_to_read_date, job_id, batch_string, which_untrusted,
                   jobname, timelabel, elapsed_time_string, time_unit, trace),
                  file=myfile)
            fcntl.flock(myfile, fcntl.LOCK_UN)
        except OSError:
            print("Could not gain a lock on the log file.")
Example #3
 def test_write_submitty_date(self, get_timezone):
     testcases = ((datetime(2020, 6, 12, 3, 21, 30,
                            tzinfo=pytz_timezone('UTC')),
                   '2020-06-12 03:21:30+0000'),
                  (datetime(2020, 12, 25, 3, 21, 30,
                            tzinfo=pytz_timezone('UTC')),
                   '2020-12-25 03:21:30+0000'),
                  (datetime(2020, 6, 12, 3, 21, 30, 123,
                            tzinfo=pytz_timezone('UTC')),
                   '2020-06-12 03:21:30+0000'),
                  (datetime(2020, 6, 12, 3, 21, 30),
                   '2020-06-12 03:21:30-0400'),
                  (datetime(2020, 12, 12, 3, 21, 30),
                   '2020-12-12 03:21:30-0500'))
     for testcase in testcases:
         with self.subTest(i=testcase[0]):
             self.assertEqual(testcase[1],
                              dateutils.write_submitty_date(testcase[0]))
Example #4
def log_message(log_path,
                job_id="UNKNOWN",
                is_batch=False,
                which_untrusted="",
                jobname="",
                timelabel="",
                elapsed_time=-1,
                message=""):
    """ Given a log directory, create or append a message to a dated log file in that directory. """

    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(log_path, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    parts = (easy_to_read_date, f"{job_id:>6s}", f"{batch_string:>5s}",
             f"{which_untrusted:>11s}", f"{jobname:75s}",
             f"{timelabel:6s} {elapsed_time_string:>9s} {time_unit:>3s}",
             message)
    write_to_log(autograding_log_file, parts)
Example #5
def log_message(log_path,
                job_id="UNKNOWN",
                is_batch=False,
                which_untrusted="",
                jobname="",
                timelabel="",
                elapsed_time=-1,
                message=""):
    """ Given a log directory, create or append a message to a dated log file in that directory. """

    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(log_path, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        try:
            fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
            print(
                "%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s | %s" %
                (easy_to_read_date, job_id, batch_string, which_untrusted,
                 jobname, timelabel, elapsed_time_string, time_unit, message),
                file=myfile)
            fcntl.flock(myfile, fcntl.LOCK_UN)
        except OSError:
            print("Could not gain a lock on the log file.")
Example #6
def log_container_meta(log_path, event="", name="", container="", time=0):
    """ Given a log file, create or append container meta data to a log file. """

    now = dateutils.get_current_time()
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    time_unit = "sec"
    parts = (easy_to_read_date, name, container, event, f"{time:.3f}", time_unit)
    write_to_log(log_path, parts)
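Examples #4 and #6 (and #11/#12 below) delegate to a write_to_log helper that never appears in this section. A minimal sketch, assuming it accepts either a pre-joined string (the call style in Example #12) or a tuple of parts, and reusing the non-blocking fcntl locking pattern from the other examples; the body is an assumption, not the real helper:

import fcntl


def write_to_log(log_path, parts):
    # hypothetical sketch: append one ' | '-separated line under an
    # exclusive, non-blocking lock, mirroring the fcntl examples above
    message = parts if isinstance(parts, str) else ' | '.join(str(x) for x in parts)
    with open(log_path, 'a') as log_file:
        try:
            fcntl.flock(log_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
            print(message, file=log_file)
            fcntl.flock(log_file, fcntl.LOCK_UN)
        except OSError:
            print("Could not gain a lock on the log file.")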
Example #7
def main():
    args = parse_args()
    complete_config_json_path = os.path.join(SUBMITTY_DATA_DIR,
                                             'courses',
                                             args.semester,
                                             args.course,
                                             'config',
                                             'complete_config',
                                             'complete_config_' + args.assignment + '.json')

    if os.path.isfile(complete_config_json_path):
        with open(complete_config_json_path, 'r', encoding='utf-8') as infile:
            config_file = json.load(infile)
    else:
        sys.exit("ERROR: could not find " + complete_config_json_path)

    required_capabilities = config_file.get('required_capabilities', 'default')
    testcases = config_file.get('testcases', [])
    graded_file = {
        "semester": args.semester,
        "course": args.course,
        "gradeable": args.assignment,
        "required_capabilities": required_capabilities,
        "queue_time": dateutils.write_submitty_date(microseconds=True),
        "generate_output": True,
        "max_possible_grading_time": -1,
        "who": "build",
        "regrade": False,
    }

    for testcase in testcases:
        input_generation_commands = testcase.get('input_generation_commands', [])
        solution_containers = testcase.get('solution_containers', [])
        should_generate_solution = False
        for solution_container in solution_containers:
            if len(solution_container["commands"]) != 0:
                should_generate_solution = True
                break

        if should_generate_solution and not input_generation_commands:
            path_grading_file = os.path.join(
                SUBMITTY_DATA_DIR, "to_be_graded_queue",
                "__".join([args.semester, args.course, args.assignment]))

            if os.path.isfile(path_grading_file):
                os.remove(path_grading_file)

            with open(path_grading_file, 'w') as grading_file:
                json.dump(graded_file, grading_file, sort_keys=True, indent=4)
            print("Starting to build generated output")
            break
Example #8
def worker_process(which_machine, address, which_untrusted, my_server):

    # verify the DAEMON_USER is running this script
    if not int(os.getuid()) == int(DAEMON_UID):
        grade_items_logging.log_message(JOB_ID, message="ERROR: must be run by DAEMON_USER")
        raise SystemExit("ERROR: the grade_item.py script must be run by the DAEMON_USER")

    # ignore keyboard interrupts in the worker processes
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    counter = 0

    servername_workername = "{0}_{1}".format(my_server, address)
    autograding_zip = os.path.join(SUBMITTY_DATA_DIR, "autograding_TODO",
                                   servername_workername + "_" + which_untrusted + "_autograding.zip")
    submission_zip = os.path.join(SUBMITTY_DATA_DIR, "autograding_TODO",
                                  servername_workername + "_" + which_untrusted + "_submission.zip")
    todo_queue_file = os.path.join(SUBMITTY_DATA_DIR, "autograding_TODO",
                                   servername_workername + "_" + which_untrusted + "_queue.json")

    while True:
        if os.path.exists(todo_queue_file):
            try:
                results_zip_tmp = grade_item.grade_from_zip(autograding_zip, submission_zip, which_untrusted)
                results_zip = os.path.join(SUBMITTY_DATA_DIR, "autograding_DONE",
                                           servername_workername + "_" + which_untrusted + "_results.zip")
                done_queue_file = os.path.join(SUBMITTY_DATA_DIR, "autograding_DONE",
                                               servername_workername + "_" + which_untrusted + "_queue.json")
                # move doesn't inherit the permissions of the destination directory; copyfile does.
                shutil.copyfile(results_zip_tmp, results_zip)

                os.remove(results_zip_tmp)
                with open(todo_queue_file, 'r') as infile:
                    queue_obj = json.load(infile)
                    queue_obj["done_time"] = dateutils.write_submitty_date(microseconds=True)
                with open(done_queue_file, 'w') as outfile:
                    json.dump(queue_obj, outfile, sort_keys=True, indent=4)
            except Exception as e:
                grade_items_logging.log_message(
                    JOB_ID,
                    message="ERROR attempting to unzip graded item: " + which_machine +
                    " " + which_untrusted + " exception " + repr(e))
                with contextlib.suppress(FileNotFoundError):
                    os.remove(autograding_zip)
                with contextlib.suppress(FileNotFoundError):
                    os.remove(submission_zip)
            with contextlib.suppress(FileNotFoundError):
                os.remove(todo_queue_file)
            counter = 0
        else:
            if counter >= 10:
                print(which_machine, which_untrusted, "wait")
                counter = 0
            counter += 1
            time.sleep(1)
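The loop above treats the existence of the *_queue.json file in autograding_TODO as the "job is ready" signal. A hypothetical shipper-side counterpart (the function name, zip sources, and constant value are assumptions, not Submitty's actual shipper code) would therefore place the two zips first and write the queue file last:

import json
import os
import shutil

SUBMITTY_DATA_DIR = "/var/local/submitty"  # typical value used elsewhere in these examples


def ship_job(my_server, address, which_untrusted,
             autograding_zip_src, submission_zip_src, queue_obj):
    # hypothetical sketch; mirrors the file names worker_process() polls for
    prefix = "{0}_{1}_{2}".format(my_server, address, which_untrusted)
    todo_dir = os.path.join(SUBMITTY_DATA_DIR, "autograding_TODO")
    shutil.copyfile(autograding_zip_src, os.path.join(todo_dir, prefix + "_autograding.zip"))
    shutil.copyfile(submission_zip_src, os.path.join(todo_dir, prefix + "_submission.zip"))
    # the queue file goes last: its existence tells the worker both zips are in place
    with open(os.path.join(todo_dir, prefix + "_queue.json"), 'w') as outfile:
        json.dump(queue_obj, outfile, sort_keys=True, indent=4)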
Example #9
def log_message(is_batch, which_untrusted, jobname, timelabel, elapsed_time,
                message):
    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(AUTOGRADING_LOG_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now)
    my_pid = os.getpid()
    batch_string = "BATCH" if is_batch else ""
    abbrev_jobname = jobname[len(SUBMITTY_DATA_DIR + "/courses/"):]
    time_unit = "" if elapsed_time == "" else "sec"
    with open(autograding_log_file, 'a') as myfile:
        fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        print("%s | %6s | %5s | %11s | %-75s | %-6s %5s %3s | %s" %
              (easy_to_read_date, my_pid, batch_string, which_untrusted,
               abbrev_jobname, timelabel, elapsed_time, time_unit, message),
              file=myfile)
        fcntl.flock(myfile, fcntl.LOCK_UN)
Example #10
def log_stack_trace(job_id="UNKNOWN", is_batch=False, which_untrusted="", jobname="", timelabel="", elapsed_time=-1, trace=""):
    now = dateutils.get_current_time()
    datefile = "stack_traces_{0}.txt".format(datetime.strftime(now, "%Y%m%d"))
    autograding_log_file = os.path.join(AUTOGRADING_STACKTRACE_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        try:
            fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
            print("%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s |\n%s"
                  % (easy_to_read_date, job_id, batch_string, which_untrusted,
                     jobname, timelabel, elapsed_time_string, time_unit, trace),
                  file=myfile)
            fcntl.flock(myfile, fcntl.LOCK_UN)
        except OSError:
            print("Could not gain a lock on the log file.")
Example #11
    def log_stack_trace(
        self,
        trace: str,
        *,
        is_batch: bool = False,
        which_untrusted: str = '',
        job_id: Optional[str] = None,
        jobname: str = "",
        echo_source: Optional[str] = None,
    ):
        """Log a stack trace to this logger's configured stack trace directory."""
        job_id = job_id or self.job_id
        # Save the parameters to this trace so we can duplicate these on the
        # shipper's end once the job finishes.
        #
        # TODO: Maybe we want to store time info too? Might need to think a bit
        #       more in terms of the stack traces log file format.
        if self.capture_traces:
            self.accumulated_traces.append({
                'trace': trace,
                'is_batch': is_batch,
                'which_untrusted': which_untrusted,
                'job_id': job_id,
                'jobname': jobname,
            })
        # Always run this since this could be deleted without us knowing
        os.makedirs(self.stack_trace_dir, exist_ok=True)

        now = dateutils.get_current_time()
        easy_to_read_date = dateutils.write_submitty_date(now, True)

        message = f"[{easy_to_read_date}][{job_id:>6s}]\n"
        if echo_source is not None:
            message += f"== (Echoed from {echo_source})\n"
        message += f"== Batch? {is_batch}\n"
        message += f"== Which: {which_untrusted}\n"
        message += f"==   Job: {jobname}\n"
        for line in trace.splitlines():
            message += f"== {line}\n"
        message = message.strip()
        write_to_log(self.stack_trace_path, message)
Example #12
 def log_message(
     self, message: str, *,
     is_batch: bool = False,
     which_untrusted: str = "",
     jobname: str = "",
     timelabel: str = "",
     elapsed_time: Optional[int] = None,
     job_id: Optional[str] = None
 ):
     """Log a message to this logger's configured log directory."""
     now = dateutils.get_current_time()
     easy_to_read_date = dateutils.write_submitty_date(now, True)
     batch_string = "BATCH" if is_batch else ""
     if elapsed_time is None:
         elapsed_time = -1
     elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(elapsed_time)
     time_unit = "" if elapsed_time < 0 else "sec"
     job_id = job_id or self.job_id
      parts = (easy_to_read_date, f"{job_id:>6s}", f"{batch_string:>5s}",
               f"{which_untrusted:>11s}", f"{jobname:75s}",
               f"{timelabel:6s} {elapsed_time_string:>9s} {time_unit:>3s}",
               message)
      write_to_log(self.log_path, ' | '.join(str(x) for x in parts))
Example #13
 def test_write_submitty_date_microseconds(self, get_timezone):
     testcases = (
         (datetime(2020, 6, 12, 3, 21, 30, tzinfo=pytz_timezone('UTC')),
          '2020-06-12 03:21:30.000+0000'),
         (datetime(2020, 6, 12, 3, 21, 30, 123500,
                   tzinfo=pytz_timezone('UTC')),
          '2020-06-12 03:21:30.123+0000'),
         (datetime(2020, 6, 12, 3, 21, 30, 211500),
          '2020-06-12 03:21:30.211-0400'),
     )
     for testcase in testcases:
         with self.subTest(i=testcase[0]):
             self.assertEqual(
                 testcase[1],
                 dateutils.write_submitty_date(testcase[0], True))
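Taken together, Examples #1, #3, and #13 pin down the contract of dateutils.write_submitty_date: datetimes (or datetime strings) only, a %Y-%m-%d %H:%M:%S%z layout, millisecond truncation when the second argument is true, and localization of naive datetimes. A minimal sketch consistent with those tests; the body, the string pass-through, and the America/New_York default (inferred from the -0400/-0500 offsets) are assumptions, not the real implementation:

from datetime import datetime

import pytz  # the tests above already rely on pytz timezones

LOCAL_TZ = pytz.timezone('America/New_York')  # assumed; stands in for the patched get_timezone()


def write_submitty_date_sketch(d, milliseconds=False):
    # hypothetical stand-in for dateutils.write_submitty_date, pinned by the tests above
    if isinstance(d, str):
        return d  # assumption: the real helper may parse/normalize strings instead
    if not isinstance(d, datetime):
        raise TypeError("Invalid type. Expected datetime or datetime string,"
                        f" got {type(d)}.")
    if d.tzinfo is None:
        d = LOCAL_TZ.localize(d)  # naive datetimes pick up the configured local zone
    answer = d.strftime("%Y-%m-%d %H:%M:%S")
    if milliseconds:
        # truncate, don't round: 123500 microseconds -> ".123" (Example #13)
        answer += ".{:03d}".format(d.microsecond // 1000)
    return answer + d.strftime("%z")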
Example #14
def log_message(is_batch=False,
                which_untrusted="",
                jobname="",
                timelabel="",
                elapsed_time=-1,
                message=""):
    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(AUTOGRADING_LOG_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    my_pid = os.getpid()
    batch_string = "BATCH" if is_batch else ""
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        print("%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s | %s" %
              (easy_to_read_date, my_pid, batch_string, which_untrusted,
               jobname, timelabel, elapsed_time_string, time_unit, message),
              file=myfile)
        fcntl.flock(myfile, fcntl.LOCK_UN)
Example #15
def archive_autograding_results(config, working_directory: os.PathLike,
                                job_id: str, which_untrusted: str,
                                is_batch_job: bool, complete_config_obj: dict,
                                gradeable_config_obj: dict, queue_obj: dict,
                                is_test_environment: bool):
    """ After grading is finished, archive the results. """

    tmp_autograding = os.path.join(working_directory, "TMP_AUTOGRADING")
    tmp_submission = os.path.join(working_directory, "TMP_SUBMISSION")
    tmp_work = os.path.join(working_directory, "TMP_WORK")
    tmp_logs = os.path.join(working_directory, "TMP_SUBMISSION", "tmp_logs")
    tmp_results = os.path.join(working_directory, "TMP_RESULTS")
    submission_path = os.path.join(tmp_submission, "submission")
    random_output_path = os.path.join(tmp_work, 'random_output')

    if "generate_output" not in queue_obj:
        partial_path = os.path.join(queue_obj["gradeable"], queue_obj["who"],
                                    str(queue_obj["version"]))
        item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                                 "submissions", partial_path)
    elif queue_obj["generate_output"]:
        item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                                 "generated_output", queue_obj["gradeable"])
    results_public_dir = os.path.join(tmp_results, "results_public")
    results_details_dir = os.path.join(tmp_results, "details")
    patterns = complete_config_obj['autograding']

    # Copy work to details
    pattern_copy("work_to_details", patterns['work_to_details'], tmp_work,
                 results_details_dir, tmp_logs)

    # Copy work to public
    if 'work_to_public' in patterns:
        pattern_copy("work_to_public", patterns['work_to_public'], tmp_work,
                     results_public_dir, tmp_logs)

    if os.path.exists(random_output_path):
        pattern_copy("work_to_random_output", [
            os.path.join(random_output_path, '**', '*.txt'),
        ], tmp_work, tmp_results, tmp_logs)
    # timestamp of first access to the gradeable page
    first_access_string = ""
    # grab the submission time
    if "generate_output" in queue_obj and queue_obj["generate_output"]:
        submission_string = ""
    else:
        with open(
                os.path.join(tmp_submission, 'submission',
                             ".submit.timestamp"),
                'r') as submission_time_file:
            submission_string = submission_time_file.read().rstrip()
        # grab the first access to the gradeable page (if it exists)
        user_assignment_access_filename = os.path.join(
            tmp_submission, ".user_assignment_access.json")
        if os.path.exists(user_assignment_access_filename):
            with open(user_assignment_access_filename, 'r') as access_file:
                obj = json.load(access_file)
                first_access_string = obj[0]["timestamp"]

    history_file_tmp = os.path.join(tmp_submission, "history.json")
    history_file = os.path.join(tmp_results, "history.json")
    if os.path.isfile(history_file_tmp) and not is_test_environment:
        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(config.submitty_users['daemon_uid']),
                 ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()
    grade_result = ""
    if "generate_output" not in queue_obj:
        try:
            shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
            with open(os.path.join(tmp_work, "grade.txt")) as f:
                lines = f.readlines()
                for line in lines:
                    line = line.rstrip('\n')
                    if line.startswith("Automatic grading total:"):
                        grade_result = line
        except Exception:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                f.write(
                    f"\n\nERROR: Grading incomplete -- Could not process {os.path.join(tmp_work,'grade.txt')}"
                )
            config.logger.log_message(
                "ERROR: could not process grade.txt. See stack trace entry for more details.",
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )
            config.logger.log_stack_trace(
                traceback.format_exc(),
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )

        submission_datetime = dateutils.read_submitty_date(submission_string)
        submission_longstring = dateutils.write_submitty_date(
            submission_datetime)

        # compute lateness (if there is a due date / submission deadline)
        gradeable_deadline_string = gradeable_config_obj["date_due"]
        if gradeable_deadline_string is None:
            print("NO DEADLINE")
            gradeable_deadline_longstring = "None"
            seconds_late = 0
        else:
            print("DEADLINE IS '" + str(gradeable_deadline_string) + "'")
            gradeable_deadline_datetime = dateutils.read_submitty_date(
                gradeable_deadline_string)
            gradeable_deadline_longstring = dateutils.write_submitty_date(
                gradeable_deadline_datetime)
            seconds_late = int((submission_datetime -
                                gradeable_deadline_datetime).total_seconds())

        # compute the access duration in seconds (if it exists)
        access_duration = -1
        if first_access_string != "":
            first_access_datetime = dateutils.read_submitty_date(
                first_access_string)
            access_duration = int(
                (submission_datetime - first_access_datetime).total_seconds())

        # note: negative = not late
        grading_finished_longstring = dateutils.write_submitty_date(
            grading_finished)

        with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
            grading_began_longstring = f.read()
        grading_began = dateutils.read_submitty_date(grading_began_longstring)

        gradingtime = (grading_finished - grading_began).total_seconds()

        queue_obj["gradingtime"] = gradingtime
        queue_obj["grade_result"] = grade_result
        queue_obj["which_untrusted"] = which_untrusted
        waittime = queue_obj["waittime"]

        try:

            # Make certain results.json is utf-8 encoded.
            results_json_path = os.path.join(tmp_work, 'results.json')
            with codecs.open(results_json_path,
                             'r',
                             encoding='utf-8',
                             errors='ignore') as infile:
                results_str = "".join(line.rstrip() for line in infile)
                results_obj = json.loads(results_str)
            with open(results_json_path, 'w') as outfile:
                json.dump(results_obj, outfile, indent=4)

            shutil.move(results_json_path,
                        os.path.join(tmp_results, "results.json"))
        except Exception:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                f.write(
                    f"\n\nERROR: Grading incomplete -- Could not open/write {os.path.join(tmp_work,'results.json')}"
                )
            config.logger.log_message(
                "ERROR: results.json read/write error",
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )
            config.logger.log_stack_trace(
                traceback.format_exc(),
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )

        # Rescue custom validator files
        custom_validator_output_directory = os.path.join(
            tmp_results, "custom_validator_output")
        pattern_copy("rescue_custom_validator_validation_jsons", [
            os.path.join(tmp_work, 'validation_results_*.json'),
        ], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_logs", [
            os.path.join(tmp_work, 'validation_logfile_*.txt'),
        ], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_errors", [
            os.path.join(tmp_work, 'validation_stderr_*.txt'),
        ], tmp_work, custom_validator_output_directory, tmp_logs)

        just_write_grade_history(history_file, gradeable_deadline_longstring,
                                 submission_longstring, seconds_late,
                                 first_access_string, access_duration,
                                 queue_obj["queue_time"],
                                 "BATCH" if is_batch_job else "INTERACTIVE",
                                 grading_began_longstring,
                                 int(waittime), grading_finished_longstring,
                                 int(gradingtime), grade_result,
                                 queue_obj.get("revision", None))

        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            f.write("FINISHED GRADING!\n")

        config.logger.log_message(grade_result,
                                  job_id=job_id,
                                  is_batch=is_batch_job,
                                  which_untrusted=which_untrusted,
                                  jobname=item_name,
                                  timelabel="grade:",
                                  elapsed_time=gradingtime)

    with open(os.path.join(tmp_results, "queue_file.json"), 'w') as outfile:
        json.dump(queue_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    # save the logs!
    shutil.copytree(tmp_logs, os.path.join(tmp_results, "logs"))

    # Save the .submit.notebook
    # Copy the .submit.notebook to tmp_work for validation
    submit_notebook_path = os.path.join(tmp_submission, 'submission',
                                        ".submit.notebook")
    if os.path.exists(submit_notebook_path):
        shutil.copy(submit_notebook_path,
                    os.path.join(tmp_results, ".submit.notebook"))
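Example #15 calls just_write_grade_history, which is not shown in this section. A hedged sketch of what it plausibly does, with the parameter order taken from the call above; every JSON field name below is an assumption:

import json
import os


def just_write_grade_history(history_file, assignment_deadline, submission_time,
                             seconds_late, first_access_time, access_duration,
                             queue_time, batch_regrade, grading_began, wait_time,
                             grading_finished, grade_time, autograde_total, revision):
    # hypothetical sketch: append one grading record to the history.json list
    history = []
    if os.path.isfile(history_file):
        with open(history_file, 'r') as infile:
            history = json.load(infile)
    entry = {
        "assignment_deadline": assignment_deadline,
        "submission_time": submission_time,
        "seconds_late": seconds_late,
        "first_access_time": first_access_time,
        "access_duration": access_duration,
        "queue_time": queue_time,
        "batch_regrade": batch_regrade,
        "grading_began": grading_began,
        "wait_time": wait_time,
        "grading_finished": grading_finished,
        "grade_time": grade_time,
        "autograde_total": autograde_total,
    }
    if revision is not None:
        entry["revision"] = revision
    history.append(entry)
    with open(history_file, 'w') as outfile:
        json.dump(history, outfile, indent=4)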
Example #16
def main():
    #In order to successfully re-upload the assignment, we need the directory of the assignment to be inflated,
    #and the semester, course name, and assignment name that it should be uploaded to.
    parser = argparse.ArgumentParser(description='This script was created to help professors re-upload an old semester\'s\
        assignments for autograding and reevaluation. To run it, please create a new assignment via the submitty interface to\
        which you wish to upload a past assignment\'s submissions. Then, run this script.')
    parser.add_argument('-g', '--grade', action='store_true', help='adds assignments to the grading queue.')
    parser.add_argument('ARCHIVED_directory', help='The path to the top level of the old assignment\'s directory tree. This folder\'s\
        subdirectories should mirror a /var/local/submitty/courses/<semester>/<course>/submissions/<assignment_name> folder.')
    parser.add_argument('semester', help='The semester of the course you wish to upload to.')
    parser.add_argument('course_name', help='The name of the course you wish to upload to.')
    parser.add_argument('assignment_name', help='The assignment name you wish to upload to.')
    args = parser.parse_args()

    print("You are about to attempt to copy\n\tSOURCE: " + args.ARCHIVED_directory + "\n\tDESTINATION: "
     + SUBMITTY_DATA_DIR + "/" + args.semester+"/courses/"+args.course_name+"/submissions/"+args.assignment_name)

    #Check the existence of the submission path (the user should have already created the assignment for us to populate)
    CURRENT_course_path = os.path.join(SUBMITTY_DATA_DIR, "courses", args.semester, args.course_name)
    print("Current course path:" + CURRENT_course_path)
    CURRENT_assignment_path = os.path.join(CURRENT_course_path, "submissions", args.assignment_name)
    print("Current assignment path" + CURRENT_assignment_path)
    #we check the bin directory to see if the assignment has ever been built. (Not a guarantee, but high probability of success)
    CURRENT_bin_directory = os.path.join(CURRENT_course_path, "bin", args.assignment_name)
    if not os.path.isdir(CURRENT_bin_directory):
        raise SystemExit("ERROR: The directory " + CURRENT_bin_directory + " does not exist. Please make sure that you\n\t\
            1) Configured the assignment on the course website\n\t2) Did not mistype the program arguments.") 

    else:
        print("SUCCESS: The directory " + CURRENT_assignment_path + " does exist.")    

    #Make a connection to the database and grab the necessary tables.
    database = "submitty_" + args.semester + "_" + args.course_name
    print("Connecting to database: ", end="")
    engine = create_engine("postgresql://{}:{}@{}/{}".format(DB_USER, DB_PASS, DB_HOST, database))
    conn = engine.connect()
    metadata = MetaData(bind=engine)
    print("(connection made, metadata bound)...")
    electronic_gradeable_data = Table("electronic_gradeable_data", metadata, autoload=True)
    electronic_gradeable_version = Table("electronic_gradeable_version", metadata, autoload=True)

    course_group = grp.getgrgid(os.stat(os.path.join(SUBMITTY_DATA_DIR, "courses", args.semester, args.course_name)).st_gid)[0]

    #For every user folder in our directory, for every submission folder inside of it, copy over that user/submission, add it to 
    #the database, and create a queue file. 
    for user_folder in sorted(os.listdir(args.ARCHIVED_directory)):
        print("evaluating " + user_folder)
        #If the item we are currently looking at is a directory, we will assume it is a student submission directory.
        ARCHIVED_user_dir = os.path.join(args.ARCHIVED_directory, user_folder)
        print("Starting work on archived dir: " + ARCHIVED_user_dir)
        if not os.path.isdir(ARCHIVED_user_dir):
            print("Skipping the following as it is not a directory: " + ARCHIVED_user_dir)
            continue
        user_name = user_folder
        print("processing user: "******"user_assignment_settings.json"), "r") as open_file:
            user_assignment_settings = json.load(open_file)

        #For every folder inside of the user submission folder.
        for submission in os.listdir(ARCHIVED_user_dir):
            ARCHIVED_submission_path = os.path.join(ARCHIVED_user_dir, submission)
            print("Evaluating submission: " + ARCHIVED_submission_path)
            #If this entry in the directory to be copied is not a directory, it is a user_assignments settings.
            #Right now, we ignore these.
            #TODO copy over the user_assignment_settings files and use them instead of creating new ones.
            if not os.path.isdir(ARCHIVED_submission_path):
                if user_folder == ".submit.timestamp":
                    with open(ARCHIVED_submission_path, 'r') as old_timestamp, open(CURRENT_user_path, 'w') as new_timestamp:
                        for line in old_timestamp:
                            new_timestamp.write(line)
                        os.system("chown -R submitty_php:{} {}".format(course_group, new_timestamp))
                continue
            #if the student's submission dir does not exist, make it.
            if not os.path.isdir(CURRENT_user_path):
                os.makedirs(CURRENT_user_path)
            #The current directory for the new submission
            CURRENT_submission_path = os.path.join(CURRENT_user_path, submission)
            #TODO If the submission already exists, give up?
            if os.path.isdir(CURRENT_submission_path):
                print("Skipped " + CURRENT_submission_path + " as it already exists.")
                continue
            #This permission also sets the underlying submission paths recursively.
            print("Set permissions on the submission")
            os.system("chown -R submitty_php:{} {}".format(course_group, CURRENT_user_path))

            #TODO: Sort the submissions so that they are guaranteed to be given in chronological (1,2,3,etc) order.
            #copy in the submission directory.
            print("Copied from\n\tSOURCE: " + ARCHIVED_submission_path +"\n\t" + "DESTINATION: " + CURRENT_submission_path)
            shutil.copytree(ARCHIVED_submission_path, CURRENT_submission_path)
            #give the appropriate permissions
            os.system("chown -R submitty_php:{} {}".format(course_group, CURRENT_submission_path))
            #add each submission to the database.
            current_time_string = dateutils.write_submitty_date()

            conn.execute(electronic_gradeable_data.insert(), g_id=args.assignment_name, user_id=user_name,
                         g_version=submission, submission_time=current_time_string)
            #If this is the first submission, create a new entry in the table, otherwise, update.
            #TODO use a more reliable method of determining if this is the first submission.
            if int(submission) == 1:
                print("Entered new user " + user_name + " because submission was " + submission)
                conn.execute(electronic_gradeable_version.insert(), g_id=args.assignment_name, user_id=user_name,
                         active_version=user_assignment_settings['active_version'])
            else:
                print("UPDATED: where g_id is " + args.assignment_name + " and user id is " + user_name + " to value " + str(user_assignment_settings['active_version']))
                stmt = electronic_gradeable_version.update().\
                        where(and_(electronic_gradeable_version.c.g_id==args.assignment_name, electronic_gradeable_version.c.user_id==user_name)).\
                        values(active_version=user_assignment_settings['active_version'])
                conn.execute(stmt)
            with open(os.path.join(CURRENT_user_path, "user_assignment_settings.json"), "w") as open_file:
                json.dump(user_assignment_settings, open_file, indent=4)

            if args.grade:
                # Create a queue file for each submission
                queue_file = "__".join([args.semester, args.course_name, args.assignment_name, user_name, submission])
                print("Creating queue file:", queue_file)
                queue_file = os.path.join(SUBMITTY_DATA_DIR, "to_be_graded_queue", queue_file)
                with open(queue_file, "w") as open_file:
                    # FIXME: This will need to be adjusted for team assignments
                    # and assignments with special required capabilities!
                    queue_time = dateutils.write_submitty_date()
                    json.dump({"semester": args.semester,
                               "course": args.course_name,
                               "gradeable": args.assignment_name,
                               "user": user_name,
                               "team": "",
                               "who": user_name,
                               "is_team": False,
                               "version": submission,
                               "required_capabilities" : "default",
                               "queue_time": queue_time,
                               "regrade": True,
                               "max_possible_grading_time": -1}, open_file)

    conn.close()
Example #17
def prepare_autograding_and_submission_zip(which_machine, which_untrusted,
                                           next_directory, next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(
        random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id, next_directory, next_to_grade)

    partial_path = os.path.join(obj["gradeable"], obj["who"],
                                str(obj["version"]))
    item_name = os.path.join(obj["semester"], obj["course"], "submissions",
                             partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the submission directory does not exist " +
            submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist",
                           submission_path)
    print(which_machine, which_untrusted, "prepare zip", submission_path)
    is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
        SUBMITTY_DATA_DIR, obj["semester"], obj["course"], obj["gradeable"],
        obj["who"], obj["team"])

    is_batch_job = "regrade" in obj and obj["regrade"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory, next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began - queue_time).total_seconds()
    grade_items_logging.log_message(job_id, is_batch_job, "zip", item_name,
                                    "wait:", waittime, "")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                      obj["semester"], obj["course"],
                                      "provided_code", obj["gradeable"])
    instructor_solution_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                            obj["semester"], obj["course"],
                                            "instructor_solution",
                                            obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                   obj["semester"], obj["course"],
                                   "test_input", obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"],
                                    "test_output", obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                               obj["semester"], obj["course"],
                                               "custom_validation_code",
                                               obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                            obj["course"], "bin", obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"], "config",
                                    "form",
                                    "form_" + obj["gradeable"] + ".json")
    complete_config = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"], "config",
        "complete_config", "complete_config_" + obj["gradeable"] + ".json")

    if not os.path.exists(form_json_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the form json file does not exist " +
            form_json_config)
        raise RuntimeError("ERROR: the form json file does not exist ",
                           form_json_config)
    if not os.path.exists(complete_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the complete config file does not exist " +
            complete_config)
        raise RuntimeError("ERROR: the complete config file does not exist ",
                           complete_config)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path,
                       os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(instructor_solution_path,
                       os.path.join(tmp_autograding, "instructor_solution"))
    copytree_if_exists(test_input_path,
                       os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path,
                       os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(custom_validation_code_path,
                       os.path.join(tmp_autograding, "custom_validation_code"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config,
                os.path.join(tmp_autograding, "complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                 obj["course"], "checkout", partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                obj["course"], "results", partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))
    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)
    queue_file = os.path.join(next_directory, next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a').close()

    # --------------------------------------------------------------------
    # CONFIRM WE HAVE A CHECKOUT OF THE STUDENT'S REPO
    if is_vcs:
        # there should be a checkout log file in the results directory
        # move that file to the tmp logs directory..
        vcs_checkout_logfile = os.path.join(results_path, "logs",
                                            "vcs_checkout.txt")
        if os.path.isfile(vcs_checkout_logfile):
            shutil.move(vcs_checkout_logfile, tmp_logs)
        else:
            grade_items_logging.log_message(
                job_id,
                message="ERROR: missing vcs_checkout.txt logfile " +
                str(vcs_checkout_logfile))

    copytree_if_exists(submission_path,
                       os.path.join(tmp_submission, "submission"))
    copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    grade_item.zip_my_directory(tmp_autograding, my_autograding_zip_file)
    grade_item.zip_my_directory(tmp_submission, my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    #grade_items_logging.log_message(job_id,is_batch_job,"done zip",item_name)

    return (my_autograding_zip_file, my_submission_zip_file)
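Example #17 leans on a copytree_if_exists helper that never appears in this section. A plausible sketch, assuming the target directory should exist (possibly empty) either way so later steps can rely on it; the body is a guess consistent with how the helper is used above:

import os
import shutil


def copytree_if_exists(source, target):
    # hypothetical sketch: copy the tree when the source exists,
    # otherwise create an empty target directory
    if os.path.isdir(source):
        shutil.copytree(source, target)
    else:
        os.makedirs(target, exist_ok=True)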
Example #18
def replay(starttime, endtime):
    replay_starttime = datetime.datetime.now()
    print(replay_starttime, "replay start: ", starttime)

    # error checking
    if not (starttime.year == endtime.year and
            starttime.month == endtime.month and
            starttime.day == endtime.day):
        print ("ERROR!  invalid replay range ",starttime,"->",endtime, " (must be same day)")
        exit()
    if starttime >= endtime:
        print ("ERROR!  invalid replay range ",starttime,"->",endtime, " (invalid times)")
        exit()

    # find the correct file
    file = '/var/local/submitty/logs/autograding/{:d}{:02d}{:02d}.txt'.format(
        starttime.year, starttime.month, starttime.day)
    with open(file, 'r') as lines:
        for line in lines:
            things = line.split('|')
            original_time = dateutils.read_submitty_date(things[0])
            # skip items outside of this time range
            if (original_time < starttime or
                original_time > endtime):
                continue
            # skip batch items
            if things[2].strip() == "BATCH":
                continue
            # only process the "wait" time (when we started grading the item)
            iswait = things[5].strip()[0:5]
            if iswait != "wait:":
                continue
            waittime = float(things[5].split()[1])
            # grab the job name
            my_job = things[4].strip()
            if my_job == "":
                continue
            what = my_job.split('/')
            # for now, only interested in Data Structures and Computer Science 1
            if not (what[1] == "csci1200" or what[1] == "csci1100"):
                continue
            # calculate when this job should be relaunched
            time_multiplier = 1.0
            pause_time = replay_starttime + (time_multiplier * (original_time - starttime))
            pause.until(pause_time)
            queue_time = dateutils.write_submitty_date()
            print(datetime.datetime.now(), "      REPLAY: ", original_time, " ", my_job)
            # FIXME: this will need to be adjusted for team assignments
            # and assignments with special required capabilities!
            item = {"semester": what[0],
                    "course": what[1],
                    "gradeable": what[3],
                    "user": what[4],
                    "team": "",
                    "who": what[4],
                    "is_team": False,
                    "version": what[5],
                    "required_capabilities": "default",
                    "queue_time": queue_time,
                    "regrade": True,
                    "max_possible_grading_time" : -1 }
            file_name = "__".join([item['semester'], item['course'], item['gradeable'], item['who'], item['version']])
            file_name = os.path.join(SUBMITTY_DATA_DIR, "to_be_graded_queue", file_name)
            with open(file_name, "w") as open_file:
                json.dump(item, open_file, sort_keys=True, indent=4)
                os.system("chmod o+rw {}".format(file_name))  
    print (datetime.datetime.now(),"replay end: ",endtime)
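For reference, replay() paces job re-submission in real time: each queue file is written at replay_starttime + time_multiplier * (original_time - starttime), so a multiplier of 1.0 reproduces the original day's arrival pattern. A hypothetical invocation replaying a two-hour window (the date is made up):

import datetime

# replay the grading load observed between 10:00 and 12:00 on one (hypothetical) day
replay(datetime.datetime(2019, 9, 30, 10, 0, 0),
       datetime.datetime(2019, 9, 30, 12, 0, 0))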
Example #19
def grade_from_zip(my_autograding_zip_file, my_submission_zip_file,
                   which_untrusted):

    os.chdir(SUBMITTY_DATA_DIR)
    tmp = os.path.join("/var/local/submitty/autograding_tmp/", which_untrusted,
                       "tmp")

    if os.path.exists(tmp):
        untrusted_grant_rwx_access(which_untrusted, tmp)
        add_permissions_recursive(
            tmp, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP
            | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH
            | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
            | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
            | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
            | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
            | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # Remove any and all containers left over from past runs.
    old_containers = subprocess.check_output(
        ['docker', 'ps', '-aq', '-f',
         'name={0}'.format(which_untrusted)]).split()

    for old_container in old_containers:
        subprocess.call(['docker', 'rm', '-f', old_container.decode('utf8')])

    # clean up old usage of this directory
    shutil.rmtree(tmp, ignore_errors=True)
    os.mkdir(tmp)

    which_machine = socket.gethostname()

    # unzip autograding and submission folders
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    unzip_this_file(my_autograding_zip_file, tmp_autograding)
    unzip_this_file(my_submission_zip_file, tmp_submission)
    os.remove(my_autograding_zip_file)
    os.remove(my_submission_zip_file)

    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")

    queue_file = os.path.join(tmp_submission, "queue_file.json")
    with open(queue_file, 'r') as infile:
        queue_obj = json.load(infile)

    queue_time_longstring = queue_obj["queue_time"]
    waittime = queue_obj["waittime"]
    is_batch_job = queue_obj["regrade"]
    job_id = queue_obj["job_id"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"
    revision = queue_obj.get("revision", None)

    partial_path = os.path.join(queue_obj["gradeable"], queue_obj["who"],
                                str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                             "submissions", partial_path)

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "wait:", waittime, "")

    with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)

    submission_path = os.path.join(tmp_submission, "submission")
    checkout_path = os.path.join(tmp_submission, "checkout")

    provided_code_path = os.path.join(tmp_autograding, "provided_code")
    test_input_path = os.path.join(tmp_autograding, "test_input")
    test_output_path = os.path.join(tmp_autograding, "test_output")
    custom_validation_code_path = os.path.join(tmp_autograding,
                                               "custom_validation_code")
    bin_path = os.path.join(tmp_autograding, "bin")
    form_json_config = os.path.join(tmp_autograding, "form.json")
    complete_config = os.path.join(tmp_autograding, "complete_config.json")

    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]

    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    is_vcs = gradeable_config_obj["upload_type"] == "repository"
    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)

    if complete_config_obj.get('one_part_only', False):
        allow_only_one_part(submission_path,
                            os.path.join(tmp_logs, "overall.txt"))
        if is_vcs:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print(
                    "WARNING:  ONE_PART_ONLY OPTION DOES NOT MAKE SENSE WITH VCS SUBMISSION",
                    file=f)

    # --------------------------------------------------------------------
    # START DOCKER

    # NOTE: DOCKER SUPPORT PRELIMINARY -- NEEDS MORE SECURITY BEFORE DEPLOYED ON LIVE SERVER
    complete_config = os.path.join(tmp_autograding, "complete_config.json")
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    # Save ourselves if autograding_method is None.
    autograding_method = complete_config_obj.get("autograding_method", "")
    USE_DOCKER = autograding_method == "docker"

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS",
              file=f)

    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp, "TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)

    submission_path = os.path.join(tmp_submission, "submission")
    checkout_path = os.path.join(tmp_submission, "checkout")

    provided_code_path = os.path.join(tmp_autograding, "provided_code")
    test_input_path = os.path.join(tmp_autograding, "test_input")
    test_output_path = os.path.join(tmp_autograding, "test_output")
    custom_validation_code_path = os.path.join(tmp_autograding,
                                               "custom_validation_code")
    bin_path = os.path.join(tmp_autograding, "bin")
    form_json_config = os.path.join(tmp_autograding, "form.json")

    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]

    patterns_submission_to_compilation = complete_config_obj["autograding"][
        "submission_to_compilation"]

    add_permissions(tmp_logs, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    if USE_DOCKER:
        print("!!!!!!!!!!!!!!!!!!USING DOCKER!!!!!!!!!!!!!!!!!!!!!!!!")

    with open(complete_config, 'r') as infile:
        config = json.load(infile)
        my_testcases = config['testcases']

    # grab the submission time
    with open(os.path.join(submission_path, ".submit.timestamp"),
              'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    with open(os.path.join(tmp_logs, "compilation_log.txt"), 'w') as logfile:
        # we start counting from one.
        executable_path_list = list()
        for testcase_num in range(1, len(my_testcases) + 1):
            testcase_folder = os.path.join(tmp_compilation,
                                           "test{:02}".format(testcase_num))

            if 'type' in my_testcases[testcase_num - 1]:
                if my_testcases[testcase_num - 1]['type'] not in ('FileCheck', 'Compilation'):
                    continue

                if my_testcases[testcase_num - 1]['type'] == 'Compilation':
                    if 'executable_name' in my_testcases[testcase_num - 1]:
                        provided_executable_list = my_testcases[
                            testcase_num - 1]['executable_name']
                        if not isinstance(provided_executable_list, list):
                            provided_executable_list = [provided_executable_list]
                        for executable_name in provided_executable_list:
                            if executable_name.strip() == '':
                                continue
                            executable_path = os.path.join(
                                testcase_folder, executable_name)
                            executable_path_list.append(
                                (executable_path, executable_name))
            else:
                continue

            os.makedirs(testcase_folder)

            pattern_copy("submission_to_compilation",
                         patterns_submission_to_compilation, submission_path,
                         testcase_folder, tmp_logs)

            if is_vcs:
                pattern_copy("checkout_to_compilation",
                             patterns_submission_to_compilation,
                             checkout_subdir_path, testcase_folder, tmp_logs)

            # copy any instructor provided code files to tmp compilation directory
            copy_contents_into(job_id, provided_code_path, testcase_folder,
                               tmp_logs)

            # copy compile.out to the current directory
            shutil.copy(os.path.join(bin_path, "compile.out"),
                        os.path.join(testcase_folder, "my_compile.out"))
            add_permissions(
                os.path.join(testcase_folder, "my_compile.out"), stat.S_IXUSR
                | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
            #untrusted_grant_rwx_access(which_untrusted, tmp_compilation)
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            add_permissions_recursive(
                testcase_folder, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
                | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
                | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
                | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
                | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR
                | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP
                | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
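            # Editorial sketch, not in the original: each of the three identical
            # masks above ORs together all nine stat permission bits, which is
            # numerically just 0o777. Assuming add_permissions_recursive accepts
            # plain integer modes, an equivalent (and shorter) call would be:
            #
            #     add_permissions_recursive(testcase_folder, 0o777, 0o777, 0o777)
            #
            # The meaning of the three separate mask arguments (presumably for
            # directories vs. files) is not shown in this snippet.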

            if USE_DOCKER:
                # define before the try so the finally clause below cannot hit a NameError
                compilation_container = None
                try:
                    # There can be only one container for a compilation step, so grab its container image
                    # TODO: set default in load_config_json.cpp
                    if my_testcases[testcase_num - 1]['type'] == 'FileCheck':
                        print(
                            "performing filecheck in default ubuntu:custom container"
                        )
                        container_image = "ubuntu:custom"
                    else:
                        container_image = my_testcases[
                            testcase_num -
                            1]["containers"][0]["container_image"]
                        print(
                            'creating a compilation container with image {0}'.
                            format(container_image))
                    untrusted_uid = str(getpwnam(which_untrusted).pw_uid)

                    compilation_container = subprocess.check_output([
                        'docker',
                        'create',
                        '-i',
                        '-u',
                        untrusted_uid,
                        '--network',
                        'none',
                        '-v',
                        testcase_folder + ':' + testcase_folder,
                        '-w',
                        testcase_folder,
                        container_image,
                        #The command to be run.
                        os.path.join(testcase_folder, 'my_compile.out'),
                        queue_obj['gradeable'],
                        queue_obj['who'],
                        str(queue_obj['version']),
                        submission_string,
                        '--testcase',
                        str(testcase_num)
                    ]).decode('utf8').strip()
                    print("starting container")
                    compile_success = subprocess.call(
                        ['docker', 'start', '-i', compilation_container],
                        stdout=logfile,
                        cwd=testcase_folder)
                except Exception as e:
                    print('An error occurred when compiling with docker.')
                    grade_items_logging.log_stack_trace(
                        job_id,
                        is_batch_job,
                        which_untrusted,
                        item_name,
                        trace=traceback.format_exc())
                finally:
                    if compilation_container is not None:
                        subprocess.call(
                            ['docker', 'rm', '-f', compilation_container])
                        print("cleaned up compilation container.")
            else:
                compile_success = subprocess.call([
                    os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                                 "untrusted_execute"), which_untrusted,
                    os.path.join(testcase_folder, "my_compile.out"),
                    queue_obj["gradeable"], queue_obj["who"],
                    str(queue_obj["version"]), submission_string, '--testcase',
                    str(testcase_num)
                ],
                                                  stdout=logfile,
                                                  cwd=testcase_folder)
            # remove the compilation program
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            os.remove(os.path.join(testcase_folder, "my_compile.out"))

    if compile_success == 0:
        print(which_machine, which_untrusted, "COMPILATION OK")
    else:
        print(which_machine, which_untrusted, "COMPILATION FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="COMPILATION FAILURE")
    add_permissions_recursive(
        tmp_compilation, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
        | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
        | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
        | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR
        | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP
        | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # return to the main tmp directory
    os.chdir(tmp)

    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nRUNNER STARTS", file=f)

    tmp_work = os.path.join(tmp, "TMP_WORK")
    tmp_work_test_input = os.path.join(tmp_work, "test_input")
    tmp_work_submission = os.path.join(tmp_work, "submitted_files")
    tmp_work_compiled = os.path.join(tmp_work, "compiled_files")
    tmp_work_checkout = os.path.join(tmp_work, "checkout")

    os.mkdir(tmp_work)

    os.mkdir(tmp_work_test_input)
    os.mkdir(tmp_work_submission)
    os.mkdir(tmp_work_compiled)
    os.mkdir(tmp_work_checkout)

    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"][
        "submission_to_runner"]

    pattern_copy("submission_to_runner", patterns_submission_to_runner,
                 submission_path, tmp_work_submission, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner", patterns_submission_to_runner,
                     checkout_subdir_path, tmp_work_checkout, tmp_logs)

    # move the compiled files into the tmp_work_compiled directory
    for path, name in executable_path_list:
        if not os.path.isfile(path):
            continue
        target_path = os.path.join(tmp_work_compiled, name)
        if not os.path.exists(target_path):
            os.makedirs(os.path.dirname(target_path), exist_ok=True)
        shutil.copy(path, target_path)
        print('copied over {0}'.format(target_path))
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print('grade_item: copied over {0}'.format(target_path), file=f)

    patterns_compilation_to_runner = complete_config_obj["autograding"][
        "compilation_to_runner"]
    #copy into the actual tmp_work directory for archiving/validating
    pattern_copy("compilation_to_runner", patterns_compilation_to_runner,
                 tmp_compilation, tmp_work, tmp_logs)
    #copy into tmp_work_compiled, which is provided to each testcase
    # TODO change this as our methodology for declaring testcase dependencies becomes more robust
    pattern_copy("compilation_to_runner", patterns_compilation_to_runner,
                 tmp_compilation, tmp_work_compiled, tmp_logs)

    # copy input files to tmp_work directory
    copy_contents_into(job_id, test_input_path, tmp_work_test_input, tmp_logs)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    # copy runner.out to the current directory
    shutil.copy(os.path.join(bin_path, "run.out"),
                os.path.join(tmp_work, "my_runner.out"))

    # set the appropriate permissions for the newly created directories
    # TODO: this replaces the commented-out code below

    add_permissions(
        os.path.join(tmp_work, "my_runner.out"), stat.S_IXUSR | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_submission,
                    stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_compiled,
                    stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_checkout,
                    stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    #TODO this is how permissions used to be set. It was removed because of the way it interacts with the sticky bit.
    ## give the untrusted user read/write/execute permissions on the tmp directory & files
    # os.system('ls -al {0}'.format(tmp_work))
    # add_permissions_recursive(tmp_work,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    ##################################################################################################
    #call grade_item_main_runner.py
    runner_success = grade_item_main_runner.executeTestcases(
        complete_config_obj, tmp_logs, tmp_work, queue_obj, submission_string,
        item_name, USE_DOCKER, None, which_untrusted, job_id, grading_began)
    ##################################################################################################

    if runner_success == 0:
        print(which_machine, which_untrusted, "RUNNER OK")
    else:
        print(which_machine, which_untrusted, "RUNNER FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="RUNNER FAILURE")

    add_permissions_recursive(
        tmp_work, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP
        | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH
        | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
        | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
        | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
        | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions_recursive(
        tmp_compilation, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
        | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
        | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
        | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR
        | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP
        | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # --------------------------------------------------------------------
    # RUN VALIDATOR
    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nVALIDATION STARTS",
              file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"][
        "submission_to_validation"]
    pattern_copy("submission_to_validation", patterns_submission_to_validation,
                 submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation",
                     patterns_submission_to_validation, checkout_subdir_path,
                     tmp_work, tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"][
        "compilation_to_validation"]
    pattern_copy("compilation_to_validation",
                 patterns_compilation_to_validation, tmp_compilation, tmp_work,
                 tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(job_id, test_output_path, tmp_work, tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(job_id, custom_validation_code_path, tmp_work, tmp_logs)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    # copy validator.out to the current directory
    shutil.copy(os.path.join(bin_path, "validate.out"),
                os.path.join(tmp_work, "my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(
        os.path.join(tmp_work, "my_validator.out"), stat.S_IXUSR | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # TODO: remove debugging prints.
    print("VALIDATING")
    # run my_validator.out as the untrusted user
    with open(os.path.join(tmp_logs, "validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            # WIP: This option file facilitated testing...
            #USE_DOCKER = os.path.isfile("/tmp/use_docker")
            #use_docker_string="grading begins, using DOCKER" if USE_DOCKER else "grading begins (not using docker)"
            #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message=use_docker_string)
            container = subprocess.check_output([
                'docker', 'run', '-t', '-d', '-v', tmp + ':' + tmp,
                'ubuntu:custom'
            ]).decode('utf8').strip()
            dockerlaunch_done = dateutils.get_current_time()
            dockerlaunch_time = (dockerlaunch_done -
                                 grading_began).total_seconds()
            grade_items_logging.log_message(job_id, is_batch_job,
                                            which_untrusted, item_name,
                                            "dcct:", dockerlaunch_time,
                                            "docker container created")

            validator_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container,
                os.path.join(tmp_work, 'my_validator.out'),
                queue_obj['gradeable'], queue_obj['who'],
                str(queue_obj['version']), submission_string
            ],
                                                stdout=logfile)
        else:
            validator_success = subprocess.call([
                os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                             "untrusted_execute"), which_untrusted,
                os.path.join(tmp_work, "my_validator.out"),
                queue_obj["gradeable"], queue_obj["who"],
                str(queue_obj["version"]), submission_string
            ],
                                                stdout=logfile)
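    # Editorial sketch, not in the original: both branches above share one argv
    # convention for the grading binaries,
    #
    #     my_validator.out <gradeable> <who> <version> <submission_string>
    #
    # with the non-docker branch prefixing
    # $SUBMITTY_INSTALL_DIR/sbin/untrusted_execute <which_untrusted>, a wrapper
    # that (presumably) re-executes the command as the untrusted user.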

    if validator_success == 0:
        print(which_machine, which_untrusted, "VALIDATOR OK")
    else:
        print(which_machine, which_untrusted, "VALIDATOR FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)

    # grab the result of autograding
    grade_result = ""
    try:
        with open(os.path.join(tmp_work, "grade.txt")) as f:
            lines = f.readlines()
            for line in lines:
                line = line.rstrip('\n')
                if line.startswith("Automatic grading total:"):
                    grade_result = line
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open ",
                  os.path.join(tmp_work, "grade.txt"), file=f)
            grade_items_logging.log_message(
                job_id,
                is_batch_job,
                which_untrusted,
                item_name,
                message="ERROR: grade.txt does not exist")
            grade_items_logging.log_stack_trace(job_id,
                                                is_batch_job,
                                                which_untrusted,
                                                item_name,
                                                trace=traceback.format_exc())

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE
    tmp_results = os.path.join(tmp, "TMP_RESULTS")

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nARCHIVING STARTS", file=f)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    os.makedirs(os.path.join(tmp_results, "details"))

    # remove the test_input directory, so we don't archive it!
    shutil.rmtree(os.path.join(tmp_work, "test_input"))

    # loop over the test case directories, and remove any files that are also in the test_input folder
    for testcase_num in range(1, len(my_testcases) + 1):
        testcase_folder = os.path.join(tmp_work,
                                       "test{:02}".format(testcase_num))
        remove_test_input_files(os.path.join(tmp_logs, "overall.txt"),
                                test_input_path, testcase_folder)

    patterns_work_to_details = complete_config_obj["autograding"][
        "work_to_details"]
    pattern_copy("work_to_details", patterns_work_to_details, tmp_work,
                 os.path.join(tmp_results, "details"), tmp_logs)

    if ("work_to_public" in complete_config_obj["autograding"]
            and len(complete_config_obj["autograding"]["work_to_public"]) > 0):
        # create the directory
        os.makedirs(os.path.join(tmp_results, "results_public"))
        # copy the files
        patterns_work_to_public = complete_config_obj["autograding"][
            "work_to_public"]
        pattern_copy("work_to_public", patterns_work_to_public, tmp_work,
                     os.path.join(tmp_results, "results_public"), tmp_logs)
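    # Editorial sketch, not in the original: "work_to_public" is an optional
    # list of glob patterns in the autograding section of complete_config.json.
    # A hypothetical value that would publish each testcase's output.txt:
    #
    #     "autograding": { "work_to_public": ["test*/output.txt"] }
    #
    # Matching files are copied from tmp_work into results_public above.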

    history_file_tmp = os.path.join(tmp_submission, "history.json")
    history_file = os.path.join(tmp_results, "history.json")
    if os.path.isfile(history_file_tmp):
        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(DAEMON_UID), ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    try:
        shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not copy ",
                  os.path.join(tmp_work, "grade.txt"), file=f)
        grade_items_logging.log_message(
            job_id,
            is_batch_job,
            which_untrusted,
            item_name,
            message="ERROR: grade.txt does not exist")
        grade_items_logging.log_stack_trace(job_id,
                                            is_batch_job,
                                            which_untrusted,
                                            item_name,
                                            trace=traceback.format_exc())

    # -------------------------------------------------------------
    # create/append to the results history

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    gradeable_deadline_datetime = dateutils.read_submitty_date(
        gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(
        gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)

    seconds_late = int(
        (submission_datetime - gradeable_deadline_datetime).total_seconds())
    # note: negative = not late
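    # Editorial sketch, not in the original, illustrating the sign convention
    # (assuming read_submitty_date parses the same "+0000"-style strings that
    # write_submitty_date emits):
    #
    #     d = dateutils.read_submitty_date('2020-01-15 12:00:00+0000')
    #     s = dateutils.read_submitty_date('2020-01-15 11:00:00+0000')
    #     int((s - d).total_seconds())   # -3600: an hour early, so not late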

    grading_finished_longstring = dateutils.write_submitty_date(
        grading_finished)

    gradingtime = (grading_finished - grading_began).total_seconds()

    with open(os.path.join(tmp_submission, "queue_file.json"), 'r') as infile:
        queue_obj = json.load(infile)
    queue_obj["gradingtime"] = gradingtime
    queue_obj["grade_result"] = grade_result
    queue_obj["which_untrusted"] = which_untrusted

    with open(os.path.join(tmp_results, "queue_file.json"), 'w') as outfile:
        json.dump(queue_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    try:
        shutil.move(os.path.join(tmp_work, "results.json"),
                    os.path.join(tmp_results, "results.json"))
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open/write ",
                  os.path.join(tmp_work, "results.json"), file=f)
            grade_items_logging.log_message(
                job_id,
                is_batch_job,
                which_untrusted,
                item_name,
                message="ERROR: results.json read/write error")
            grade_items_logging.log_stack_trace(job_id,
                                                is_batch_job,
                                                which_untrusted,
                                                item_name,
                                                trace=traceback.format_exc())

    write_grade_history.just_write_grade_history(
        history_file, gradeable_deadline_longstring, submission_longstring,
        seconds_late, queue_time_longstring, is_batch_job_string,
        grading_began_longstring, int(waittime), grading_finished_longstring,
        int(gradingtime), grade_result, revision)

    os.chdir(SUBMITTY_DATA_DIR)

    if USE_DOCKER:
        with open(os.path.join(tmp_logs, "overall_log.txt"), 'w') as logfile:
            chmod_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container, 'chmod', '-R',
                'ugo+rwx', '.'
            ],
                                            stdout=logfile)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs, os.path.join(tmp_results, "logs"))

    # zip up results folder
    filehandle, my_results_zip_file = tempfile.mkstemp()
    zip_my_directory(tmp_results, my_results_zip_file)
    os.close(filehandle)
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp_results)
    shutil.rmtree(tmp_work)
    shutil.rmtree(tmp)

    # WIP: extra logging for testing
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message="done grading")

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
        dockerdestroy_done = dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done -
                              grading_finished).total_seconds()
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                        item_name, "ddt:", dockerdestroy_time,
                                        "docker container destroyed")

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "grade:", gradingtime,
                                    grade_result)

    return my_results_zip_file
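# Editorial sketch, not in the original: the caller owns the temporary zip
# returned by grade_from_zip and must delete it, as worker_process does in a
# later example (the destination path shown here is hypothetical):
#
#     results_zip_tmp = grade_from_zip(working_directory, which_untrusted,
#                                      autograding_zip, submission_zip)
#     # copyfile, not move: the copy inherits the destination's permissions
#     shutil.copyfile(results_zip_tmp, "/.../autograding_DONE/results.zip")
#     os.remove(results_zip_tmp)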
Example #20
def archive_autograding_results(working_directory, job_id, which_untrusted, is_batch_job, complete_config_obj, 
                                gradeable_config_obj, queue_obj, log_path, stack_trace_log_path, is_test_environment):
    """ After grading is finished, archive the results. """

    tmp_autograding = os.path.join(working_directory,"TMP_AUTOGRADING")
    tmp_submission = os.path.join(working_directory,"TMP_SUBMISSION")
    tmp_work = os.path.join(working_directory,"TMP_WORK")
    tmp_logs = os.path.join(working_directory,"TMP_SUBMISSION","tmp_logs")
    tmp_results = os.path.join(working_directory,"TMP_RESULTS")
    submission_path = os.path.join(tmp_submission, "submission")
    random_output_path = os.path.join(tmp_work, 'random_output')

    if "generate_output" not in queue_obj:
        partial_path = os.path.join(queue_obj["gradeable"],queue_obj["who"],str(queue_obj["version"]))
        item_name = os.path.join(queue_obj["semester"],queue_obj["course"],"submissions",partial_path)
    elif queue_obj["generate_output"]:
        item_name = os.path.join(queue_obj["semester"],queue_obj["course"],"generated_output",queue_obj["gradeable"])
    results_public_dir = os.path.join(tmp_results,"results_public")
    results_details_dir = os.path.join(tmp_results, "details")
    patterns = complete_config_obj['autograding']

    # Copy work to details
    pattern_copy("work_to_details", patterns['work_to_details'], tmp_work, results_details_dir, tmp_logs)
    
    # Copy work to public
    if 'work_to_public' in patterns:
        pattern_copy("work_to_public", patterns['work_to_public'], tmp_work, results_public_dir, tmp_logs)

    if os.path.exists(random_output_path):
        pattern_copy("work_to_random_output", [os.path.join(random_output_path, 'test*', '**', '*.txt'),], tmp_work, tmp_results, tmp_logs)
    # timestamp of first access to the gradeable page
    first_access_string = ""
    # grab the submission time
    if "generate_output" in queue_obj and queue_obj["generate_output"]:
        submission_string = ""
    else:
        with open(os.path.join(submission_path, ".submit.timestamp"), 'r') as submission_time_file:
            submission_string = submission_time_file.read().rstrip()
        # grab the first access to the gradeable page (if it exists)
        user_assignment_access_filename = os.path.join(tmp_submission, "user_assignment_access.json")
        if os.path.exists(user_assignment_access_filename):
            with open(user_assignment_access_filename, 'r') as access_file:
                obj = json.load(access_file, object_pairs_hook=collections.OrderedDict)
                first_access_string = obj["page_load_history"][0]["time"]
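    # Editorial sketch, not in the original: based on the lookup above,
    # user_assignment_access.json is assumed to have the shape
    #
    #     {"page_load_history": [{"time": "<submitty date string>"}, ...]}
    #
    # so first_access_string is the timestamp of the earliest page load.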

    history_file_tmp = os.path.join(tmp_submission,"history.json")
    history_file = os.path.join(tmp_results,"history.json")
    if os.path.isfile(history_file_tmp) and not is_test_environment:

        from . import CONFIG_PATH
        with open(os.path.join(CONFIG_PATH, 'submitty_users.json')) as open_file:
            OPEN_JSON = json.load(open_file)
        DAEMON_UID = OPEN_JSON['daemon_uid']

        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(DAEMON_UID),ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    if "generate_output" not in queue_obj:
        try:
            shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
        except Exception:
            with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
                print("\n\nERROR: Grading incomplete -- Could not copy ", os.path.join(tmp_work,"grade.txt"), file=f)
            log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: grade.txt does not exist")
            log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())

        grade_result = ""
        try:
            with open(os.path.join(tmp_work,"grade.txt")) as f:
                lines = f.readlines()
                for line in lines:
                    line = line.rstrip('\n')
                    if line.startswith("Automatic grading total:"):
                        grade_result = line
        except Exception:
            with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
                print("\n\nERROR: Grading incomplete -- Could not open ", os.path.join(tmp_work,"grade.txt"), file=f)
                log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: grade.txt does not exist")
                log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())


        gradeable_deadline_string = gradeable_config_obj["date_due"]

        # FIXME: The access date string is currently misformatted
        #    mm-dd-yyyy, but we want yyyy-mm-dd.  Also it is missing
        #    the common name timezone string, e.g., "America/New_York".
        #    We should standardize this logging eventually, but
        #    keeping it as is because we are mid-semester with this
        #    new feature and I don't want to break things.
        first_access_string = dateutils.normalize_submitty_date(first_access_string)
        
        submission_datetime = dateutils.read_submitty_date(submission_string)
        gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
        gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
        submission_longstring = dateutils.write_submitty_date(submission_datetime)
        seconds_late = int((submission_datetime-gradeable_deadline_datetime).total_seconds())
        # compute the access duration in seconds (if it exists)
        access_duration = -1
        if first_access_string != "":
            first_access_datetime = dateutils.read_submitty_date(first_access_string)
            access_duration = int((submission_datetime-first_access_datetime).total_seconds())

        # note: negative = not late
        grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

        with open(os.path.join(tmp_submission,".grading_began"), 'r') as f:
            grading_began_longstring = f.read()
        grading_began = dateutils.read_submitty_date(grading_began_longstring)

        gradingtime = (grading_finished - grading_began).total_seconds()

        queue_obj["gradingtime"]=gradingtime
        queue_obj["grade_result"]=grade_result
        queue_obj["which_untrusted"]=which_untrusted
        waittime = queue_obj["waittime"]

        try:

            # Make certain results.json is utf-8 encoded.
            results_json_path = os.path.join(tmp_work, 'results.json')
            with codecs.open(results_json_path, 'r', encoding='utf-8', errors='ignore') as infile:
                results_str = "".join(line.rstrip() for line in infile)
                results_obj = json.loads(results_str)
            with open(results_json_path, 'w') as outfile:
                json.dump(results_obj, outfile, indent=4)

            shutil.move(results_json_path, os.path.join(tmp_results, "results.json"))
        except Exception:
            with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
                print("\n\nERROR: Grading incomplete -- Could not open/write ", os.path.join(tmp_work,"results.json"), file=f)
                log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: results.json read/write error")
                log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())

        # Rescue custom validator files
        custom_validator_output_directory = os.path.join(tmp_results, "custom_validator_output")
        pattern_copy("rescue_custom_validator_validation_jsons", [os.path.join(tmp_work, 'validation_results_*.json'),], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_logs", [os.path.join(tmp_work, 'validation_logfile_*.txt'),], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_errors", [os.path.join(tmp_work, 'validation_stderr_*.txt'),], tmp_work, custom_validator_output_directory, tmp_logs)

        just_write_grade_history(history_file,
                                gradeable_deadline_longstring,
                                submission_longstring,
                                seconds_late,
                                first_access_string,
                                access_duration,
                                queue_obj["queue_time"],
                                "BATCH" if is_batch_job else "INTERACTIVE",
                                grading_began_longstring,
                                int(waittime),
                                grading_finished_longstring,
                                int(gradingtime),
                                grade_result,
                                queue_obj.get("revision", None))

        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            f.write("FINISHED GRADING!\n")
        
        log_message(log_path, job_id,is_batch_job,which_untrusted,item_name,"grade:",gradingtime,grade_result)

    with open(os.path.join(tmp_results,"queue_file.json"),'w') as outfile:
        json.dump(queue_obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(tmp_results,"logs"))
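# Editorial sketch, not in the original: archive_autograding_results branches
# on the queue file, so a hypothetical "generate output" job such as
#
#     {"semester": "s21", "course": "csci1200",
#      "gradeable": "hw01", "generate_output": true}
#
# gets an item_name under .../generated_output/... and skips the per-student
# grade.txt, results.json, and history bookkeeping above.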
Example #21
def prepare_job(my_name,which_machine,which_untrusted,next_directory,next_to_grade):
    # verify the DAEMON_USER is running this script
    if int(os.getuid()) != int(DAEMON_UID):
        grade_items_logging.log_message(JOB_ID, message="ERROR: must be run by DAEMON_USER")
        raise SystemExit("ERROR: the grade_item.py script must be run by the DAEMON_USER")

    if which_machine == 'localhost':
        address = which_machine
    else:
        address = which_machine.split('@')[1]

    # prepare the zip files
    try:
        autograding_zip_tmp,submission_zip_tmp = packer_unpacker.prepare_autograding_and_submission_zip(which_machine,which_untrusted,next_directory,next_to_grade)
        fully_qualified_domain_name = socket.getfqdn()
        servername_workername = "{0}_{1}".format(fully_qualified_domain_name, address)
        autograding_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_autograding.zip")
        submission_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_submission.zip")
        todo_queue_file = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_queue.json")

        with open(next_to_grade, 'r') as infile:
            queue_obj = json.load(infile)
            queue_obj["which_untrusted"] = which_untrusted
            queue_obj["which_machine"] = which_machine
            queue_obj["ship_time"] = dateutils.write_submitty_date(microseconds=True)
    except Exception as e:
        grade_items_logging.log_stack_trace(job_id=JOB_ID, trace=traceback.format_exc())
        grade_items_logging.log_message(JOB_ID, message="ERROR: failed preparing submission zip or accessing next to grade "+str(e))
        print("ERROR: failed preparing submission zip or accessing next to grade ", e)
        return False

    if address == "localhost":
        try:
            shutil.move(autograding_zip_tmp,autograding_zip)
            shutil.move(submission_zip_tmp,submission_zip)
            with open(todo_queue_file, 'w') as outfile:
                json.dump(queue_obj, outfile, sort_keys=True, indent=4)
        except Exception as e:
            grade_items_logging.log_stack_trace(job_id=JOB_ID, trace=traceback.format_exc())
            grade_items_logging.log_message(JOB_ID, message="ERROR: could not move files due to the following error: "+str(e))
            print("ERROR: could not move files due to the following error: {0}".format(e))
            return False
    else:
        # define before the try so the finally clause cannot hit a NameError
        ssh = sftp = None
        try:
            user, host = which_machine.split("@")
            ssh = paramiko.SSHClient()
            ssh.get_host_keys()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

            ssh.connect(hostname = host, username = user, timeout=5)
            sftp = ssh.open_sftp()

            sftp.put(autograding_zip_tmp,autograding_zip)
            sftp.put(submission_zip_tmp,submission_zip)
            with open(todo_queue_file, 'w') as outfile:
                json.dump(queue_obj, outfile, sort_keys=True, indent=4)
            sftp.put(todo_queue_file, todo_queue_file)
            os.remove(todo_queue_file)
            print("Successfully forwarded files to {0}".format(my_name))
            success = True
        except Exception as e:
            grade_items_logging.log_stack_trace(job_id=JOB_ID, trace=traceback.format_exc())
            grade_items_logging.log_message(JOB_ID, message="ERROR: could not move files due to the following error: "+str(e))
            print("Could not move files due to the following error: {0}".format(e))
            success = False
        finally:
            if sftp is not None:
                sftp.close()
            if ssh is not None:
                ssh.close()
            os.remove(autograding_zip_tmp)
            os.remove(submission_zip_tmp)
            return success

    # log completion of job preparation
    obj = packer_unpacker.load_queue_file_obj(JOB_ID,next_directory,next_to_grade)
    partial_path = os.path.join(obj["gradeable"],obj["who"],str(obj["version"]))
    item_name = os.path.join(obj["semester"],obj["course"],"submissions",partial_path)
    is_batch = "regrade" in obj and obj["regrade"]
    grade_items_logging.log_message(JOB_ID, jobname=item_name, which_untrusted=which_untrusted,
                                    is_batch=is_batch, message="Prepared job for " + which_machine)
    return True
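# Editorial sketch, not in the original: a hypothetical invocation of
# prepare_job. which_machine is either 'localhost' or an ssh-style
# 'user@host' string, as the address parsing above shows:
#
#     ok = prepare_job(my_name="primary",
#                      which_machine="submitty_daemon@worker1.example.edu",
#                      which_untrusted="untrusted00",
#                      next_directory="/var/local/submitty/to_be_graded_queue",
#                      next_to_grade="/var/local/submitty/to_be_graded_queue/s21__csci1200__hw01__smithj__1")
#     # True on success; the zips land in .../autograding_TODO/ on the worker.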
Example #22
def replay(starttime,endtime):
    replay_starttime=datetime.datetime.now()
    print (replay_starttime,"replay start: ",starttime)

    # error checking
    if not (starttime.year == endtime.year and
            starttime.month == endtime.month and
            starttime.day == endtime.day):
        print ("ERROR!  invalid replay range ",starttime,"->",endtime, " (must be same day)")
        exit()
    if starttime >= endtime:
        print ("ERROR!  invalid replay range ",starttime,"->",endtime, " (invalid times)")
        exit()

    # find the correct log file
    file = '/var/local/submitty/logs/autograding/{:d}{:02d}{:02d}.txt'.format(starttime.year,starttime.month,starttime.day)
    with open(file,'r') as lines:
        for line in lines:
            things = line.split('|')
            original_time = dateutils.read_submitty_date(things[0])
            # skip items outside of this time range
            if (original_time < starttime or
                original_time > endtime):
                continue
            # skip batch items
            if (things[2].strip() == "BATCH"):
                continue
            # only process the "wait" time (when we started grading the item)
            iswait=things[5].strip()[0:5]
            if (iswait != "wait:"):
                continue
            waittime=float(things[5].split()[1])
            # grab the job name
            my_job = things[4].strip()
            if my_job == "":
                continue
            what = my_job.split('/')
            # for now, only interested in Data Structures and Computer Science 1
            if not (what[1]=="csci1200" or what[1]=="csci1100"):
                continue
            # calculate when this job should be relaunched
            time_multiplier=1.0
            pause_time=replay_starttime+(time_multiplier*(original_time-starttime))
            pause.until(pause_time)
            queue_time = dateutils.write_submitty_date()
            print(datetime.datetime.now(),"      REPLAY: ",original_time," ",my_job)
            # FIXME: This will need to be adjusted for team assignments
            # and assignments with special required capabilities!
            item = {"semester": what[0],
                    "course": what[1],
                    "gradeable": what[3],
                    "user": what[4],
                    "team": "",
                    "who": what[4],
                    "is_team": False,
                    "version": what[5],
                    "required_capabilities": "default",
                    "queue_time": queue_time,
                    "regrade": True,
                    "max_possible_grading_time" : -1 }
            file_name = "__".join([item['semester'], item['course'], item['gradeable'], item['who'], item['version']])
            file_name = os.path.join(SUBMITTY_DATA_DIR, "to_be_graded_queue", file_name)
            with open(file_name, "w") as open_file:
                json.dump(item, open_file, sort_keys=True, indent=4)
            os.system("chmod o+rw {}".format(file_name))
    print (datetime.datetime.now(),"replay end: ",endtime)
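# Editorial sketch, not in the original: a minimal, self-contained version of
# the relaunch-schedule arithmetic used inside replay(). The helper name is
# hypothetical; timedelta supports scaling by a float multiplier directly.

def _sketch_relaunch_time(replay_start, original_start, original_event, multiplier=1.0):
    """Map an event on the original timeline onto the replay timeline."""
    return replay_start + multiplier * (original_event - original_start)

# e.g. an event 10 minutes into the original window, with multiplier 1.0:
#
#     _sketch_relaunch_time(datetime.datetime(2021, 1, 1, 9, 0),
#                           datetime.datetime(2020, 1, 1, 9, 0),
#                           datetime.datetime(2020, 1, 1, 9, 10))
#     # -> datetime.datetime(2021, 1, 1, 9, 10)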
Example #23
def main():
    args = arg_parse()
    data_dir = os.path.join(SUBMITTY_DATA_DIR, "courses")
    data_dirs = data_dir.split(os.sep)
    grade_queue = []
    if args.times is not None:
        starttime = dateutils.read_submitty_date(args.times[0])
        endtime = dateutils.read_submitty_date(args.times[1])
        replay(starttime,endtime)
        exit()
    if len(args.path) == 0:
        print ("ERROR! Must specify at least one path")
        exit()
    for input_path in args.path:
        print ('input path',input_path)
        # handle relative path
        if input_path == '.':
            input_path = os.getcwd()
        if input_path[0] != '/':
            input_path = os.getcwd() + '/' + input_path
        # remove trailing slash (if any)
        input_path = input_path.rstrip('/')
        # split the path into directories
        dirs = input_path.split(os.sep)

        # must be in the known submitty base data directory
        if dirs[0:len(data_dirs)] != data_dirs:
            print("ERROR: BAD REGRADE SUBMISSIONS PATH",input_path)
            raise SystemExit("You need to point to a directory within {}".format(data_dir))

        # Extract directories from provided pattern path (path may be incomplete)
        pattern_semester="*"
        if len(dirs) > len(data_dirs):
            pattern_semester=dirs[len(data_dirs)]
        pattern_course="*"
        if len(dirs) > len(data_dirs)+1:
            pattern_course=dirs[len(data_dirs)+1]
        if len(dirs) > len(data_dirs)+2:
            if (dirs[len(data_dirs)+2] != "submissions"):
                raise SystemExit("You must specify the submissions directory within the course")
        pattern_gradeable="*"
        if len(dirs) > len(data_dirs)+3:
            pattern_gradeable=dirs[len(data_dirs)+3]
        pattern_who="*"
        if len(dirs) > len(data_dirs)+4:
            pattern_who=dirs[len(data_dirs)+4]
        pattern_version="*"
        if len(dirs) > len(data_dirs)+5:
            pattern_version=dirs[len(data_dirs)+5]

        # full pattern may include wildcards!
        pattern = os.path.join(pattern_semester,pattern_course,"submissions",pattern_gradeable,pattern_who,pattern_version)

        print("pattern: ",pattern)

        # Find all matching submissions
        for d in Path(data_dir).glob(pattern):
            d = str(d)
            if os.path.isdir(d):
                my_dirs = d.split(os.sep)
                if len(my_dirs) != len(data_dirs)+6:
                    raise SystemExit("ERROR: directory length not as expected")
                # if requested, only regrade the currently active versions
                if args.active_only and not is_active_version(d):
                    continue
                print("match: ",d)
                my_semester=my_dirs[len(data_dirs)]
                my_course=my_dirs[len(data_dirs)+1]
                my_gradeable=my_dirs[len(data_dirs)+3]
                gradeable_config = os.path.join(data_dir, my_semester, my_course, "config", "build", "build_" + my_gradeable + ".json")
                with open(gradeable_config, 'r') as build_configuration:
                    datastore = json.load(build_configuration)
                    required_capabilities = datastore.get('required_capabilities', 'default')
                    max_grading_time = datastore.get('max_possible_grading_time', -1)

                #get the current time
                queue_time = dateutils.write_submitty_date()
                my_who=my_dirs[len(data_dirs)+4]
                my_version=my_dirs[len(data_dirs)+5]
                my_path=os.path.join(data_dir,my_semester,my_course,"submissions",my_gradeable,my_who,my_version)
                if my_path != d:
                    raise SystemExit("ERROR: path reconstruction failed")
                # add them to the queue

                if '_' not in my_who:
                    my_user = my_who
                    my_team = ""
                    my_is_team = False
                else:
                    my_user = ""
                    my_team = my_who
                    my_is_team = True

                # FIXME: Set this value appropriately
                is_vcs_checkout = False

                grade_queue.append({"semester": my_semester,
                                    "course": my_course,
                                    "gradeable": my_gradeable,
                                    "user": my_user,
                                    "team": my_team,
                                    "who": my_who,
                                    "is_team": my_is_team,
                                    "version": my_version,
                                    "vcs_checkout": is_vcs_checkout,
                                    "required_capabilities" : required_capabilities,
                                    "queue_time":queue_time,
                                    "regrade":True,
                                    "max_possible_grading_time" : max_grading_time})

    # Check before adding a very large number of submissions to the queue
    if len(grade_queue) > 50 and not args.no_input:
        inp = input("Found {:d} matching submissions. Add to queue? [y/n]".format(len(grade_queue)))
        if inp.lower() not in ["yes", "y"]:
            raise SystemExit("Aborting...")

    for item in grade_queue:
        file_name = "__".join([item['semester'], item['course'], item['gradeable'], item['who'], item['version']])
        file_name = os.path.join(SUBMITTY_DATA_DIR, "to_be_graded_queue", file_name)
        with open(file_name, "w") as open_file:
            json.dump(item, open_file, sort_keys=True, indent=4)
        os.system("chmod o+rw {}".format(file_name))

    print("Added {:d} to the queue for regrading.".format(len(grade_queue)))
Example #24
def worker_process(which_machine, address, which_untrusted, my_server):

    # verify the DAEMON_USER is running this script
    if int(os.getuid()) != int(DAEMON_UID):
        autograding_utils.log_message(
            AUTOGRADING_LOG_PATH,
            JOB_ID,
            message="ERROR: must be run by DAEMON_USER")
        raise SystemExit(
            "ERROR: the submitty_autograding_worker.py script must be run by the DAEMON_USER"
        )

    # ignore keyboard interrupts in the worker processes
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    counter = 0

    servername_workername = "{0}_{1}".format(my_server, address)
    autograding_zip = os.path.join(
        SUBMITTY_DATA_DIR, "autograding_TODO",
        servername_workername + "_" + which_untrusted + "_autograding.zip")
    submission_zip = os.path.join(
        SUBMITTY_DATA_DIR, "autograding_TODO",
        servername_workername + "_" + which_untrusted + "_submission.zip")
    todo_queue_file = os.path.join(
        SUBMITTY_DATA_DIR, "autograding_TODO",
        servername_workername + "_" + which_untrusted + "_queue.json")

    while True:
        if os.path.exists(todo_queue_file):
            try:
                working_directory = os.path.join(
                    "/var/local/submitty/autograding_tmp/", which_untrusted,
                    "tmp")
                results_zip_tmp = grade_item.grade_from_zip(
                    working_directory, which_untrusted, autograding_zip,
                    submission_zip)
                results_zip = os.path.join(
                    SUBMITTY_DATA_DIR, "autograding_DONE",
                    servername_workername + "_" + which_untrusted +
                    "_results.zip")
                done_queue_file = os.path.join(
                    SUBMITTY_DATA_DIR, "autograding_DONE",
                    servername_workername + "_" + which_untrusted +
                    "_queue.json")
                # move doesn't inherit the permissions of the destination directory; copyfile does.
                shutil.copyfile(results_zip_tmp, results_zip)

                os.remove(results_zip_tmp)
                with open(todo_queue_file, 'r') as infile:
                    queue_obj = json.load(infile)
                    queue_obj["done_time"] = dateutils.write_submitty_date(
                        microseconds=True)
                with open(done_queue_file, 'w') as outfile:
                    json.dump(queue_obj, outfile, sort_keys=True, indent=4)
            except Exception as e:
                autograding_utils.log_message(
                    AUTOGRADING_LOG_PATH,
                    JOB_ID,
                    message="ERROR attempting to unzip graded item: " +
                    which_machine + " " + which_untrusted +
                    ". for more details, see traces entry.")
                autograding_utils.log_stack_trace(AUTOGRADING_STACKTRACE_PATH,
                                                  JOB_ID,
                                                  trace=traceback.format_exc())
                with contextlib.suppress(FileNotFoundError):
                    os.remove(autograding_zip)
                with contextlib.suppress(FileNotFoundError):
                    os.remove(submission_zip)

                #Respond with a failure zip file.
                results_zip = os.path.join(
                    SUBMITTY_DATA_DIR, "autograding_DONE",
                    servername_workername + "_" + which_untrusted +
                    "_results.zip")
                tmp_dir = tempfile.mkdtemp()
                with open(os.path.join(tmp_dir, 'failure.txt'),
                          'w') as outfile:
                    outfile.write("grading failed.\n")

                results_zip_tmp = zipfile.ZipFile(results_zip, 'w')
                results_zip_tmp.write(os.path.join(tmp_dir, 'failure.txt'))
                results_zip_tmp.close()

                shutil.rmtree(tmp_dir)
                done_queue_file = os.path.join(
                    SUBMITTY_DATA_DIR, "autograding_DONE",
                    servername_workername + "_" + which_untrusted +
                    "_queue.json")
                with open(todo_queue_file, 'r') as infile:
                    queue_obj = json.load(infile)
                    queue_obj["done_time"] = dateutils.write_submitty_date(
                        microseconds=True)
                with open(done_queue_file, 'w') as outfile:
                    json.dump(queue_obj, outfile, sort_keys=True, indent=4)
            finally:
                if os.path.exists(autograding_zip):
                    os.remove(autograding_zip)
                if os.path.exists(submission_zip):
                    os.remove(submission_zip)

            with contextlib.suppress(FileNotFoundError):
                os.remove(todo_queue_file)
            counter = 0
        else:
            if counter >= 10:
                print(which_machine, which_untrusted, "wait")
                counter = 0
            counter += 1
            time.sleep(1)
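# Editorial sketch, not in the original: the polling loop above is one side of
# a file-based handshake. The shipper drops three files into autograding_TODO
# and the worker, once the queue.json trigger appears, answers into
# autograding_DONE:
#
#     autograding_TODO/<server>_<worker-address>_<untrusted>_autograding.zip
#     autograding_TODO/<server>_<worker-address>_<untrusted>_submission.zip
#     autograding_TODO/<server>_<worker-address>_<untrusted>_queue.json  (trigger)
#     autograding_DONE/<server>_<worker-address>_<untrusted>_results.zip
#     autograding_DONE/<server>_<worker-address>_<untrusted>_queue.json  (adds "done_time")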
Example #25
def main():
    #In order to successfully re-upload the assignment, we need the directory of the assignment to be inflated,
    #and the semester, course name, and assignment name that it should be uploaded to.
    parser = argparse.ArgumentParser(description='This script was created to help professors re-upload an old semester\'s\
        assignments for autograding and reevaluation. To run it, please create a new assignment via the submitty interface to\
        which you wish to upload a past assignment\'s submissions. Then, run this script.')
    parser.add_argument('-g', '--grade', action='store_true', help='adds assignments to the grading queue.')
    parser.add_argument('ARCHIVED_directory', help='The path to the top level of the old assignment\'s directory tree. This folder\'s\
        subdirectories should mirror a /var/local/submitty/courses/<semester>/<course>/submissions/<assignment_name> folder.')
    parser.add_argument('semester', help='The semester of the course you wish to upload to.')
    parser.add_argument('course_name', help='The name of the course you wish to upload to.')
    parser.add_argument('assignment_name', help='The assignment name you wish to upload to.')
    args = parser.parse_args()

    print("You are about to attempt to copy\n\tSOURCE: " + args.ARCHIVED_directory + "\n\tDESTINATION: "
     + SUBMITTY_DATA_DIR + "/" + args.semester+"/courses/"+args.course_name+"/submissions/"+args.assignment_name)

    #Check the existence of the submission path (the user should have already created the assignment for us to populate)
    CURRENT_course_path = os.path.join(SUBMITTY_DATA_DIR, "courses", args.semester, args.course_name)
    print("Current course path:" + CURRENT_course_path)
    CURRENT_assignment_path = os.path.join(CURRENT_course_path, "submissions", args.assignment_name)
    print("Current assignment path" + CURRENT_assignment_path)
    #we check the bin directory to see if the assignment has ever been built. (Not a guarantee, but high probability of success)
    CURRENT_bin_directory = os.path.join(CURRENT_course_path, "bin", args.assignment_name)
    if not os.path.isdir(CURRENT_bin_directory):
        raise SystemExit("ERROR: The directory " + CURRENT_bin_directory + " does not exist. Please make sure that you\n\t\
            1) Configured the assignment on the course website\n\t2) Did not mistype the program arguments.") 

    else:
        print("SUCCESS: The directory " + CURRENT_assignment_path + " does exist.")    

    #Make a connection to the database and grab the necessary tables.
    database = "submitty_" + args.semester + "_" + args.course_name
    print("Connecting to database: ", end="")
    engine = create_engine("postgresql://{}:{}@{}/{}".format(DB_USER, DB_PASS, DB_HOST,
                                                           database))
    conn = engine.connect()
    metadata = MetaData(bind=engine)
    print("(connection made, metadata bound)...")
    electronic_gradeable_data = Table("electronic_gradeable_data", metadata, autoload=True)
    electronic_gradeable_version = Table("electronic_gradeable_version", metadata, autoload=True)

    course_group = grp.getgrgid(os.stat(os.path.join(SUBMITTY_DATA_DIR,"courses",args.semester,args.course_name)).st_gid)[0]

    #For every user folder in our directory, for every submission folder inside of it, copy over that user/submission, add it to 
    #the database, and create a queue file. 
    for user_folder in sorted(os.listdir(args.ARCHIVED_directory)):
        print("evaluating " + user_folder)
        #If the item we are currently looking at is a directory, we will assume it is a student submission directory.
        ARCHIVED_user_dir = os.path.join(args.ARCHIVED_directory, user_folder)
        print("Starting work on archived dir: " + ARCHIVED_user_dir)
        if not os.path.isdir(ARCHIVED_user_dir):
            print("Skipping the following as it is not a directory: " + ARCHIVED_user_dir)
            continue
        user_name = user_folder
        print("processing user: "******"user_assignment_settings.json"), "r") as open_file:
            user_assignment_settings = json.load(open_file)

        #For every folder inside of the user submission folder.
        for submission in os.listdir(ARCHIVED_user_dir):
            ARCHIVED_submission_path = os.path.join(ARCHIVED_user_dir, submission)
            print("Evaluating submission: " + ARCHIVED_submission_path)
            #If this entry in the directory to be copied is not a directory, it is a user_assignments settings.
            #Right now, we ignore these.
            #TODO copy over the user_assignment_settings files and use them instead of creating new ones.
            if not os.path.isdir(ARCHIVED_submission_path):
                if submission == ".submit.timestamp":
                    new_timestamp_path = os.path.join(CURRENT_user_path, submission)
                    with open(ARCHIVED_submission_path, 'r') as old_timestamp, open(new_timestamp_path, 'w') as new_timestamp:
                        for line in old_timestamp:
                            new_timestamp.write(line)
                    os.system("chown -R submitty_php:{} {}".format(course_group, new_timestamp_path))
                continue
            #if the student's submission dir does not exist, make it.
            if not os.path.isdir(CURRENT_user_path):
                os.makedirs(CURRENT_user_path)
            #The current directory for the new submission
            CURRENT_submission_path = os.path.join(CURRENT_user_path, submission)
            #TODO If the submission already exists, give up?
            if os.path.isdir(CURRENT_submission_path):
                print("Skipped " + CURRENT_submission_path + " as it already exists.")
                continue
            #This permission change also applies recursively to the underlying submission paths.
            print("Set permissions on the submission")
            os.system("chown -R submitty_php:{} {}".format(course_group, CURRENT_user_path))

            #TODO: Sort the submissions so that they are guaranteed to be given in chronological (1,2,3,etc) order.
            #copy in the submission directory.
            print("Copied from\n\tSOURCE: " + ARCHIVED_submission_path +"\n\t" + "DESTINATION: " + CURRENT_submission_path)
            shutil.copytree(ARCHIVED_submission_path, CURRENT_submission_path)
            #give the appropriate permissions
            os.system("chown -R submitty_php:{} {}".format(course_group, CURRENT_submission_path))
            #add each submission to the database.
            current_time_string = dateutils.write_submitty_date()

            conn.execute(electronic_gradeable_data.insert(), g_id=args.assignment_name, user_id=user_name,
                         g_version=submission, submission_time=current_time_string)
            #If this is the first submission, create a new entry in the table, otherwise, update.
            #TODO use a more reliable method of determining if this is the first submission.
            if int(submission) == 1:
                print("Entered new user " + user_name + " because submission was " + submission)
                conn.execute(electronic_gradeable_version.insert(), g_id=args.assignment_name, user_id=user_name,
                         active_version=user_assignment_settings['active_version'])
            else:
                print("UPDATED: where g_id is " + args.assignment_name + " and user id is " + user_name + " to value " + str(user_assignment_settings['active_version']))
                stmt = electronic_gradeable_version.update().\
                        where(and_(electronic_gradeable_version.c.g_id==args.assignment_name, electronic_gradeable_version.c.user_id==user_name)).\
                        values(active_version=user_assignment_settings['active_version'])
                conn.execute(stmt)
            with open(os.path.join(CURRENT_user_path, "user_assignment_settings.json"), "w") as open_file:
                json.dump(user_assignment_settings, open_file, indent=4)

            if args.grade:
                # Create a queue file for each submission
                queue_file = "__".join([args.semester, args.course_name, args.assignment_name, user_name, submission])
                print("Creating queue file:", queue_file)
                queue_file = os.path.join(SUBMITTY_DATA_DIR, "to_be_graded_queue", queue_file)
                with open(queue_file, "w") as open_file:
                    # FIXME: This will need to be adjusted for team assignments
                    # and assignments with special required capabilities!
                    queue_time = dateutils.write_submitty_date()
                    json.dump({"semester": args.semester,
                               "course": args.course_name,
                               "gradeable": args.assignment_name,
                               "user": user_name,
                               "team": "",
                               "who": user_name,
                               "is_team": False,
                               "version": submission,
                               "required_capabilities" : "default",
                               "queue_time": queue_time,
                               "regrade": True,
                               "max_possible_grading_time": -1}, open_file)

    conn.close()
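For reference, the queue file name built above joins its identifying fields with double underscores under the data directory. A tiny sketch with illustrative field values (the default /var/local/submitty install path is assumed):

import os

SUBMITTY_DATA_DIR = "/var/local/submitty"  # assumed default install path
# semester, course, gradeable, who, version -- all illustrative values
fields = ["f19", "csci1100", "hw01", "smithj", "3"]
queue_file = os.path.join(SUBMITTY_DATA_DIR, "to_be_graded_queue", "__".join(fields))
print(queue_file)
# /var/local/submitty/to_be_graded_queue/f19__csci1100__hw01__smithj__3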
Example #26
0
def prepare_job(my_name, which_machine, which_untrusted, next_directory,
                next_to_grade):
    # verify the hwcron user is running this script
    if int(os.getuid()) != int(HWCRON_UID):
        grade_items_logging.log_message(JOB_ID,
                                        message="ERROR: must be run by hwcron")
        raise SystemExit(
            "ERROR: the grade_item.py script must be run by the hwcron user")

    if which_machine == 'localhost':
        address = which_machine
    else:
        address = which_machine.split('@')[1]
    # prepare the zip files
    try:
        autograding_zip_tmp, submission_zip_tmp = grade_item.prepare_autograding_and_submission_zip(
            which_machine, which_untrusted, next_directory, next_to_grade)
        fully_qualified_domain_name = socket.getfqdn()
        servername_workername = "{0}_{1}".format(fully_qualified_domain_name,
                                                 address)
        autograding_zip = os.path.join(
            SUBMITTY_DATA_DIR, "autograding_TODO",
            servername_workername + "_" + which_untrusted + "_autograding.zip")
        submission_zip = os.path.join(
            SUBMITTY_DATA_DIR, "autograding_TODO",
            servername_workername + "_" + which_untrusted + "_submission.zip")
        todo_queue_file = os.path.join(
            SUBMITTY_DATA_DIR, "autograding_TODO",
            servername_workername + "_" + which_untrusted + "_queue.json")

        with open(next_to_grade, 'r') as infile:
            queue_obj = json.load(infile)
            queue_obj["which_untrusted"] = which_untrusted
            queue_obj["which_machine"] = which_machine
            queue_obj["ship_time"] = dateutils.write_submitty_date(
                microseconds=True)
    except Exception as e:
        grade_items_logging.log_message(
            JOB_ID,
            message=
            "ERROR: failed preparing submission zip or accessing next to grade "
            + str(e))
        print(
            "ERROR: failed preparing submission zip or accessing next to grade ",
            e)
        return False

    if address == "localhost":
        try:
            shutil.move(autograding_zip_tmp, autograding_zip)
            shutil.move(submission_zip_tmp, submission_zip)
            with open(todo_queue_file, 'w') as outfile:
                json.dump(queue_obj, outfile, sort_keys=True, indent=4)
        except Exception as e:
            grade_items_logging.log_message(
                JOB_ID,
                message=
                "ERROR: could not move files due to the following error: " +
                str(e))
            print(
                "ERROR: could not move files due to the following error: {0}".
                format(e))
            return False
    else:
        sftp = ssh = None
        try:
            user, host = which_machine.split("@")
            ssh = paramiko.SSHClient()
            ssh.get_host_keys()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

            ssh.connect(hostname=host, username=user)
            sftp = ssh.open_sftp()

            sftp.put(autograding_zip_tmp, autograding_zip)
            sftp.put(submission_zip_tmp, submission_zip)
            with open(todo_queue_file, 'w') as outfile:
                json.dump(queue_obj, outfile, sort_keys=True, indent=4)
            sftp.put(todo_queue_file, todo_queue_file)
            os.remove(todo_queue_file)
            print("Successfully forwarded files to {0}".format(my_name))
            success = True
        except Exception as e:
            grade_items_logging.log_message(
                JOB_ID,
                message=
                "ERROR: could not move files due to the following error: " +
                str(e))
            print(
                "Could not move files due to the following error: {0}".format(
                    e))
            success = False
        finally:
            # ssh.connect() may have failed before sftp/ssh were established,
            # so guard the close calls
            if sftp:
                sftp.close()
            if ssh:
                ssh.close()
            os.remove(autograding_zip_tmp)
            os.remove(submission_zip_tmp)
            return success

    # log completion of job preparation
    obj = grade_item.load_queue_file_obj(JOB_ID, next_directory, next_to_grade)
    partial_path = os.path.join(obj["gradeable"], obj["who"],
                                str(obj["version"]))
    item_name = os.path.join(obj["semester"], obj["course"], "submissions",
                             partial_path)
    is_batch = "regrade" in obj and obj["regrade"]
    grade_items_logging.log_message(JOB_ID,
                                    jobname=item_name,
                                    which_untrusted=which_untrusted,
                                    is_batch=is_batch,
                                    message="Prepared job for " +
                                    which_machine)
    return True
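The heart of prepare_job is stamping the queue object before it is shipped: read the queue JSON, record which machine and untrusted user will run it plus a ship time, and write it into autograding_TODO. A minimal sketch of that handoff (the function name is illustrative):

import json

def stamp_queue_obj(next_to_grade, todo_queue_file, which_machine,
                    which_untrusted, ship_time):
    # read the original queue entry
    with open(next_to_grade, 'r') as infile:
        queue_obj = json.load(infile)
    # record routing information and the ship timestamp
    queue_obj["which_machine"] = which_machine
    queue_obj["which_untrusted"] = which_untrusted
    queue_obj["ship_time"] = ship_time
    # write the stamped entry for the worker to pick up
    with open(todo_queue_file, 'w') as outfile:
        json.dump(queue_obj, outfile, sort_keys=True, indent=4)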
Example #27
0
def worker_process(which_machine, address, which_untrusted, my_server):

    # verify the DAEMON_USER is running this script
    if int(os.getuid()) != int(DAEMON_UID):
        autograding_utils.log_message(
            AUTOGRADING_LOG_PATH,
            JOB_ID,
            message="ERROR: must be run by DAEMON_USER")
        raise SystemExit(
            "ERROR: the submitty_autograding_worker.py script must be run by the DAEMON_USER"
        )

    # ignore keyboard interrupts in the worker processes
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    counter = 0

    # The full name of this worker
    worker_name = f"{my_server}_{address}_{which_untrusted}"

    # Set up key autograding_DONE directories
    done_dir = os.path.join(SUBMITTY_DATA_DIR, "autograding_DONE")
    done_queue_file = os.path.join(done_dir, f"{worker_name}_queue.json")
    results_zip = os.path.join(done_dir, f"{worker_name}_results.zip")

    # Set up key autograding_TODO directories
    todo_dir = os.path.join(SUBMITTY_DATA_DIR, "autograding_TODO")
    autograding_zip = os.path.join(todo_dir, f"{worker_name}_autograding.zip")
    submission_zip = os.path.join(todo_dir, f"{worker_name}_submission.zip")
    todo_queue_file = os.path.join(todo_dir, f"{worker_name}_queue.json")

    # Establish the directory in which we will do our work
    working_directory = os.path.join(SUBMITTY_DATA_DIR, 'autograding_tmp',
                                     which_untrusted, "tmp")

    while True:
        if os.path.exists(todo_queue_file):
            try:
                # Attempt to grade the submission. Get back the location of the results.
                results_zip_tmp = grade_item.grade_from_zip(
                    working_directory, which_untrusted, autograding_zip,
                    submission_zip)
                shutil.copyfile(results_zip_tmp, results_zip)
                os.remove(results_zip_tmp)
                # At this point, we will assume that grading has progressed successfully enough to
                # return a coherent answer, and will say as much in the done queue file
                response = {
                    'status': 'success',
                    'message': 'Grading completed successfully'
                }
            except Exception:
                # If we threw an error while grading, log it.
                autograding_utils.log_message(
                    AUTOGRADING_LOG_PATH,
                    JOB_ID,
                    message=
                    f"ERROR attempting to unzip graded item: {which_machine} "
                    f"{which_untrusted}. for more details, see traces entry.")
                autograding_utils.log_stack_trace(AUTOGRADING_STACKTRACE_PATH,
                                                  JOB_ID,
                                                  trace=traceback.format_exc())
                # TODO: It is possible that autograding failed after multiple steps.
                # In this case, we may be able to salvage a portion of the autograding_results
                # directory.

                # Because we failed grading, we will respond with an empty results zip.
                results_zip_tmp = zipfile.ZipFile(results_zip, 'w')
                results_zip_tmp.close()

                # We will also respond with a done_queue_file which contains a failure message.
                response = {
                    'status': 'fail',
                    'message': traceback.format_exc()
                }
            finally:
                # Regardless of if we succeeded or failed, create a done queue file to
                # send to the shipper.
                with open(todo_queue_file, 'r') as infile:
                    queue_obj = json.load(infile)
                    queue_obj["done_time"] = dateutils.write_submitty_date(
                        milliseconds=True)
                    queue_obj['autograding_status'] = response
                with open(done_queue_file, 'w') as outfile:
                    json.dump(queue_obj, outfile, sort_keys=True, indent=4)
                # Clean up temporary files.
                with contextlib.suppress(FileNotFoundError):
                    os.remove(autograding_zip)
                with contextlib.suppress(FileNotFoundError):
                    os.remove(submission_zip)
                with contextlib.suppress(FileNotFoundError):
                    os.remove(todo_queue_file)
            counter = 0
        else:
            if counter >= 10:
                print(which_machine, which_untrusted, "wait")
                counter = 0
            counter += 1
            time.sleep(1)
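Example #28 below calls establish_ssh_connection(), which is not included in these excerpts. Judging from the inline paramiko code in the earlier prepare_job, a plausible minimal stand-in looks like this (the real helper's retry and logging behavior is not shown):

import paramiko

def establish_ssh_connection(my_name, user, host):
    # my_name is presumably used by the real helper for logging; unused here
    ssh = paramiko.SSHClient()
    ssh.get_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(hostname=host, username=user)
    return ssh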
Example #28
0
def prepare_job(my_name,which_machine,which_untrusted,next_directory,next_to_grade):
    # verify the DAEMON_USER is running this script
    if int(os.getuid()) != int(DAEMON_UID):
        autograding_utils.log_message(AUTOGRADING_LOG_PATH, JOB_ID, message="ERROR: must be run by DAEMON_USER")
        raise SystemExit("ERROR: the submitty_autograding_shipper.py script must be run by the DAEMON_USER")

    if which_machine == 'localhost':
        address = which_machine
    else:
        address = which_machine.split('@')[1]

    # prepare the zip files
    try:
        autograding_zip_tmp,submission_zip_tmp = packer_unpacker.prepare_autograding_and_submission_zip(which_machine,which_untrusted,next_directory,next_to_grade)
        fully_qualified_domain_name = socket.getfqdn()
        servername_workername = "{0}_{1}".format(fully_qualified_domain_name, address)
        autograding_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_autograding.zip")
        submission_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_submission.zip")
        todo_queue_file = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_queue.json")

        with open(next_to_grade, 'r') as infile:
            queue_obj = json.load(infile)
            queue_obj["which_untrusted"] = which_untrusted
            queue_obj["which_machine"] = which_machine
            queue_obj["ship_time"] = dateutils.write_submitty_date(microseconds=True)
    except Exception as e:
        autograding_utils.log_stack_trace(AUTOGRADING_STACKTRACE_PATH, job_id=JOB_ID, trace=traceback.format_exc())
        autograding_utils.log_message(AUTOGRADING_LOG_PATH, JOB_ID, message="ERROR: failed preparing submission zip or accessing next to grade "+str(e))
        print("ERROR: failed preparing submission zip or accessing next to grade ", e)
        return False

    if address == "localhost":
        try:
            shutil.move(autograding_zip_tmp,autograding_zip)
            shutil.move(submission_zip_tmp,submission_zip)
            with open(todo_queue_file, 'w') as outfile:
                json.dump(queue_obj, outfile, sort_keys=True, indent=4)
        except Exception as e:
            autograding_utils.log_stack_trace(AUTOGRADING_STACKTRACE_PATH, job_id=JOB_ID, trace=traceback.format_exc())
            autograding_utils.log_message(AUTOGRADING_LOG_PATH, JOB_ID, message="ERROR: could not move files due to the following error: "+str(e))
            print("ERROR: could not move files due to the following error: {0}".format(e))
            return False
    else:
        sftp = ssh = None
        try:
            user, host = which_machine.split("@")

            ssh = establish_ssh_connection(my_name, user, host)
            sftp = ssh.open_sftp()
            sftp.put(autograding_zip_tmp,autograding_zip)
            sftp.put(submission_zip_tmp,submission_zip)
            with open(todo_queue_file, 'w') as outfile:
                json.dump(queue_obj, outfile, sort_keys=True, indent=4)
            sftp.put(todo_queue_file, todo_queue_file)
            os.remove(todo_queue_file)
            print("Successfully forwarded files to {0}".format(my_name))
            success = True
        except Exception as e:
            autograding_utils.log_stack_trace(AUTOGRADING_STACKTRACE_PATH, job_id=JOB_ID, trace=traceback.format_exc())
            autograding_utils.log_message(AUTOGRADING_LOG_PATH, JOB_ID, message="ERROR: could not move files due to the following error: "+str(e))
            print("Could not move files due to the following error: {0}".format(e))
            success = False
        finally:
            if sftp:
                sftp.close()
            if ssh:
                ssh.close()
            os.remove(autograding_zip_tmp)
            os.remove(submission_zip_tmp)
            return success

    # log completion of job preparation
    obj = packer_unpacker.load_queue_file_obj(JOB_ID,next_directory,next_to_grade)
    if obj.get("generate_output"):
        item_name = os.path.join(obj["semester"],obj["course"],"generated_output",obj["gradeable"])
    else:
        partial_path = os.path.join(obj["gradeable"],obj["who"],str(obj["version"]))
        item_name = os.path.join(obj["semester"],obj["course"],"submissions",partial_path)
    is_batch = "regrade" in obj and obj["regrade"]
    autograding_utils.log_message(AUTOGRADING_LOG_PATH, JOB_ID, jobname=item_name, which_untrusted=which_untrusted,
                                    is_batch=is_batch, message="Prepared job for " + which_machine)
    return True
Example #29
0
def main():
    args = arg_parse()
    data_dir = os.path.join(SUBMITTY_DATA_DIR, "courses")
    data_dirs = data_dir.split(os.sep)
    grade_queue = []
    if args.times is not None:
        starttime = dateutils.read_submitty_date(args.times[0])
        endtime = dateutils.read_submitty_date(args.times[1])
        replay(starttime,endtime)
        exit()
    if len(args.path) == 0:
        print("ERROR! Must specify at least one path")
        exit()
    for input_path in args.path:
        print('input path', input_path)
        # handle relative path
        if input_path == '.':
            input_path = os.getcwd()
        if input_path[0] != '/':
            input_path = os.getcwd() + '/' + input_path
        # remove trailing slash (if any)
        input_path = input_path.rstrip('/')
        # split the path into directories
        dirs = input_path.split(os.sep)

        # must be in the known submitty base data directory
        if dirs[0:len(data_dirs)] != data_dirs:
            print("ERROR: BAD REGRADE SUBMISSIONS PATH",input_path)
            raise SystemExit("You need to point to a directory within {}".format(data_dir))

        # Extract directories from provided pattern path (path may be incomplete)
        pattern_semester="*"
        if len(dirs) > len(data_dirs):
            pattern_semester=dirs[len(data_dirs)]
        pattern_course="*"
        if len(dirs) > len(data_dirs)+1:
            pattern_course=dirs[len(data_dirs)+1]
        if len(dirs) > len(data_dirs)+2:
            if (dirs[len(data_dirs)+2] != "submissions"):
                raise SystemExit("You must specify the submissions directory within the course")
        pattern_gradeable="*"
        if len(dirs) > len(data_dirs)+3:
            pattern_gradeable=dirs[len(data_dirs)+3]
        pattern_who="*"
        if len(dirs) > len(data_dirs)+4:
            pattern_who=dirs[len(data_dirs)+4]
        pattern_version="*"
        if len(dirs) > len(data_dirs)+5:
            pattern_version=dirs[len(data_dirs)+5]

        # full pattern may include wildcards!
        pattern = os.path.join(data_dir,pattern_semester,pattern_course,"submissions",pattern_gradeable,pattern_who,pattern_version)

        print("pattern: ",pattern)

        # Find all matching submissions
        for d in glob.glob(pattern):
            if os.path.isdir(d):
                my_dirs = d.split(os.sep)
                if len(my_dirs) != len(data_dirs)+6:
                    raise SystemExit("ERROR: directory length not as expected")
                # if requested, only regrade the currently active versions
                if args.active_only and not is_active_version(d):
                    continue
                print("match: ",d)
                my_semester=my_dirs[len(data_dirs)]
                my_course=my_dirs[len(data_dirs)+1]
                my_gradeable=my_dirs[len(data_dirs)+3]
                gradeable_config = os.path.join(data_dir,my_semester,my_course,"config/build/"+"build_"+my_gradeable+".json")
                with open(gradeable_config, 'r') as build_configuration:
                    datastore = json.load(build_configuration)
                    required_capabilities = datastore.get('required_capabilities', 'default')
                    max_grading_time = datastore.get('max_possible_grading_time', -1)

                #get the current time
                queue_time = dateutils.write_submitty_date()
                my_who=my_dirs[len(data_dirs)+4]
                my_version=my_dirs[len(data_dirs)+5]
                my_path=os.path.join(data_dir,my_semester,my_course,"submissions",my_gradeable,my_who,my_version)
                if my_path != d:
                    raise SystemExit("ERROR: path reconstruction failed")
                # add them to the queue

                if '_' not in my_who:
                    my_user = my_who
                    my_team = ""
                    my_is_team = False
                else:
                    my_user = ""
                    my_team = my_who
                    my_is_team = True

                grade_queue.append({"semester": my_semester,
                                    "course": my_course,
                                    "gradeable": my_gradeable,
                                    "user": my_user,
                                    "team": my_team,
                                    "who": my_who,
                                    "is_team": my_is_team,
                                    "version": my_version,
                                    "required_capabilities" : required_capabilities,
                                    "queue_time":queue_time,
                                    "regrade":True,
                                    "max_possible_grading_time" : max_grading_time})

    # Check before adding a very large number of systems to the queue
    if len(grade_queue) > 50 and not args.no_input:
        inp = input("Found {:d} matching submissions. Add to queue? [y/n]".format(len(grade_queue)))
        if inp.lower() not in ["yes", "y"]:
            raise SystemExit("Aborting...")

    for item in grade_queue:
        file_name = "__".join([item['semester'], item['course'], item['gradeable'], item['who'], item['version']])
        file_name = os.path.join(SUBMITTY_DATA_DIR, "to_be_graded_queue", file_name)
        with open(file_name, "w") as open_file:
            json.dump(item, open_file, sort_keys=True, indent=4)
        os.system("chmod o+rw {}".format(file_name))

    print("Added {:d} to the queue for regrading.".format(len(grade_queue)))
Example #30
0
def prepare_autograding_and_submission_zip(which_machine,which_untrusted,next_directory,next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id,next_directory,next_to_grade)

    partial_path = os.path.join(obj["gradeable"],obj["who"],str(obj["version"]))
    item_name = os.path.join(obj["semester"],obj["course"],"submissions",partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR,"courses",item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(job_id, message="ERROR: the submission directory does not exist " + submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist", submission_path)
    print(which_machine,which_untrusted,"prepare zip",submission_path)
    is_vcs,vcs_type,vcs_base_url,vcs_subdirectory = get_vcs_info(SUBMITTY_DATA_DIR,obj["semester"],obj["course"],obj["gradeable"],obj["who"],obj["team"])

    is_batch_job = "regrade" in obj and obj["regrade"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory,next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began-queue_time).total_seconds()
    grade_items_logging.log_message(job_id,is_batch_job,"zip",item_name,"wait:",waittime,"")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"provided_code",obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"test_input",obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"test_output",obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"custom_validation_code",obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"bin",obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"config","form","form_"+obj["gradeable"]+".json")
    complete_config = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"config","complete_config","complete_config_"+obj["gradeable"]+".json")

    if not os.path.exists(form_json_config):
        grade_items_logging.log_message(job_id,message="ERROR: the form json file does not exist " + form_json_config)
        raise RuntimeError("ERROR: the form json file does not exist ",form_json_config)
    if not os.path.exists(complete_config):
        grade_items_logging.log_message(job_id,message="ERROR: the complete config file does not exist " + complete_config)
        raise RuntimeError("ERROR: the complete config file does not exist ",complete_config)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp,"TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp,"TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path,os.path.join(tmp_autograding,"provided_code"))
    copytree_if_exists(test_input_path,os.path.join(tmp_autograding,"test_input"))
    copytree_if_exists(test_output_path,os.path.join(tmp_autograding,"test_output"))
    copytree_if_exists(custom_validation_code_path,os.path.join(tmp_autograding,"custom_validation_code"))
    copytree_if_exists(bin_path,os.path.join(tmp_autograding,"bin"))
    shutil.copy(form_json_config,os.path.join(tmp_autograding,"form.json"))
    shutil.copy(complete_config,os.path.join(tmp_autograding,"complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"checkout",partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"results",partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path,"history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        shutil.copy(history_file,os.path.join(tmp_submission,"history.json"))
    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get("use_checkout_subdirectory","")
    checkout_subdir_path = os.path.join(checkout_path,checkout_subdirectory)
    queue_file = os.path.join(next_directory,next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp,"TMP_SUBMISSION","tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs,"overall.txt"), 'a')

    # --------------------------------------------------------------------
    # CONFIRM WE HAVE A CHECKOUT OF THE STUDENT'S REPO
    if is_vcs:
        # there should be a checkout log file in the results directory
        # move that file to the tmp logs directory..
        vcs_checkout_logfile = os.path.join(results_path,"logs","vcs_checkout.txt")
        if os.path.isfile(vcs_checkout_logfile):
            shutil.move(vcs_checkout_logfile,tmp_logs)
        else:
            grade_items_logging.log_message(job_id, message="ERROR: missing vcs_checkout.txt logfile "+str(vcs_checkout_logfile))


    copytree_if_exists(submission_path,os.path.join(tmp_submission,"submission"))
    copytree_if_exists(checkout_path,os.path.join(tmp_submission,"checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission,"queue_file.json"),'w') as outfile:
        json.dump(obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission,".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    grade_item.zip_my_directory(tmp_autograding,my_autograding_zip_file)
    grade_item.zip_my_directory(tmp_submission,my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    #grade_items_logging.log_message(job_id,is_batch_job,"done zip",item_name)

    return (my_autograding_zip_file,my_submission_zip_file)
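A note on the mkstemp() calls above: tempfile.mkstemp() returns an open OS-level file descriptor alongside the path, and that descriptor must be closed explicitly or it leaks. A minimal sketch:

import os
import tempfile

fd, zip_path = tempfile.mkstemp()
# ... write the archive at zip_path ...
os.close(fd)  # close the raw descriptor; the file itself remains at zip_path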
Example #31
0
def prepare_autograding_and_submission_zip(which_machine, which_untrusted,
                                           next_directory, next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(
        random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id, next_directory, next_to_grade)

    partial_path = os.path.join(obj["gradeable"], obj["who"],
                                str(obj["version"]))
    item_name = os.path.join(obj["semester"], obj["course"], "submissions",
                             partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the submission directory does not exist " +
            submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist",
                           submission_path)
    print(which_machine, which_untrusted, "prepare zip", submission_path)
    is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
        SUBMITTY_DATA_DIR, obj["semester"], obj["course"], obj["gradeable"],
        obj["who"], obj["team"])

    is_batch_job = "regrade" in obj and obj["regrade"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory, next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began - queue_time).total_seconds()
    grade_items_logging.log_message(job_id, is_batch_job, "zip", item_name,
                                    "wait:", waittime, "")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                      obj["semester"], obj["course"],
                                      "provided_code", obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                   obj["semester"], obj["course"],
                                   "test_input", obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"],
                                    "test_output", obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                               obj["semester"], obj["course"],
                                               "custom_validation_code",
                                               obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                            obj["course"], "bin", obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"], "config",
                                    "form",
                                    "form_" + obj["gradeable"] + ".json")
    complete_config = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"], "config",
        "complete_config", "complete_config_" + obj["gradeable"] + ".json")

    if not os.path.exists(form_json_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the form json file does not exist " +
            form_json_config)
        raise RuntimeError("ERROR: the form json file does not exist ",
                           form_json_config)
    if not os.path.exists(complete_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the complete config file does not exist " +
            complete_config)
        raise RuntimeError("ERROR: the complete config file does not exist ",
                           complete_config)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path,
                       os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(test_input_path,
                       os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path,
                       os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(custom_validation_code_path,
                       os.path.join(tmp_autograding, "custom_validation_code"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config,
                os.path.join(tmp_autograding, "complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                 obj["course"], "checkout", partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                obj["course"], "results", partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))
    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)
    queue_file = os.path.join(next_directory, next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a')

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    # --------------------------------------------------------------------
    # CHECKOUT THE STUDENT's REPO
    if is_vcs:

        # cleanup the previous checkout (if it exists)
        shutil.rmtree(checkout_path, ignore_errors=True)
        os.makedirs(checkout_path, exist_ok=True)

        try:
            # If we are public or private github, we will have an empty vcs_subdirectory
            if vcs_subdirectory == '':
                with open(os.path.join(
                        submission_path,
                        ".submit.VCS_CHECKOUT")) as submission_vcs_file:
                    VCS_JSON = json.load(submission_vcs_file)
                    git_user_id = VCS_JSON["git_user_id"]
                    git_repo_id = VCS_JSON["git_repo_id"]
                    if not valid_github_user_id(git_user_id):
                        raise Exception(
                            "Invalid GitHub user/organization name: '" +
                            git_user_id + "'")
                    if not valid_github_repo_id(git_repo_id):
                        raise Exception("Invalid GitHub repository name: '" +
                                        git_repo_id + "'")
                    # construct path for GitHub
                    vcs_path = "https://www.github.com/" + git_user_id + "/" + git_repo_id

            # is vcs_subdirectory standalone or should it be combined with base_url?
            elif vcs_subdirectory[0] == '/' or '://' in vcs_subdirectory:
                vcs_path = vcs_subdirectory
            else:
                if '://' in vcs_base_url:
                    vcs_path = urllib.parse.urljoin(vcs_base_url,
                                                    vcs_subdirectory)
                else:
                    vcs_path = os.path.join(vcs_base_url, vcs_subdirectory)

            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print("====================================\nVCS CHECKOUT",
                      file=f)
                print('vcs_base_url', vcs_base_url, file=f)
                print('vcs_subdirectory', vcs_subdirectory, file=f)
                print('vcs_path', vcs_path, file=f)
                print(['/usr/bin/git', 'clone', vcs_path, checkout_path],
                      file=f)

            # git clone may fail -- because repository does not exist,
            # or because we don't have appropriate access credentials
            try:
                subprocess.check_call(
                    ['/usr/bin/git', 'clone', vcs_path, checkout_path])
                os.chdir(checkout_path)

                # determine which version we need to checkout
                # if the repo is empty or the master branch does not exist, this command will fail
                try:
                    what_version = subprocess.check_output([
                        'git', 'rev-list', '-n', '1',
                        '--before="' + submission_string + '"', 'master'
                    ])
                    what_version = str(what_version.decode('utf-8')).rstrip()
                    if what_version == "":
                        # oops, pressed the grade button before a valid commit
                        shutil.rmtree(checkout_path, ignore_errors=True)
                    else:
                        # and check out the right version
                        subprocess.call(
                            ['git', 'checkout', '-b', 'grade', what_version])
                    os.chdir(tmp)
                    subprocess.call(['ls', '-lR', checkout_path],
                                    stdout=open(tmp_logs + "/overall.txt",
                                                'a'))
                    obj['revision'] = what_version

                # exception on git rev-list
                except subprocess.CalledProcessError as error:
                    grade_items_logging.log_message(
                        job_id,
                        message=
                        "ERROR: failed to determine version on master branch "
                        + str(error))
                    os.chdir(checkout_path)
                    with open(
                            os.path.join(
                                checkout_path,
                                "failed_to_determine_version_on_master_branch.txt"
                            ), 'w') as f:
                        print(str(error), file=f)
                        print("\n", file=f)
                        print(
                            "Check to be sure the repository is not empty.\n",
                            file=f)
                        print(
                            "Check to be sure the repository has a master branch.\n",
                            file=f)
                        print(
                            "And check to be sure the timestamps on the master branch are reasonable.\n",
                            file=f)

            # exception on git clone
            except subprocess.CalledProcessError as error:
                grade_items_logging.log_message(
                    job_id,
                    message="ERROR: failed to clone repository " + str(error))
                os.chdir(checkout_path)
                with open(
                        os.path.join(checkout_path,
                                     "failed_to_clone_repository.txt"),
                        'w') as f:
                    print(str(error), file=f)
                    print("\n", file=f)
                    print("Check to be sure the repository exists.\n", file=f)
                    print(
                        "And check to be sure the submitty_daemon user has appropriate access credentials.\n",
                        file=f)

        # exception in constructing full git repository url/path
        except Exception as error:
            grade_items_logging.log_message(
                job_id,
                message="ERROR: failed to construct valid repository url/path"
                + str(error))
            os.chdir(checkout_path)
            with open(
                    os.path.join(
                        checkout_path,
                        "failed_to_construct_valid_repository_url.txt"),
                    'w') as f:
                print(str(error), file=f)
                print("\n", file=f)
                print("Check to be sure the repository exists.\n", file=f)
                print(
                    "And check to be sure the submitty_daemon user has appropriate access credentials.\n",
                    file=f)

    copytree_if_exists(submission_path,
                       os.path.join(tmp_submission, "submission"))
    copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    grade_item.zip_my_directory(tmp_autograding, my_autograding_zip_file)
    grade_item.zip_my_directory(tmp_submission, my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    #grade_items_logging.log_message(job_id,is_batch_job,"done zip",item_name)

    return (my_autograding_zip_file, my_submission_zip_file)
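The version-selection step in the VCS checkout above is worth isolating: git rev-list picks the last commit on master at or before the submission timestamp, and that commit is then checked out on a throwaway "grade" branch. A minimal sketch with an illustrative timestamp, run from inside the clone:

import subprocess

submission_string = "2020-06-12 03:21:30-0400"  # illustrative timestamp
what_version = subprocess.check_output([
    'git', 'rev-list', '-n', '1',
    '--before="' + submission_string + '"', 'master'
]).decode('utf-8').rstrip()
if what_version:
    # check out the chosen commit on a temporary grading branch
    subprocess.call(['git', 'checkout', '-b', 'grade', what_version])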
Example #32
0
def grade_from_zip(my_autograding_zip_file, my_submission_zip_file,
                   which_untrusted):
    os.chdir(SUBMITTY_DATA_DIR)
    tmp = os.path.join("/var/local/submitty/autograding_tmp/", which_untrusted,
                       "tmp")

    # clean up old usage of this directory
    shutil.rmtree(tmp, ignore_errors=True)
    os.makedirs(tmp)

    which_machine = socket.gethostname()

    # unzip autograding and submission folders
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    unzip_this_file(my_autograding_zip_file, tmp_autograding)
    unzip_this_file(my_submission_zip_file, tmp_submission)
    os.remove(my_autograding_zip_file)
    os.remove(my_submission_zip_file)

    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")

    queue_file = os.path.join(tmp_submission, "queue_file.json")
    with open(queue_file, 'r') as infile:
        queue_obj = json.load(infile)

    queue_time_longstring = queue_obj["queue_time"]
    waittime = queue_obj["waittime"]
    is_batch_job = queue_obj["regrade"]
    job_id = queue_obj["job_id"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    partial_path = os.path.join(queue_obj["gradeable"], queue_obj["who"],
                                str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                             "submissions", partial_path)

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "wait:", waittime, "")

    # --------------------------------------------------------------------
    # START DOCKER

    # WIP: This option file facilitated testing...
    #USE_DOCKER = os.path.isfile("/tmp/use_docker")
    #use_docker_string="grading begins, using DOCKER" if USE_DOCKER else "grading begins (not using docker)"
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message=use_docker_string)

    container = None
    if USE_DOCKER:
        container = subprocess.check_output([
            'docker', 'run', '-t', '-d', '-v', tmp + ':' + tmp, 'ubuntu:custom'
        ]).decode('utf8').strip()
        dockerlaunch_done = dateutils.get_current_time()
        dockerlaunch_time = (dockerlaunch_done - grading_began).total_seconds()
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                        item_name, "dcct:",
                                        dockerlaunch_time,
                                        "docker container created")

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS",
              file=f)

    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp, "TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)

    submission_path = os.path.join(tmp_submission, "submission")
    checkout_path = os.path.join(tmp_submission, "checkout")

    provided_code_path = os.path.join(tmp_autograding, "provided_code")
    test_input_path = os.path.join(tmp_autograding, "test_input")
    test_output_path = os.path.join(tmp_autograding, "test_output")
    custom_validation_code_path = os.path.join(tmp_autograding,
                                               "custom_validation_code")
    bin_path = os.path.join(tmp_autograding, "bin")
    form_json_config = os.path.join(tmp_autograding, "form.json")
    complete_config = os.path.join(tmp_autograding, "complete_config.json")

    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]

    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)
    patterns_submission_to_compilation = complete_config_obj["autograding"][
        "submission_to_compilation"]
    pattern_copy("submission_to_compilation",
                 patterns_submission_to_compilation, submission_path,
                 tmp_compilation, tmp_logs)

    is_vcs = gradeable_config_obj["upload_type"] == "repository"
    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)

    if is_vcs:
        pattern_copy("checkout_to_compilation",
                     patterns_submission_to_compilation, checkout_subdir_path,
                     tmp_compilation, tmp_logs)

    # copy any instructor provided code files to tmp compilation directory
    copy_contents_into(job_id, provided_code_path, tmp_compilation, tmp_logs)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy compile.out to the current directory
    shutil.copy(os.path.join(bin_path, "compile.out"),
                os.path.join(tmp_compilation, "my_compile.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_compilation,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP)

    add_permissions(tmp, stat.S_IROTH | stat.S_IXOTH)
    add_permissions(tmp_logs, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    # grab the submission time
    with open(os.path.join(submission_path, ".submit.timestamp"),
              'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    with open(os.path.join(tmp_logs, "compilation_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            compile_success = subprocess.call([
                'docker', 'exec', '-w', tmp_compilation, container,
                os.path.join(tmp_compilation, 'my_compile.out'),
                queue_obj['gradeable'], queue_obj['who'],
                str(queue_obj['version']), submission_string
            ],
                                              stdout=logfile)
        else:
            compile_success = subprocess.call([
                os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                             "untrusted_execute"), which_untrusted,
                os.path.join(tmp_compilation, "my_compile.out"),
                queue_obj["gradeable"], queue_obj["who"],
                str(queue_obj["version"]), submission_string
            ],
                                              stdout=logfile)

    if compile_success == 0:
        print(which_machine, which_untrusted, "COMPILATION OK")
    else:
        print(which_machine, which_untrusted, "COMPILATION FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="COMPILATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_compilation)

    # remove the compilation program
    os.remove(os.path.join(tmp_compilation, "my_compile.out"))

    # return to the main tmp directory
    os.chdir(tmp)

    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nRUNNER STARTS", file=f)

    tmp_work = os.path.join(tmp, "TMP_WORK")
    os.makedirs(tmp_work)
    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"][
        "submission_to_runner"]
    pattern_copy("submission_to_runner", patterns_submission_to_runner,
                 submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner", patterns_submission_to_runner,
                     checkout_subdir_path, tmp_work, tmp_logs)

    patterns_compilation_to_runner = complete_config_obj["autograding"][
        "compilation_to_runner"]
    pattern_copy("compilation_to_runner", patterns_compilation_to_runner,
                 tmp_compilation, tmp_work, tmp_logs)

    # copy input files to tmp_work directory
    copy_contents_into(job_id, test_input_path, tmp_work, tmp_logs)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy runner.out to the current directory
    shutil.copy(os.path.join(bin_path, "run.out"),
                os.path.join(tmp_work, "my_runner.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # run the run.out as the untrusted user
    with open(os.path.join(tmp_logs, "runner_log.txt"), 'w') as logfile:
        print("LOGGING BEGIN my_runner.out", file=logfile)
        logfile.flush()

        runner_success = -1  # default in case the call below raises
        try:
            if USE_DOCKER:
                runner_success = subprocess.call([
                    'docker', 'exec', '-w', tmp_work, container,
                    os.path.join(tmp_work, 'my_runner.out'),
                    queue_obj['gradeable'], queue_obj['who'],
                    str(queue_obj['version']), submission_string
                ],
                                                 stdout=logfile)
            else:
                runner_success = subprocess.call([
                    os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                                 "untrusted_execute"), which_untrusted,
                    os.path.join(tmp_work, "my_runner.out"),
                    queue_obj["gradeable"], queue_obj["who"],
                    str(queue_obj["version"]), submission_string
                ],
                                                 stdout=logfile)
            logfile.flush()
        except Exception as e:
            # print the exception text directly; encoding to bytes here
            # would write a b'...' repr into the log
            print("ERROR caught runner.out exception={0}".format(e),
                  file=logfile)
            logfile.flush()

        print("LOGGING END my_runner.out", file=logfile)
        logfile.flush()

        killall_success = subprocess.call([
            os.path.join(SUBMITTY_INSTALL_DIR, "sbin", "untrusted_execute"),
            which_untrusted,
            os.path.join(SUBMITTY_INSTALL_DIR, "sbin", "killall.py")
        ],
                                          stdout=logfile)

        print("KILLALL COMPLETE my_runner.out", file=logfile)
        logfile.flush()

        if killall_success != 0:
            msg = 'RUNNER ERROR: had to kill {} process(es)'.format(
                killall_success)
            print("pid", os.getpid(), msg)
            grade_items_logging.log_message(job_id, is_batch_job,
                                            which_untrusted, item_name, "", "",
                                            msg)

    if runner_success == 0:
        print(which_machine, which_untrusted, "RUNNER OK")
    else:
        print(which_machine, which_untrusted, "RUNNER FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="RUNNER FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)
    untrusted_grant_rwx_access(which_untrusted, tmp_compilation)

    # --------------------------------------------------------------------
    # RUN VALIDATOR

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nVALIDATION STARTS",
              file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"][
        "submission_to_validation"]
    pattern_copy("submission_to_validation", patterns_submission_to_validation,
                 submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation",
                     patterns_submission_to_validation, checkout_subdir_path,
                     tmp_work, tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"][
        "compilation_to_validation"]
    pattern_copy("compilation_to_validation",
                 patterns_compilation_to_validation, tmp_compilation, tmp_work,
                 tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(job_id, test_output_path, tmp_work, tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(job_id, custom_validation_code_path, tmp_work, tmp_logs)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy validator.out to the current directory
    shutil.copy(os.path.join(bin_path, "validate.out"),
                os.path.join(tmp_work, "my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(os.path.join(tmp_work, "my_validator.out"),
                    stat.S_IROTH | stat.S_IXOTH)

    # run the validator.out as the untrusted user
    with open(os.path.join(tmp_logs, "validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            validator_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container,
                os.path.join(tmp_work, 'my_validator.out'),
                queue_obj['gradeable'], queue_obj['who'],
                str(queue_obj['version']), submission_string
            ],
                                                stdout=logfile)
        else:
            validator_success = subprocess.call([
                os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                             "untrusted_execute"), which_untrusted,
                os.path.join(tmp_work, "my_validator.out"),
                queue_obj["gradeable"], queue_obj["who"],
                str(queue_obj["version"]), submission_string
            ],
                                                stdout=logfile)

    if validator_success == 0:
        print(which_machine, which_untrusted, "VALIDATOR OK")
    else:
        print(which_machine, which_untrusted, "VALIDATOR FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)

    # grab the result of autograding
    grade_result = ""
    with open(os.path.join(tmp_work, "grade.txt")) as f:
        lines = f.readlines()
        for line in lines:
            line = line.rstrip('\n')
            if line.startswith("Automatic grading total:"):
                grade_result = line

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE
    tmp_results = os.path.join(tmp, "TMP_RESULTS")

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nARCHIVING STARTS", file=f)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    os.makedirs(os.path.join(tmp_results, "details"))

    patterns_work_to_details = complete_config_obj["autograding"][
        "work_to_details"]
    pattern_copy("work_to_details", patterns_work_to_details, tmp_work,
                 os.path.join(tmp_results, "details"), tmp_logs)

    history_file_tmp = os.path.join(tmp_submission, "history.json")
    history_file = os.path.join(tmp_results, "history.json")
    if os.path.isfile(history_file_tmp):
        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(HWCRON_UID), ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)

    # -------------------------------------------------------------
    # create/append to the results history

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    gradeable_deadline_datetime = dateutils.read_submitty_date(
        gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(
        gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)

    seconds_late = int(
        (submission_datetime - gradeable_deadline_datetime).total_seconds())
    # note: negative = not late

    with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)
    grading_finished_longstring = dateutils.write_submitty_date(
        grading_finished)

    gradingtime = (grading_finished - grading_began).total_seconds()

    with open(os.path.join(tmp_submission, "queue_file.json"), 'r') as infile:
        queue_obj = json.load(infile)
    queue_obj["gradingtime"] = gradingtime
    queue_obj["grade_result"] = grade_result
    queue_obj["which_untrusted"] = which_untrusted

    with open(os.path.join(tmp_results, "queue_file.json"), 'w') as outfile:
        json.dump(queue_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    with open(os.path.join(tmp_work, "results.json"), 'r') as read_file:
        results_obj = json.load(read_file)
    if 'revision' in queue_obj.keys():
        results_obj['revision'] = queue_obj['revision']
    with open(os.path.join(tmp_results, "results.json"), 'w') as outfile:
        json.dump(results_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    write_grade_history.just_write_grade_history(
        history_file, gradeable_deadline_longstring, submission_longstring,
        seconds_late, queue_time_longstring, is_batch_job_string,
        grading_began_longstring, int(waittime), grading_finished_longstring,
        int(gradingtime), grade_result)

    os.chdir(SUBMITTY_DATA_DIR)

    if USE_DOCKER:
        with open(os.path.join(tmp_logs, "overall_log.txt"), 'w') as logfile:
            chmod_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container, 'chmod', '-R',
                'o+rwx', '.'
            ],
                                            stdout=logfile)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs, os.path.join(tmp_results, "logs"))

    # zip up results folder
    filehandle, my_results_zip_file = tempfile.mkstemp()
    zip_my_directory(tmp_results, my_results_zip_file)
    os.close(filehandle)
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp_results)
    shutil.rmtree(tmp_work)
    shutil.rmtree(tmp)

    # WIP: extra logging for testing
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message="done grading")

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
        dockerdestroy_done = dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done -
                              grading_finished).total_seconds()
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                        submission_path, "ddt:",
                                        dockerdestroy_time,
                                        "docker container destroyed")

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "grade:", gradingtime,
                                    grade_result)

    return my_results_zip_file
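
A note on the date handling above: the example computes lateness by
round-tripping Submitty date strings through dateutils.read_submitty_date /
write_submitty_date. Below is a minimal self-contained sketch of that
computation, assuming only the "YYYY-MM-DD HH:MM:SS+zzzz" string format used
in these examples; parse_submitty_date is a hypothetical stand-in for
dateutils.read_submitty_date.

from datetime import datetime

def parse_submitty_date(s):
    # hypothetical stand-in for dateutils.read_submitty_date
    return datetime.strptime(s, "%Y-%m-%d %H:%M:%S%z")

submission_datetime = parse_submitty_date("2020-06-12 03:21:30+0000")
deadline_datetime = parse_submitty_date("2020-06-10 23:59:59+0000")
# negative = not late, matching the convention in the example above
seconds_late = int((submission_datetime - deadline_datetime).total_seconds())
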
Example #33
def grade_from_zip(my_autograding_zip_file,my_submission_zip_file,which_untrusted):

    os.chdir(SUBMITTY_DATA_DIR)
    tmp = os.path.join("/var/local/submitty/autograding_tmp/",which_untrusted,"tmp")

    if os.path.exists(tmp):
        untrusted_grant_rwx_access(which_untrusted, tmp)
        add_permissions_recursive(tmp,
                  stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                  stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                  stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # Remove any and all containers left over from past runs.
    old_containers = subprocess.check_output(['docker', 'ps', '-aq', '-f', 'name={0}'.format(which_untrusted)]).split()

    for old_container in old_containers:
        subprocess.call(['docker', 'rm', '-f', old_container.decode('utf8')])

    # clean up old usage of this directory
    shutil.rmtree(tmp,ignore_errors=True)
    os.mkdir(tmp)

    which_machine=socket.gethostname()

    # unzip autograding and submission folders
    tmp_autograding = os.path.join(tmp,"TMP_AUTOGRADING")
    tmp_submission = os.path.join(tmp,"TMP_SUBMISSION")
    unzip_this_file(my_autograding_zip_file, tmp_autograding)
    unzip_this_file(my_submission_zip_file, tmp_submission)
    os.remove(my_autograding_zip_file)
    os.remove(my_submission_zip_file)

    tmp_logs = os.path.join(tmp,"TMP_SUBMISSION","tmp_logs")

    queue_file = os.path.join(tmp_submission,"queue_file.json")
    with open(queue_file, 'r') as infile:
        queue_obj = json.load(infile)

    queue_time_longstring = queue_obj["queue_time"]
    waittime = queue_obj["waittime"]
    is_batch_job = queue_obj["regrade"]
    job_id = queue_obj["job_id"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"
    revision = queue_obj.get("revision", None)

    partial_path = os.path.join(queue_obj["gradeable"],queue_obj["who"],str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"],queue_obj["course"],"submissions",partial_path)

    grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"wait:",waittime,"")

    with open(os.path.join(tmp_submission,".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)

    submission_path = os.path.join(tmp_submission, "submission")
    checkout_path = os.path.join(tmp_submission, "checkout")

    provided_code_path = os.path.join(tmp_autograding, "provided_code")
    test_input_path = os.path.join(tmp_autograding, "test_input")
    test_output_path = os.path.join(tmp_autograding, "test_output")
    custom_validation_code_path = os.path.join(tmp_autograding, "custom_validation_code")
    bin_path = os.path.join(tmp_autograding, "bin")
    form_json_config = os.path.join(tmp_autograding, "form.json")
    complete_config = os.path.join(tmp_autograding, "complete_config.json")

    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]

    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    is_vcs = gradeable_config_obj["upload_type"] == "repository"
    checkout_subdirectory = complete_config_obj["autograding"].get("use_checkout_subdirectory","")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)

    if complete_config_obj.get('one_part_only', False):
        allow_only_one_part(submission_path, os.path.join(tmp_logs, "overall.txt"))
        if is_vcs:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print("WARNING:  ONE_PART_ONLY OPTION DOES NOT MAKE SENSE WITH VCS SUBMISSION", file=f)


    # --------------------------------------------------------------------
    # START DOCKER

    # NOTE: DOCKER SUPPORT PRELIMINARY -- NEEDS MORE SECURITY BEFORE DEPLOYED ON LIVE SERVER
    complete_config = os.path.join(tmp_autograding,"complete_config.json")
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    # Fall back to non-Docker grading if autograding_method is missing.
    autograding_method = complete_config_obj.get("autograding_method", "")
    USE_DOCKER = (autograding_method == "docker")

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS", file=f)
    
    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp,"TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)

    submission_path = os.path.join(tmp_submission,"submission")
    checkout_path = os.path.join(tmp_submission,"checkout")

    provided_code_path = os.path.join(tmp_autograding,"provided_code")
    test_input_path = os.path.join(tmp_autograding,"test_input")
    test_output_path = os.path.join(tmp_autograding,"test_output")
    custom_validation_code_path = os.path.join(tmp_autograding,"custom_validation_code")
    bin_path = os.path.join(tmp_autograding,"bin")
    form_json_config = os.path.join(tmp_autograding,"form.json")


    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]
    
    patterns_submission_to_compilation = complete_config_obj["autograding"]["submission_to_compilation"]

    add_permissions(tmp_logs,stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    if USE_DOCKER:
        print("!!!!!!!!!!!!!!!!!!USING DOCKER!!!!!!!!!!!!!!!!!!!!!!!!")

    with open(complete_config, 'r') as infile:
        config = json.load(infile)
        my_testcases = config['testcases']

    # grab the submission time
    with open(os.path.join(submission_path,".submit.timestamp"), 'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    with open(os.path.join(tmp_logs,"compilation_log.txt"), 'w') as logfile:
        # we start counting from one.
        compile_success = 0  # default if no testcase runs a compilation step
        executable_path_list = list()
        for testcase_num in range(1, len(my_testcases)+1):
            testcase_folder = os.path.join(tmp_compilation, "test{:02}".format(testcase_num))

            if 'type' in my_testcases[testcase_num-1]:
                if my_testcases[testcase_num-1]['type'] != 'FileCheck' and my_testcases[testcase_num-1]['type'] != 'Compilation':
                    continue

                if my_testcases[testcase_num-1]['type'] == 'Compilation':
                    if 'executable_name' in my_testcases[testcase_num-1]:
                        provided_executable_list = my_testcases[testcase_num-1]['executable_name']
                        if not isinstance(provided_executable_list, (list,)):
                            provided_executable_list = list([provided_executable_list])
                        for executable_name in provided_executable_list:
                            if executable_name.strip() == '':
                                continue
                            executable_path = os.path.join(testcase_folder, executable_name)
                            executable_path_list.append((executable_path, executable_name))
            else:
                continue

            os.makedirs(testcase_folder)
            
            pattern_copy("submission_to_compilation",patterns_submission_to_compilation,submission_path,testcase_folder,tmp_logs)

            if is_vcs:
                pattern_copy("checkout_to_compilation",patterns_submission_to_compilation,checkout_subdir_path,testcase_folder,tmp_logs)

            # copy any instructor provided code files to tmp compilation directory
            copy_contents_into(job_id,provided_code_path,testcase_folder,tmp_logs)
            
            # copy compile.out to the current directory
            shutil.copy (os.path.join(bin_path,"compile.out"),os.path.join(testcase_folder,"my_compile.out"))
            add_permissions(os.path.join(testcase_folder,"my_compile.out"), stat.S_IXUSR | stat.S_IXGRP |stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
            #untrusted_grant_rwx_access(which_untrusted, tmp_compilation)          
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            add_permissions_recursive(testcase_folder,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

            if USE_DOCKER:
                try:
                    #There can be only one container for a compilation step, so grab its container image
                    #TODO: set default in load_config_json.cpp
                    if my_testcases[testcase_num-1]['type'] == 'FileCheck':
                        print("performing filecheck in default ubuntu:custom container")
                        container_image = "ubuntu:custom"
                    else:
                        container_image = my_testcases[testcase_num-1]["containers"][0]["container_image"]
                        print('creating a compilation container with image {0}'.format(container_image))
                    untrusted_uid = str(getpwnam(which_untrusted).pw_uid)

                    compilation_container = None
                    compilation_container = subprocess.check_output(['docker', 'create', '-i', '-u', untrusted_uid, '--network', 'none',
                                               '-v', testcase_folder + ':' + testcase_folder,
                                               '-w', testcase_folder,
                                               container_image,
                                               #The command to be run.
                                               os.path.join(testcase_folder, 'my_compile.out'), 
                                               queue_obj['gradeable'],
                                               queue_obj['who'], 
                                               str(queue_obj['version']), 
                                               submission_string, 
                                               '--testcase', str(testcase_num)
                                               ]).decode('utf8').strip()
                    print("starting container")
                    compile_success = subprocess.call(['docker', 'start', '-i', compilation_container],
                                                   stdout=logfile,
                                                   cwd=testcase_folder)
                except Exception:
                    print('An error occurred when compiling with docker.')
                    grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())
                    compile_success = 1  # mark this compilation step as failed
                finally:
                    if compilation_container is not None:
                        subprocess.call(['docker', 'rm', '-f', compilation_container])
                        print("cleaned up compilation container.")
            else:
                compile_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR, "sbin", "untrusted_execute"),
                                                   which_untrusted,
                                                   os.path.join(testcase_folder,"my_compile.out"),
                                                   queue_obj["gradeable"],
                                                   queue_obj["who"],
                                                   str(queue_obj["version"]),
                                                   submission_string,
                                                   '--testcase', str(testcase_num)],
                                                   stdout=logfile, 
                                                   cwd=testcase_folder)
            # remove the compilation program
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            os.remove(os.path.join(testcase_folder,"my_compile.out"))

    if compile_success == 0:
        print (which_machine,which_untrusted,"COMPILATION OK")
    else:
        print (which_machine,which_untrusted,"COMPILATION FAILURE")
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="COMPILATION FAILURE")
    add_permissions_recursive(tmp_compilation,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)


    # return to the main tmp directory
    os.chdir(tmp)


    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nRUNNER STARTS", file=f)
        
    tmp_work = os.path.join(tmp,"TMP_WORK")
    tmp_work_test_input = os.path.join(tmp_work, "test_input")
    tmp_work_submission = os.path.join(tmp_work, "submitted_files")
    tmp_work_compiled = os.path.join(tmp_work, "compiled_files")
    tmp_work_checkout = os.path.join(tmp_work, "checkout")
    
    os.mkdir(tmp_work)

    os.mkdir(tmp_work_test_input)
    os.mkdir(tmp_work_submission)
    os.mkdir(tmp_work_compiled)
    os.mkdir(tmp_work_checkout)

    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"]["submission_to_runner"]

    pattern_copy("submission_to_runner",patterns_submission_to_runner,submission_path,tmp_work_submission,tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner",patterns_submission_to_runner,checkout_subdir_path,tmp_work_checkout,tmp_logs)

    # move the compiled files into the tmp_work_compiled directory
    for path, name in executable_path_list:
        if not os.path.isfile(path): 
            continue
        target_path = os.path.join(tmp_work_compiled, name)
        # ensure the destination directory exists before copying
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        shutil.copy(path, target_path)
        print('copied over {0}'.format(target_path))
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print('grade_item: copied over {0}'.format(target_path), file=f)

    patterns_compilation_to_runner = complete_config_obj["autograding"]["compilation_to_runner"]
    #copy into the actual tmp_work directory for archiving/validating
    pattern_copy("compilation_to_runner",patterns_compilation_to_runner,tmp_compilation,tmp_work,tmp_logs)
    #copy into tmp_work_compiled, which is provided to each testcase
    # TODO change this as our methodology for declaring testcase dependencies becomes more robust
    pattern_copy("compilation_to_runner",patterns_compilation_to_runner,tmp_compilation,tmp_work_compiled,tmp_logs)

    # copy input files to tmp_work directory
    copy_contents_into(job_id,test_input_path,tmp_work_test_input,tmp_logs)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy runner.out to the current directory
    shutil.copy (os.path.join(bin_path,"run.out"),os.path.join(tmp_work,"my_runner.out"))

    # set the appropriate permissions for the newly created directories
    # TODO: this replaces the commented-out code below

    add_permissions(os.path.join(tmp_work,"my_runner.out"), stat.S_IXUSR | stat.S_IXGRP |stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_submission, stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_compiled, stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_checkout, stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    #TODO this is how permissions used to be set. It was removed because of the way it interacts with the sticky bit.
    ## give the untrusted user read/write/execute permissions on the tmp directory & files
    # os.system('ls -al {0}'.format(tmp_work))
    # add_permissions_recursive(tmp_work,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    ##################################################################################################
    # call grade_item_main_runner.py
    runner_success = grade_item_main_runner.executeTestcases(
        complete_config_obj, tmp_logs, tmp_work, queue_obj, submission_string,
        item_name, USE_DOCKER, None, which_untrusted, job_id, grading_began)
    ##################################################################################################

    if runner_success == 0:
        print (which_machine,which_untrusted, "RUNNER OK")
    else:
        print (which_machine,which_untrusted, "RUNNER FAILURE")
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted, item_name, message="RUNNER FAILURE")

    add_permissions_recursive(tmp_work,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH) 
    add_permissions_recursive(tmp_compilation,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH) 

    # --------------------------------------------------------------------
    # RUN VALIDATOR
    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nVALIDATION STARTS", file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"]["submission_to_validation"]
    pattern_copy("submission_to_validation",patterns_submission_to_validation,submission_path,tmp_work,tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation",patterns_submission_to_validation,checkout_subdir_path,tmp_work,tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"]["compilation_to_validation"]
    pattern_copy("compilation_to_validation",patterns_compilation_to_validation,tmp_compilation,tmp_work,tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(job_id,test_output_path,tmp_work,tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(job_id,custom_validation_code_path,tmp_work,tmp_logs)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy validator.out to the current directory
    shutil.copy (os.path.join(bin_path,"validate.out"),os.path.join(tmp_work,"my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(os.path.join(tmp_work,"my_validator.out"), stat.S_IXUSR | stat.S_IXGRP |stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # TODO: remove debugging prints.
    print("VALIDATING")
    # run the validator.out as the untrusted user
    with open(os.path.join(tmp_logs,"validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            # WIP: This option file facilitated testing...
            #USE_DOCKER = os.path.isfile("/tmp/use_docker")
            #use_docker_string="grading begins, using DOCKER" if USE_DOCKER else "grading begins (not using docker)"
            #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message=use_docker_string)
            container = subprocess.check_output(['docker', 'run', '-t', '-d',
                                                 '-v', tmp + ':' + tmp,
                                                 'ubuntu:custom']).decode('utf8').strip()
            dockerlaunch_done=dateutils.get_current_time()
            dockerlaunch_time = (dockerlaunch_done-grading_began).total_seconds()
            grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"dcct:",dockerlaunch_time,"docker container created")

            validator_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                                 os.path.join(tmp_work, 'my_validator.out'), queue_obj['gradeable'],
                                                 queue_obj['who'], str(queue_obj['version']), submission_string], stdout=logfile)
        else:
            validator_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR,"sbin","untrusted_execute"),
                                                 which_untrusted,
                                                 os.path.join(tmp_work,"my_validator.out"),
                                                 queue_obj["gradeable"],
                                                 queue_obj["who"],
                                                 str(queue_obj["version"]),
                                                 submission_string],
                                                stdout=logfile)

    if validator_success == 0:
        print (which_machine,which_untrusted,"VALIDATOR OK")
    else:
        print (which_machine,which_untrusted,"VALIDATOR FAILURE")
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted,tmp_work)

    # grab the result of autograding
    grade_result = ""
    try:
        with open(os.path.join(tmp_work,"grade.txt")) as f:
            lines = f.readlines()
            for line in lines:
                line = line.rstrip('\n')
                if line.startswith("Automatic grading total:"):
                    grade_result = line
    except:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print ("\n\nERROR: Grading incomplete -- Could not open ",os.path.join(tmp_work,"grade.txt"), file=f)
            grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="ERROR: grade.txt does not exist")
            grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE
    tmp_results = os.path.join(tmp,"TMP_RESULTS")

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nARCHIVING STARTS", file=f)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    os.makedirs(os.path.join(tmp_results,"details"))

    # remove the test_input directory, so we don't archive it!
    shutil.rmtree(os.path.join(tmp_work,"test_input"))

    # loop over the test case directories, and remove any files that are also in the test_input folder
    for testcase_num in range(1, len(my_testcases)+1):
        testcase_folder = os.path.join(tmp_work, "test{:02}".format(testcase_num))
        remove_test_input_files(os.path.join(tmp_logs,"overall.txt"),test_input_path,testcase_folder)

    patterns_work_to_details = complete_config_obj["autograding"]["work_to_details"]
    pattern_copy("work_to_details",patterns_work_to_details,tmp_work,os.path.join(tmp_results,"details"),tmp_logs)

    if ("work_to_public" in complete_config_obj["autograding"] and
        len(complete_config_obj["autograding"]["work_to_public"]) > 0):
        # create the directory
        os.makedirs(os.path.join(tmp_results,"results_public"))
        # copy the files
        patterns_work_to_public = complete_config_obj["autograding"]["work_to_public"]
        pattern_copy("work_to_public",patterns_work_to_public,tmp_work,os.path.join(tmp_results,"results_public"),tmp_logs)

    history_file_tmp = os.path.join(tmp_submission,"history.json")
    history_file = os.path.join(tmp_results,"history.json")
    if os.path.isfile(history_file_tmp):
        shutil.move(history_file_tmp,history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file,int(DAEMON_UID),ta_group_id)
        add_permissions(history_file,stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    try:
        shutil.copy(os.path.join(tmp_work,"grade.txt"),tmp_results)
    except:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print ("\n\nERROR: Grading incomplete -- Could not copy ",os.path.join(tmp_work,"grade.txt"), file=f)
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="ERROR: grade.txt does not exist")
        grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())

    # -------------------------------------------------------------
    # create/append to the results history

    # grab the submission time
    with open (os.path.join(submission_path,".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)
    
    seconds_late = int((submission_datetime-gradeable_deadline_datetime).total_seconds())
    # note: negative = not late

    grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

    gradingtime = (grading_finished-grading_began).total_seconds()

    with open(os.path.join(tmp_submission,"queue_file.json"), 'r') as infile:
        queue_obj = json.load(infile)
    queue_obj["gradingtime"]=gradingtime
    queue_obj["grade_result"]=grade_result
    queue_obj["which_untrusted"]=which_untrusted

    with open(os.path.join(tmp_results,"queue_file.json"),'w') as outfile:
        json.dump(queue_obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    try:
        shutil.move(os.path.join(tmp_work, "results.json"), os.path.join(tmp_results, "results.json"))
    except:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print ("\n\nERROR: Grading incomplete -- Could not open/write ",os.path.join(tmp_work,"results.json"), file=f)
            grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="ERROR: results.json read/write error")
            grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())

    write_grade_history.just_write_grade_history(history_file,
                                                 gradeable_deadline_longstring,
                                                 submission_longstring,
                                                 seconds_late,
                                                 queue_time_longstring,
                                                 is_batch_job_string,
                                                 grading_began_longstring,
                                                 int(waittime),
                                                 grading_finished_longstring,
                                                 int(gradingtime),
                                                 grade_result,
                                                 revision)

    os.chdir(SUBMITTY_DATA_DIR)

    if USE_DOCKER:
        with open(os.path.join(tmp_logs,"overall_log.txt"), 'w') as logfile:
            chmod_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                             'chmod', '-R', 'ugo+rwx', '.'], stdout=logfile)

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(tmp_results,"logs"))

    # zip up results folder
    filehandle, my_results_zip_file=tempfile.mkstemp()
    zip_my_directory(tmp_results,my_results_zip_file)
    os.close(filehandle)
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp_results)
    shutil.rmtree(tmp_work)
    shutil.rmtree(tmp)

    # WIP: extra logging for testing
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message="done grading")

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
        dockerdestroy_done=dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done-grading_finished).total_seconds()
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"ddt:",dockerdestroy_time,"docker container destroyed")
        
    grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"grade:",gradingtime,grade_result)

    return my_results_zip_file
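
grade_from_zip finishes by zipping the results folder with Submitty's
zip_my_directory helper (not shown here). A walk-based stand-in with the same
apparent semantics (archive every file under a directory, stored relative to
its root), assuming only the standard library:

import os
import tempfile
import zipfile

def zip_directory_sketch(src_dir):
    # create a temporary file to hold the archive, as the example does with mkstemp
    fd, zip_path = tempfile.mkstemp(suffix=".zip")
    os.close(fd)
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf:
        for root, _dirs, files in os.walk(src_dir):
            for name in files:
                full_path = os.path.join(root, name)
                zf.write(full_path, os.path.relpath(full_path, src_dir))
    return zip_path
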
Example #34
def worker_process(which_machine,address,which_untrusted,my_server):

    # verify the DAEMON_USER is running this script
    if not int(os.getuid()) == int(DAEMON_UID):
        grade_items_logging.log_message(JOB_ID, message="ERROR: must be run by DAEMON_USER")
        raise SystemExit("ERROR: the grade_item.py script must be run by the DAEMON_USER")

    # ignore keyboard interrupts in the worker processes
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    counter = 0

    servername_workername = "{0}_{1}".format(my_server, address)
    autograding_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_autograding.zip")
    submission_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_submission.zip")
    todo_queue_file = os.path.join(SUBMITTY_DATA_DIR,"autograding_TODO",servername_workername+"_"+which_untrusted+"_queue.json")

    while True:
        if os.path.exists(todo_queue_file):
            try:
                results_zip_tmp = grade_item.grade_from_zip(autograding_zip,submission_zip,which_untrusted)
                results_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_DONE",servername_workername+"_"+which_untrusted+"_results.zip")
                done_queue_file = os.path.join(SUBMITTY_DATA_DIR,"autograding_DONE",servername_workername+"_"+which_untrusted+"_queue.json")
                #move doesn't inherit the permissions of the destination directory. Copyfile does.
                shutil.copyfile(results_zip_tmp, results_zip)

                os.remove(results_zip_tmp)
                with open(todo_queue_file, 'r') as infile:
                    queue_obj = json.load(infile)
                    queue_obj["done_time"]=dateutils.write_submitty_date(microseconds=True)
                with open(done_queue_file, 'w') as outfile:
                    json.dump(queue_obj, outfile, sort_keys=True, indent=4)        
            except Exception as e:
                grade_items_logging.log_message(JOB_ID, message="ERROR attempting to unzip graded item: " + which_machine + " " + which_untrusted + ". For more details, see the stack traces log.")
                grade_items_logging.log_stack_trace(JOB_ID,trace=traceback.format_exc())
                with contextlib.suppress(FileNotFoundError):
                    os.remove(autograding_zip)
                with contextlib.suppress(FileNotFoundError):
                    os.remove(submission_zip)

                #Respond with a failure zip file.
                results_zip = os.path.join(SUBMITTY_DATA_DIR,"autograding_DONE",servername_workername+"_"+which_untrusted+"_results.zip")
                tmp_dir = tempfile.mkdtemp()
                with open(os.path.join(tmp_dir, 'failure.txt'), 'w') as outfile:
                    outfile.write("grading failed.\n")

                results_zip_tmp = zipfile.ZipFile(results_zip, 'w')
                results_zip_tmp.write(os.path.join(tmp_dir, 'failure.txt'))
                results_zip_tmp.close()

                shutil.rmtree(tmp_dir)
                done_queue_file = os.path.join(SUBMITTY_DATA_DIR,"autograding_DONE",servername_workername+"_"+which_untrusted+"_queue.json")
                with open(todo_queue_file, 'r') as infile:
                    queue_obj = json.load(infile)
                    queue_obj["done_time"]=dateutils.write_submitty_date(microseconds=True)
                with open(done_queue_file, 'w') as outfile:
                    json.dump(queue_obj, outfile, sort_keys=True, indent=4)

            with contextlib.suppress(FileNotFoundError):
                os.remove(todo_queue_file)
            counter = 0
        else:
            if counter >= 10:
                print (which_machine,which_untrusted,"wait")
                counter = 0
            counter += 1
            time.sleep(1)
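
worker_process stamps a "done_time" via dateutils.write_submitty_date(microseconds=True)
before handing the queue file back. A rough sketch of that handshake, with
hypothetical file paths and a strftime approximation of the timestamp (the
real helper's precision may differ):

import json
from datetime import datetime, timezone

def mark_job_done(todo_queue_file, done_queue_file):
    with open(todo_queue_file, 'r') as infile:
        queue_obj = json.load(infile)
    # approximate "YYYY-MM-DD HH:MM:SS.ffffff+zzzz"; write_submitty_date
    # with microseconds=True may truncate differently
    now = datetime.now(timezone.utc)
    queue_obj["done_time"] = now.strftime("%Y-%m-%d %H:%M:%S.%f%z")
    with open(done_queue_file, 'w') as outfile:
        json.dump(queue_obj, outfile, sort_keys=True, indent=4)
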
Example #35
def prepare_autograding_and_submission_zip(
    which_machine,
    which_untrusted,
    next_directory,
    next_to_grade
):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id, next_directory, next_to_grade)
    # The top level course directory for this class
    course_dir = os.path.join(SUBMITTY_DATA_DIR, 'courses', obj["semester"], obj["course"])
    if "generate_output" not in obj:
        partial_path = os.path.join(obj["gradeable"], obj["who"], str(obj["version"]))
        item_name = os.path.join(obj["semester"], obj["course"], "submissions", partial_path)
        submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
        if not os.path.isdir(submission_path):
            autograding_utils.log_message(
                AUTOGRADING_LOG_PATH,
                job_id,
                message=f"ERROR: the submission directory does not exist {submission_path}"
            )
            raise RuntimeError("ERROR: the submission directory does not exist", submission_path)
        print(which_machine, which_untrusted, "prepare zip", submission_path)
        is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
            SUBMITTY_DATA_DIR,
            obj["semester"],
            obj["course"],
            obj["gradeable"],
            obj["who"],
            obj["team"]
        )

    elif obj["generate_output"]:
        item_name = os.path.join(
            obj["semester"],
            obj["course"],
            "generated_output",
            obj["gradeable"]
        )

    is_batch_job = "regrade" in obj and obj["regrade"]

    queue_time = get_queue_time(next_directory, next_to_grade)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began-queue_time).total_seconds()
    autograding_utils.log_message(
        AUTOGRADING_LOG_PATH,
        job_id,
        is_batch_job,
        "zip",
        item_name,
        "wait:",
        waittime,
        ""
    )

    # --------------------------------------------------------
    # various paths

    provided_code_path = os.path.join(course_dir, "provided_code", obj["gradeable"])
    instructor_solution_path = os.path.join(course_dir, "instructor_solution", obj["gradeable"])
    test_input_path = os.path.join(course_dir, "test_input", obj["gradeable"])
    test_output_path = os.path.join(course_dir, "test_output", obj["gradeable"])
    bin_path = os.path.join(course_dir, "bin", obj["gradeable"])
    form_json_config = os.path.join(course_dir, "config", "form", f"form_{obj['gradeable']}.json")
    custom_validation_code_path = os.path.join(
        course_dir,
        "custom_validation_code",
        obj["gradeable"]
    )
    generated_output_path = os.path.join(
        course_dir,
        "generated_output",
        obj["gradeable"],
        "random_output"
    )
    complete_config = os.path.join(
        course_dir,
        "config",
        "complete_config",
        f"complete_config_{obj['gradeable']}.json"
    )

    if not os.path.exists(form_json_config):
        autograding_utils.log_message(
            AUTOGRADING_LOG_PATH,
            job_id,
            message=f"ERROR: the form json file does not exist {form_json_config}"
        )
        raise RuntimeError(f"ERROR: the form json file does not exist {form_json_config}")

    if not os.path.exists(complete_config):
        autograding_utils.log_message(
            AUTOGRADING_LOG_PATH,
            job_id,
            message=f"ERROR: the complete config file does not exist {complete_config}"
        )
        raise RuntimeError(f"ERROR: the complete config file does not exist {complete_config}")

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path, os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(test_input_path, os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path, os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(generated_output_path, os.path.join(tmp_autograding, "generated_output"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    copytree_if_exists(
        instructor_solution_path,
        os.path.join(tmp_autograding, "instructor_solution")
    )
    copytree_if_exists(
        custom_validation_code_path,
        os.path.join(tmp_autograding, "custom_validation_code")
    )

    # Copy the default submitty_router into bin.
    router_path = os.path.join(
        SUBMITTY_INSTALL_DIR,
        'src',
        'grading',
        'python',
        'submitty_router.py'
    )
    shutil.copy(router_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config, os.path.join(tmp_autograding, "complete_config.json"))

    if "generate_output" not in obj:
        checkout_path = os.path.join(course_dir, "checkout", partial_path)
        results_path = os.path.join(course_dir, "results", partial_path)
    elif obj["generate_output"]:
        results_path = os.path.join(course_dir, "generated_output", obj["gradeable"])

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    if os.path.isfile(history_file):
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a').close()

    # --------------------------------------------------------------------
    # CONFIRM WE HAVE A CHECKOUT OF THE STUDENT'S REPO
    if "generate_output" not in obj:
        if is_vcs:
            # there should be a checkout log file in the results directory
            # move that file to the tmp logs directory.
            vcs_checkout_logfile = os.path.join(results_path, "logs", "vcs_checkout.txt")
            if os.path.isfile(vcs_checkout_logfile):
                shutil.move(vcs_checkout_logfile, tmp_logs)
            else:
                autograding_utils.log_message(
                    AUTOGRADING_LOG_PATH,
                    job_id,
                    message=f"ERROR: missing vcs_checkout.txt logfile {str(vcs_checkout_logfile)}"
                )

    if "generate_output" not in obj:
        copytree_if_exists(submission_path, os.path.join(tmp_submission, "submission"))
        copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
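    # serialize queue_time into Submitty's date-string format for the queue file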
    obj["queue_time"] = dateutils.write_submitty_date(queue_time)
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj, outfile, sort_keys=True, indent=4, separators=(',', ': '))

    user_assignment_access_json = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
        "submissions", obj["gradeable"], obj["who"], "user_assignment_access.json")
    user_assignment_settings_json = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
        "submissions", obj["gradeable"], obj["who"], "user_assignment_settings.json")

    if os.path.exists(user_assignment_access_json):
        shutil.copy(
            user_assignment_access_json,
            os.path.join(tmp_submission, "user_assignment_access.json")
        )
    if os.path.exists(user_assignment_settings_json):
        shutil.copy(
            user_assignment_settings_json,
            os.path.join(tmp_submission, "user_assignment_settings.json")
        )

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    autograding_utils.zip_my_directory(tmp_autograding, my_autograding_zip_file)
    autograding_utils.zip_my_directory(tmp_submission, my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    return (my_autograding_zip_file, my_submission_zip_file)
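
The copytree_if_exists helper called throughout this example is not part of the
excerpt. A minimal sketch, assuming it simply skips source directories that do
not exist (e.g. a gradeable with no instructor_solution directory):

import os
import shutil

def copytree_if_exists(source, target):
    # Copy the whole directory tree, but quietly skip a missing source
    # so optional directories don't abort the zip preparation.
    if os.path.isdir(source):
        shutil.copytree(source, target)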
Example #36
0
def prepare_autograding_and_submission_zip(next_directory, next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)
    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(next_directory, next_to_grade)

    partial_path = os.path.join(obj["gradeable"], obj["who"],
                                str(obj["version"]))
    item_name = os.path.join(obj["semester"], obj["course"], "submissions",
                             partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(
            message="ERROR: the submission directory does not exist: " +
            submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist",
                           submission_path)
    print("pid", os.getpid(), "GRADE THIS", submission_path)
    is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
        SUBMITTY_DATA_DIR, obj["semester"], obj["course"], obj["gradeable"],
        obj["who"], obj["team"])

    is_batch_job = next_directory == BATCH_QUEUE
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory, next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began - queue_time).total_seconds()
    grade_items_logging.log_message(is_batch_job, "zip", item_name, "wait:",
                                    waittime, "")

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE

    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                      obj["semester"], obj["course"],
                                      "provided_code", obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                   obj["semester"], obj["course"],
                                   "test_input", obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"],
                                    "test_output", obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                               obj["semester"], obj["course"],
                                               "custom_validation_code",
                                               obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                            obj["course"], "bin", obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"], "config",
                                    "form",
                                    "form_" + obj["gradeable"] + ".json")
    complete_config = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"], "config",
        "complete_config", "complete_config_" + obj["gradeable"] + ".json")

    copytree_if_exists(provided_code_path,
                       os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(test_input_path,
                       os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path,
                       os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(custom_validation_code_path,
                       os.path.join(tmp_autograding, "custom_validation_code"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config,
                os.path.join(tmp_autograding, "complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                 obj["course"], "checkout", partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                obj["course"], "results", partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        filehandle, history_file_tmp = tempfile.mkstemp()
        shutil.copy(history_file, history_file_tmp)
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))

    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)
    queue_file = os.path.join(next_directory, next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a').close()

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    submission_datetime = dateutils.read_submitty_date(submission_string)

    # --------------------------------------------------------------------
    # CHECKOUT THE STUDENT's REPO
    if is_vcs:
        # is vcs_subdirectory standalone or should it be combined with base_url?
        if vcs_subdirectory[0] == '/' or '://' in vcs_subdirectory:
            vcs_path = vcs_subdirectory
        else:
            if '://' in vcs_base_url:
                vcs_path = urllib.parse.urljoin(vcs_base_url, vcs_subdirectory)
            else:
                vcs_path = os.path.join(vcs_base_url, vcs_subdirectory)

        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("====================================\nVCS CHECKOUT", file=f)
            print('vcs_base_url', vcs_base_url, file=f)
            print('vcs_subdirectory', vcs_subdirectory, file=f)
            print('vcs_path', vcs_path, file=f)
            print(['/usr/bin/git', 'clone', vcs_path, checkout_path], file=f)

        # cleanup the previous checkout (if it exists)
        shutil.rmtree(checkout_path, ignore_errors=True)
        os.makedirs(checkout_path, exist_ok=True)
        subprocess.call(['/usr/bin/git', 'clone', vcs_path, checkout_path])
        os.chdir(checkout_path)

        # determine which version we need to checkout:
        # the last commit on master before the submission timestamp
        # (no shell is involved, so the timestamp needs no extra quoting)
        what_version = subprocess.check_output([
            'git', 'rev-list', '-n', '1',
            '--before=' + submission_string, 'master'
        ])
        what_version = str(what_version.decode('utf-8')).rstrip()
        if what_version == "":
            # oops, pressed the grade button before a valid commit
            shutil.rmtree(checkout_path, ignore_errors=True)
        else:
            # and check out the right version
            subprocess.call(['git', 'checkout', '-b', 'grade', what_version])
        os.chdir(tmp)
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            subprocess.call(['ls', '-lR', checkout_path], stdout=f)

    copytree_if_exists(submission_path,
                       os.path.join(tmp_submission, "submission"))
    copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["is_batch_job"] = is_batch_job
    obj["waittime"] = waittime

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    zip_my_directory(tmp_autograding, my_autograding_zip_file)
    zip_my_directory(tmp_submission, my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    return (my_autograding_zip_file, my_submission_zip_file)
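
load_queue_file_obj is likewise assumed rather than shown. A plausible sketch,
given that the object it returns carries the queue file's fields (semester,
course, gradeable, who, team, version) and is later dumped back out as
queue_file.json:

import json
import os

def load_queue_file_obj(next_directory, next_to_grade):
    # The queue file is a small JSON object describing what to grade.
    queue_file = os.path.join(next_directory, next_to_grade)
    with open(queue_file) as infile:
        return json.load(infile)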
Example #37
0
def archive_autograding_results(working_directory, job_id, which_untrusted, is_batch_job,
                                complete_config_obj, gradeable_config_obj, queue_obj,
                                log_path, stack_trace_log_path, is_test_environment):
    """ After grading is finished, archive the results. """

    tmp_autograding = os.path.join(working_directory, "TMP_AUTOGRADING")
    tmp_submission = os.path.join(working_directory, "TMP_SUBMISSION")
    tmp_work = os.path.join(working_directory, "TMP_WORK")
    tmp_logs = os.path.join(working_directory, "TMP_SUBMISSION", "tmp_logs")
    tmp_results = os.path.join(working_directory, "TMP_RESULTS")
    submission_path = os.path.join(tmp_submission, "submission")
    random_output_path = os.path.join(tmp_work, 'random_output')

    partial_path = os.path.join(queue_obj["gradeable"], queue_obj["who"], str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"], queue_obj["course"], "submissions", partial_path)
    results_public_dir = os.path.join(tmp_results, "results_public")
    results_details_dir = os.path.join(tmp_results, "details")
    patterns = complete_config_obj['autograding']

    # Copy work to details
    pattern_copy("work_to_details", patterns['work_to_details'], tmp_work, results_details_dir, tmp_logs)

    # Copy work to public
    if 'work_to_public' in patterns:
        pattern_copy("work_to_public", patterns['work_to_public'], tmp_work, results_public_dir, tmp_logs)

    if os.path.exists(random_output_path):
        pattern_copy("work_to_random_output",
                     [os.path.join(random_output_path, 'test*', '**', '*.txt')],
                     tmp_work, tmp_results, tmp_logs)

    # grab the submission time
    with open(os.path.join(tmp_submission, 'submission', ".submit.timestamp"), 'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    history_file_tmp = os.path.join(tmp_submission, "history.json")
    history_file = os.path.join(tmp_results, "history.json")
    if os.path.isfile(history_file_tmp) and not is_test_environment:
        from . import CONFIG_PATH
        with open(os.path.join(CONFIG_PATH, 'submitty_users.json')) as open_file:
            OPEN_JSON = json.load(open_file)
        DAEMON_UID = OPEN_JSON['daemon_uid']

        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(DAEMON_UID), ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)

    grading_finished = dateutils.get_current_time()

    try:
        shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not copy", os.path.join(tmp_work, "grade.txt"), file=f)
        log_message(log_path, job_id, is_batch_job, which_untrusted, item_name,
                    message="ERROR: grade.txt does not exist")
        log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name,
                        trace=traceback.format_exc())

    grade_result = ""
    try:
        with open(os.path.join(tmp_work,"grade.txt")) as f:
            lines = f.readlines()
            for line in lines:
                line = line.rstrip('\n')
                if line.startswith("Automatic grading total:"):
                    grade_result = line
    except:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print ("\n\nERROR: Grading incomplete -- Could not open ",os.path.join(tmp_work,"grade.txt"))
            log_message(job_id,is_batch_job,which_untrusted,item_name,message="ERROR: grade.txt does not exist")
            log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())


    gradeable_deadline_string = gradeable_config_obj["date_due"]

    submission_datetime = dateutils.read_submitty_date(submission_string)
    gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)
    # note: negative = not late
    seconds_late = int((submission_datetime - gradeable_deadline_datetime).total_seconds())

    grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

    with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)

    gradingtime = (grading_finished - grading_began).total_seconds()

    queue_obj["gradingtime"]=gradingtime
    queue_obj["grade_result"]=grade_result
    queue_obj["which_untrusted"]=which_untrusted
    waittime = queue_obj["waittime"]

    with open(os.path.join(tmp_results,"queue_file.json"),'w') as outfile:
        json.dump(queue_obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    try:
        shutil.move(os.path.join(tmp_work, "results.json"), os.path.join(tmp_results, "results.json"))
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open/write", os.path.join(tmp_work, "results.json"), file=f)
        log_message(log_path, job_id, is_batch_job, which_untrusted, item_name,
                    message="ERROR: results.json read/write error")
        log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name,
                        trace=traceback.format_exc())

    just_write_grade_history(history_file,
                             gradeable_deadline_longstring,
                             submission_longstring,
                             seconds_late,
                             queue_obj["queue_time"],
                             "BATCH" if is_batch_job else "INTERACTIVE",
                             grading_began_longstring,
                             int(waittime),
                             grading_finished_longstring,
                             int(gradingtime),
                             grade_result,
                             queue_obj.get("revision", None))

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(tmp_results,"logs"))
    log_message(log_path, job_id,is_batch_job,which_untrusted,item_name,"grade:",gradingtime,grade_result)
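
just_write_grade_history is defined elsewhere; the call above pins down its
argument order but not its output format. A sketch of one reasonable
implementation -- the field names below are illustrative, not Submitty's
actual schema:

import json
import os

def just_write_grade_history(history_file, assignment_deadline, submission_time,
                             seconds_late, queue_time, batch_regrade,
                             grading_began, wait_time, grading_finished,
                             grade_time, autograde_total, revision=None):
    # history.json holds a list of records, one per grading run of this version.
    history = []
    if os.path.isfile(history_file):
        with open(history_file) as infile:
            history = json.load(infile)
    record = {
        "assignment_deadline": assignment_deadline,
        "submission_time": submission_time,
        "seconds_late": seconds_late,
        "queue_time": queue_time,
        "batch_regrade": batch_regrade,
        "grading_began": grading_began,
        "wait_time": wait_time,
        "grading_finished": grading_finished,
        "grade_time": grade_time,
        "autograde_total": autograde_total,
    }
    if revision is not None:
        record["revision"] = revision
    history.append(record)
    with open(history_file, 'w') as outfile:
        json.dump(history, outfile, indent=4)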
Example #38
0
def just_grade_item(next_directory, next_to_grade, which_untrusted):

    my_pid = os.getpid()

    # verify the hwcron user is running this script
    if int(os.getuid()) != int(HWCRON_UID):
        grade_items_logging.log_message("ERROR: must be run by hwcron")
        raise SystemExit("ERROR: the grade_item.py script must be run by the hwcron user")

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = get_submission_path(next_directory, next_to_grade)
    submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                   "submissions", obj["gradeable"], obj["who"], str(obj["version"]))
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message("ERROR: the submission directory does not exist: " + submission_path)
        raise SystemExit("ERROR: the submission directory does not exist", submission_path)
    print("pid", my_pid, "GRADE THIS", submission_path)

    is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
        SUBMITTY_DATA_DIR, obj["semester"], obj["course"], obj["gradeable"], obj["who"], obj["team"])

    is_batch_job = next_directory == BATCH_QUEUE
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory, next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = int((grading_began - queue_time).total_seconds())
    grade_items_logging.log_message(is_batch_job, which_untrusted, submission_path, "wait:", waittime, "")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                      "provided_code", obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                   "test_input", obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                    "test_output", obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                               "custom_validation_code", obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"], "bin")

    checkout_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                 "checkout", obj["gradeable"], obj["who"], str(obj["version"]))
    results_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                "results", obj["gradeable"], obj["who"], str(obj["version"]))

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        filehandle, history_file_tmp = tempfile.mkstemp()
        os.close(filehandle)
        shutil.copy(history_file, history_file_tmp)

    # get info from the gradeable form config file
    json_config = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                               "config", "form", "form_" + obj["gradeable"] + ".json")
    with open(json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)

    # get info from the gradeable complete config file
    complete_config = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
                                   "config", "complete_config", "complete_config_" + obj["gradeable"] + ".json")
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get("use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = os.path.join("/var/local/submitty/autograding_tmp/", which_untrusted, "tmp")
    shutil.rmtree(tmp, ignore_errors=True)
    os.makedirs(tmp)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "tmp_logs")
    os.makedirs(tmp_logs)

    # grab the submission time
    with open(os.path.join(submission_path, ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    submission_datetime = dateutils.read_submitty_date(submission_string)

    # --------------------------------------------------------------------
    # CHECKOUT THE STUDENT's REPO
    if is_vcs:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("====================================\nVCS CHECKOUT", file=f)
            print("vcs_subdirectory", vcs_subdirectory, file=f)
        # cleanup the previous checkout (if it exists)
        shutil.rmtree(checkout_path, ignore_errors=True)
        os.makedirs(checkout_path, exist_ok=True)
        subprocess.call(['/usr/bin/git', 'clone', vcs_subdirectory, checkout_path])
        os.chdir(checkout_path)

        # determine which version we need to checkout:
        # the last commit on master before the submission timestamp
        # (no shell is involved, so the timestamp needs no extra quoting)
        what_version = subprocess.check_output(
            ['git', 'rev-list', '-n', '1', '--before=' + submission_string, 'master'])
        what_version = str(what_version.decode('utf-8')).rstrip()
        if what_version == "":
            # oops, pressed the grade button before a valid commit
            shutil.rmtree(checkout_path, ignore_errors=True)
        else:
            # and check out the right version
            subprocess.call(['git', 'checkout', '-b', 'grade', what_version])
        os.chdir(tmp)
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            subprocess.call(['ls', '-lR', checkout_path], stdout=f)


    # --------------------------------------------------------------------
    # START DOCKER

    container = None
    if USE_DOCKER:
        container = subprocess.check_output(['docker', 'run', '-t', '-d',
                                             '-v', tmp + ':' + tmp,
                                             'ubuntu:custom']).decode('utf8').strip()

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS", file=f)

    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp, "TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)

    gradeable_deadline_string = gradeable_config_obj["date_due"]

    patterns_submission_to_compilation = complete_config_obj["autograding"]["submission_to_compilation"]
    pattern_copy("submission_to_compilation", patterns_submission_to_compilation, submission_path, tmp_compilation, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_compilation", patterns_submission_to_compilation, checkout_subdir_path, tmp_compilation, tmp_logs)

    # copy any instructor provided code files to tmp compilation directory
    copy_contents_into(provided_code_path, tmp_compilation, tmp_logs)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    # copy compile.out to the current directory
    shutil.copy(os.path.join(bin_path, obj["gradeable"], "compile.out"), os.path.join(tmp_compilation, "my_compile.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_compilation,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP)

    add_permissions(tmp, stat.S_IROTH | stat.S_IXOTH)
    add_permissions(tmp_logs, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    with open(os.path.join(tmp_logs,"compilation_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            compile_success = subprocess.call(['docker', 'exec', '-w', tmp_compilation, container,
                                               os.path.join(tmp_compilation, 'my_compile.out'), obj['gradeable'],
                                               obj['who'], str(obj['version']), submission_string], stdout=logfile)
        else:
            compile_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR,"bin","untrusted_execute"),
                                               which_untrusted,
                                               os.path.join(tmp_compilation,"my_compile.out"),
                                               obj["gradeable"],
                                               obj["who"],
                                               str(obj["version"]),
                                               submission_string],
                                              stdout=logfile)

    if compile_success == 0:
        print ("pid",my_pid,"COMPILATION OK")
    else:
        print ("pid",my_pid,"COMPILATION FAILURE")
        grade_items_logging.log_message(is_batch_job,which_untrusted,submission_path,"","","COMPILATION FAILURE")
    #raise SystemExit()

    untrusted_grant_rwx_access(which_untrusted,tmp_compilation)
        
    # remove the compilation program
    os.remove(os.path.join(tmp_compilation,"my_compile.out"))

    # return to the main tmp directory
    os.chdir(tmp)


    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nRUNNER STARTS", file=f)

    tmp_work = os.path.join(tmp, "TMP_WORK")
    os.makedirs(tmp_work)
    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"]["submission_to_runner"]
    pattern_copy("submission_to_runner", patterns_submission_to_runner, submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner", patterns_submission_to_runner, checkout_subdir_path, tmp_work, tmp_logs)

    patterns_compilation_to_runner = complete_config_obj["autograding"]["compilation_to_runner"]
    pattern_copy("compilation_to_runner", patterns_compilation_to_runner, tmp_compilation, tmp_work, tmp_logs)

    # copy input files to tmp_work directory
    copy_contents_into(test_input_path, tmp_work, tmp_logs)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    # copy runner.out to the current directory
    shutil.copy(os.path.join(bin_path, obj["gradeable"], "run.out"), os.path.join(tmp_work, "my_runner.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # run the run.out as the untrusted user
    with open(os.path.join(tmp_logs, "runner_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            runner_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                              os.path.join(tmp_work, 'my_runner.out'), obj['gradeable'],
                                              obj['who'], str(obj['version']), submission_string], stdout=logfile)
        else:
            runner_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR, "bin", "untrusted_execute"),
                                              which_untrusted,
                                              os.path.join(tmp_work, "my_runner.out"),
                                              obj["gradeable"],
                                              obj["who"],
                                              str(obj["version"]),
                                              submission_string],
                                             stdout=logfile)

    if runner_success == 0:
        print("pid", my_pid, "RUNNER OK")
    else:
        print("pid", my_pid, "RUNNER FAILURE")
        grade_items_logging.log_message(is_batch_job, which_untrusted, submission_path, "", "", "RUNNER FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)
    untrusted_grant_rwx_access(which_untrusted, tmp_compilation)

    # --------------------------------------------------------------------
    # RUN VALIDATOR

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nVALIDATION STARTS", file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"]["submission_to_validation"]
    pattern_copy("submission_to_validation", patterns_submission_to_validation, submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation", patterns_submission_to_validation, checkout_subdir_path, tmp_work, tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"]["compilation_to_validation"]
    pattern_copy("compilation_to_validation", patterns_compilation_to_validation, tmp_compilation, tmp_work, tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(test_output_path, tmp_work, tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(custom_validation_code_path, tmp_work, tmp_logs)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    # copy validator.out to the current directory
    shutil.copy(os.path.join(bin_path, obj["gradeable"], "validate.out"), os.path.join(tmp_work, "my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(os.path.join(tmp_work, "my_validator.out"), stat.S_IROTH | stat.S_IXOTH)

    # run the validator.out as the untrusted user
    with open(os.path.join(tmp_logs, "validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            validator_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                                 os.path.join(tmp_work, 'my_validator.out'), obj['gradeable'],
                                                 obj['who'], str(obj['version']), submission_string], stdout=logfile)
        else:
            validator_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR, "bin", "untrusted_execute"),
                                                 which_untrusted,
                                                 os.path.join(tmp_work, "my_validator.out"),
                                                 obj["gradeable"],
                                                 obj["who"],
                                                 str(obj["version"]),
                                                 submission_string],
                                                stdout=logfile)

    if validator_success == 0:
        print("pid", my_pid, "VALIDATOR OK")
    else:
        print("pid", my_pid, "VALIDATOR FAILURE")
        grade_items_logging.log_message(is_batch_job, which_untrusted, submission_path, "", "", "VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)

    # grab the result of autograding
    grade_result = ""
    with open(os.path.join(tmp_work, "grade.txt")) as f:
        for line in f:
            line = line.rstrip('\n')
            if line.startswith("Automatic grading total:"):
                grade_result = line

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nARCHIVING STARTS", file=f)
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    os.chdir(bin_path)

    # save the old results path!
    if os.path.isdir(os.path.join(results_path, "OLD")):
        shutil.move(os.path.join(results_path, "OLD"),
                    os.path.join(tmp, "OLD_RESULTS"))

    # clean out all of the old files if this is a re-run
    shutil.rmtree(results_path, ignore_errors=True)

    # create the directory (and the full path if it doesn't already exist)
    os.makedirs(results_path)

    # bring back the old results!
    if os.path.isdir(os.path.join(tmp, "OLD_RESULTS")):
        shutil.move(os.path.join(tmp, "OLD_RESULTS"),
                    os.path.join(results_path, "OLD"))

    os.makedirs(os.path.join(results_path, "details"))

    patterns_work_to_details = complete_config_obj["autograding"]["work_to_details"]
    pattern_copy("work_to_details", patterns_work_to_details, tmp_work, os.path.join(results_path, "details"), tmp_logs)

    if history_file_tmp != "":
        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(results_path).st_gid
        os.chown(history_file, int(HWCRON_UID), ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)

    grading_finished = dateutils.get_current_time()

    shutil.copy(os.path.join(tmp_work, "results.json"), results_path)
    shutil.copy(os.path.join(tmp_work, "grade.txt"), results_path)

    # -------------------------------------------------------------
    # create/append to the results history

    gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)

    # note: negative = not late
    seconds_late = int((submission_datetime - gradeable_deadline_datetime).total_seconds())

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

    gradingtime = int((grading_finished - grading_began).total_seconds())

    write_grade_history.just_write_grade_history(history_file,
                                                 gradeable_deadline_longstring,
                                                 submission_longstring,
                                                 seconds_late,
                                                 queue_time_longstring,
                                                 is_batch_job_string,
                                                 grading_began_longstring,
                                                 waittime,
                                                 grading_finished_longstring,
                                                 gradingtime,
                                                 grade_result)

    # ---------------------------------------------------------------------
    # WRITE OUT VERSION DETAILS
    if WRITE_DATABASE:
        insert_database_version_data.insert_to_database(
            obj["semester"],
            obj["course"],
            obj["gradeable"],
            obj["user"],
            obj["team"],
            obj["who"],
            bool(obj["is_team"]),
            str(obj["version"]))

    print ("pid",my_pid,"finished grading ", next_to_grade, " in ", gradingtime, " seconds")

    grade_items_logging.log_message(is_batch_job,which_untrusted,submission_path,"grade:",gradingtime,grade_result)

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        f.write("FINISHED GRADING!")

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(results_path,"logs"))

    # --------------------------------------------------------------------
    # REMOVE TEMP DIRECTORY
    shutil.rmtree(tmp)

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
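
The permission helpers used in this example (add_permissions,
add_permissions_recursive, untrusted_grant_rwx_access) are defined elsewhere.
Minimal sketches consistent with how they are called; the setfacl invocation
is an assumption about how rwx access might be granted, not Submitty's
verified implementation:

import os
import subprocess

def add_permissions(path, perms):
    # OR the given stat mode bits into the file's current permissions.
    os.chmod(path, os.stat(path).st_mode | perms)

def add_permissions_recursive(top_dir, root_perms, dir_perms, file_perms):
    # Apply mode bits to the top directory, every subdirectory, and every file.
    add_permissions(top_dir, root_perms)
    for dirpath, dirnames, filenames in os.walk(top_dir):
        for name in dirnames:
            add_permissions(os.path.join(dirpath, name), dir_perms)
        for name in filenames:
            add_permissions(os.path.join(dirpath, name), file_perms)

def untrusted_grant_rwx_access(which_untrusted, my_dir):
    # Grant the untrusted user rwx on the working tree via a POSIX ACL.
    subprocess.call(['setfacl', '-R', '-m',
                     'u:' + which_untrusted + ':rwx', my_dir])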